/*	$NetBSD: radeon_evergreen.c,v 1.3 2019/10/04 12:29:05 mrg Exp $	*/

/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: radeon_evergreen.c,v 1.3 2019/10/04 12:29:05 mrg Exp $");

#include <linux/bitops.h>
#include <linux/firmware.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

/*
 * Indirect registers accessor
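 *
 * Each accessor writes the register offset into the block's index
 * register and then moves the payload through the matching data
 * register; the spinlock keeps the index/data pair atomic against
 * concurrent callers.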
 */
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

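/*
 * The golden register tables below are {offset, AND mask, OR value}
 * triplets as consumed by radeon_program_register_sequence(): a mask of
 * 0xffffffff writes the value outright, anything narrower is applied as
 * a read-modify-write that clears the masked bits first.
 */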
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

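/*
 * Unpack the generic RADEON_TILING_EG_* bitfields and translate each
 * power-of-two value into the corresponding ADDR_SURF_* encoding used
 * by the surface registers.
 */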
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

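/*
 * Reprogramming the UPLL below follows the usual PLL bring-up dance:
 * park VCLK/DCLK on the bypass clock, put the PLL to sleep while the
 * dividers change, then wake it and switch the clock selects back once
 * the PLL has had time to settle.
 */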
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
#ifndef __NetBSD__		/* XXX radeon pcie */
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
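	/* readrq is a power of two between 128 and 4096 bytes, so
	 * ffs(readrq) - 8 recovers the PCIe MAX_READ_REQUEST_SIZE encoding
	 * (0 = 128 bytes up through 5 = 4096 bytes).
	 */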
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
#endif
}

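/*
 * Program the FMT block for the sink's depth: 6 and 8 bpc panels get
 * spatial dithering or truncation depending on the connector's dither
 * property, while 10 bpc and above need no reduction.
 */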
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
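	/* Check periodically that the position counter is still advancing,
	 * so a stalled CRTC cannot trap us in these loops.
	 */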
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		  EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

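		/* ASIC_T reads back in half-degree units: clamp the
		 * out-of-range flag bits, sign-extend the 9-bit value,
		 * then scale to millidegrees.
		 */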
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH. Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH. Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

/**
 * evergreen_pm_prepare - pre-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Prepare for a power state change (evergreen+).
 */
void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

/**
 * evergreen_pm_finish - post-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Clean up after a power state change (evergreen+).
 */
void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}
1710
1711 /**
1712 * evergreen_hpd_sense - hpd sense callback.
1713 *
1714 * @rdev: radeon_device pointer
1715 * @hpd: hpd (hotplug detect) pin
1716 *
1717 * Checks if a digital monitor is connected (evergreen+).
1718 * Returns true if connected, false if not connected.
1719 */
1720 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1721 {
1722 bool connected = false;
1723
1724 switch (hpd) {
1725 case RADEON_HPD_1:
1726 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1727 connected = true;
1728 break;
1729 case RADEON_HPD_2:
1730 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1731 connected = true;
1732 break;
1733 case RADEON_HPD_3:
1734 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1735 connected = true;
1736 break;
1737 case RADEON_HPD_4:
1738 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1739 connected = true;
1740 break;
1741 case RADEON_HPD_5:
1742 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1743 connected = true;
1744 break;
1745 case RADEON_HPD_6:
1746 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1747 connected = true;
1748 break;
1749 default:
1750 break;
1751 }
1752
1753 return connected;
1754 }
1755
1756 /**
1757 * evergreen_hpd_set_polarity - hpd set polarity callback.
1758 *
1759 * @rdev: radeon_device pointer
1760 * @hpd: hpd (hotplug detect) pin
1761 *
1762 * Set the polarity of the hpd pin (evergreen+).
1763 */
1764 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1765 enum radeon_hpd_id hpd)
1766 {
1767 u32 tmp;
1768 bool connected = evergreen_hpd_sense(rdev, hpd);
1769
1770 switch (hpd) {
1771 case RADEON_HPD_1:
1772 tmp = RREG32(DC_HPD1_INT_CONTROL);
1773 if (connected)
1774 tmp &= ~DC_HPDx_INT_POLARITY;
1775 else
1776 tmp |= DC_HPDx_INT_POLARITY;
1777 WREG32(DC_HPD1_INT_CONTROL, tmp);
1778 break;
1779 case RADEON_HPD_2:
1780 tmp = RREG32(DC_HPD2_INT_CONTROL);
1781 if (connected)
1782 tmp &= ~DC_HPDx_INT_POLARITY;
1783 else
1784 tmp |= DC_HPDx_INT_POLARITY;
1785 WREG32(DC_HPD2_INT_CONTROL, tmp);
1786 break;
1787 case RADEON_HPD_3:
1788 tmp = RREG32(DC_HPD3_INT_CONTROL);
1789 if (connected)
1790 tmp &= ~DC_HPDx_INT_POLARITY;
1791 else
1792 tmp |= DC_HPDx_INT_POLARITY;
1793 WREG32(DC_HPD3_INT_CONTROL, tmp);
1794 break;
1795 case RADEON_HPD_4:
1796 tmp = RREG32(DC_HPD4_INT_CONTROL);
1797 if (connected)
1798 tmp &= ~DC_HPDx_INT_POLARITY;
1799 else
1800 tmp |= DC_HPDx_INT_POLARITY;
1801 WREG32(DC_HPD4_INT_CONTROL, tmp);
1802 break;
1803 case RADEON_HPD_5:
1804 tmp = RREG32(DC_HPD5_INT_CONTROL);
1805 if (connected)
1806 tmp &= ~DC_HPDx_INT_POLARITY;
1807 else
1808 tmp |= DC_HPDx_INT_POLARITY;
1809 WREG32(DC_HPD5_INT_CONTROL, tmp);
1810 break;
1811 case RADEON_HPD_6:
1812 tmp = RREG32(DC_HPD6_INT_CONTROL);
1813 if (connected)
1814 tmp &= ~DC_HPDx_INT_POLARITY;
1815 else
1816 tmp |= DC_HPDx_INT_POLARITY;
1817 WREG32(DC_HPD6_INT_CONTROL, tmp);
1818 break;
1819 default:
1820 break;
1821 }
1822 }
1823
1824 /**
1825 * evergreen_hpd_init - hpd setup callback.
1826 *
1827 * @rdev: radeon_device pointer
1828 *
1829 * Setup the hpd pins used by the card (evergreen+).
1830 * Enable the pin, set the polarity, and enable the hpd interrupts.
1831 */
1832 void evergreen_hpd_init(struct radeon_device *rdev)
1833 {
1834 struct drm_device *dev = rdev->ddev;
1835 struct drm_connector *connector;
1836 unsigned enabled = 0;
1837 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1838 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1839
1840 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1841 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1842
1843 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1844 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1845 			/* don't try to enable hpd on eDP or LVDS: it avoids breaking
1846 			 * the aux dp channel on iMacs and helps (but does not completely
1847 			 * fix) https://bugzilla.redhat.com/show_bug.cgi?id=726143;
1848 			 * it also avoids interrupt storms during dpms.
1849 			 */
1850 continue;
1851 }
1852 switch (radeon_connector->hpd.hpd) {
1853 case RADEON_HPD_1:
1854 WREG32(DC_HPD1_CONTROL, tmp);
1855 break;
1856 case RADEON_HPD_2:
1857 WREG32(DC_HPD2_CONTROL, tmp);
1858 break;
1859 case RADEON_HPD_3:
1860 WREG32(DC_HPD3_CONTROL, tmp);
1861 break;
1862 case RADEON_HPD_4:
1863 WREG32(DC_HPD4_CONTROL, tmp);
1864 break;
1865 case RADEON_HPD_5:
1866 WREG32(DC_HPD5_CONTROL, tmp);
1867 break;
1868 case RADEON_HPD_6:
1869 WREG32(DC_HPD6_CONTROL, tmp);
1870 break;
1871 default:
1872 break;
1873 }
1874 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1875 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1876 enabled |= 1 << radeon_connector->hpd.hpd;
1877 }
1878 radeon_irq_kms_enable_hpd(rdev, enabled);
1879 }
1880
1881 /**
1882 * evergreen_hpd_fini - hpd tear down callback.
1883 *
1884 * @rdev: radeon_device pointer
1885 *
1886 * Tear down the hpd pins used by the card (evergreen+).
1887 * Disable the hpd interrupts.
1888 */
1889 void evergreen_hpd_fini(struct radeon_device *rdev)
1890 {
1891 struct drm_device *dev = rdev->ddev;
1892 struct drm_connector *connector;
1893 unsigned disabled = 0;
1894
1895 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1896 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1897 switch (radeon_connector->hpd.hpd) {
1898 case RADEON_HPD_1:
1899 WREG32(DC_HPD1_CONTROL, 0);
1900 break;
1901 case RADEON_HPD_2:
1902 WREG32(DC_HPD2_CONTROL, 0);
1903 break;
1904 case RADEON_HPD_3:
1905 WREG32(DC_HPD3_CONTROL, 0);
1906 break;
1907 case RADEON_HPD_4:
1908 WREG32(DC_HPD4_CONTROL, 0);
1909 break;
1910 case RADEON_HPD_5:
1911 WREG32(DC_HPD5_CONTROL, 0);
1912 break;
1913 case RADEON_HPD_6:
1914 WREG32(DC_HPD6_CONTROL, 0);
1915 break;
1916 default:
1917 break;
1918 }
1919 disabled |= 1 << radeon_connector->hpd.hpd;
1920 }
1921 radeon_irq_kms_disable_hpd(rdev, disabled);
1922 }
1923
1924 /* watermark setup */
1925
1926 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1927 struct radeon_crtc *radeon_crtc,
1928 struct drm_display_mode *mode,
1929 struct drm_display_mode *other_mode)
1930 {
1931 u32 tmp, buffer_alloc, i;
1932 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1933 /*
1934 * Line Buffer Setup
1935 * There are 3 line buffers, each one shared by 2 display controllers.
1936 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1937 	 * the display controllers. The partitioning is done via one of four
1938 * preset allocations specified in bits 2:0:
1939 * first display controller
1940 * 0 - first half of lb (3840 * 2)
1941 * 1 - first 3/4 of lb (5760 * 2)
1942 * 2 - whole lb (7680 * 2), other crtc must be disabled
1943 * 3 - first 1/4 of lb (1920 * 2)
1944 * second display controller
1945 * 4 - second half of lb (3840 * 2)
1946 * 5 - second 3/4 of lb (5760 * 2)
1947 * 6 - whole lb (7680 * 2), other crtc must be disabled
1948 * 7 - last 1/4 of lb (1920 * 2)
1949 */
1950 /* this can get tricky if we have two large displays on a paired group
1951 * of crtcs. Ideally for multiple large displays we'd assign them to
1952 * non-linked crtcs for maximum line buffer allocation.
1953 */
1954 if (radeon_crtc->base.enabled && mode) {
1955 if (other_mode) {
1956 tmp = 0; /* 1/2 */
1957 buffer_alloc = 1;
1958 } else {
1959 tmp = 2; /* whole */
1960 buffer_alloc = 2;
1961 }
1962 } else {
1963 tmp = 0;
1964 buffer_alloc = 0;
1965 }
1966
1967 /* second controller of the pair uses second half of the lb */
1968 if (radeon_crtc->crtc_id % 2)
1969 tmp += 4;
1970 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1971
1972 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1973 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1974 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1975 for (i = 0; i < rdev->usec_timeout; i++) {
1976 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1977 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1978 break;
1979 udelay(1);
1980 }
1981 }
1982
1983 if (radeon_crtc->base.enabled && mode) {
1984 switch (tmp) {
1985 case 0:
1986 case 4:
1987 default:
1988 if (ASIC_IS_DCE5(rdev))
1989 return 4096 * 2;
1990 else
1991 return 3840 * 2;
1992 case 1:
1993 case 5:
1994 if (ASIC_IS_DCE5(rdev))
1995 return 6144 * 2;
1996 else
1997 return 5760 * 2;
1998 case 2:
1999 case 6:
2000 if (ASIC_IS_DCE5(rdev))
2001 return 8192 * 2;
2002 else
2003 return 7680 * 2;
2004 case 3:
2005 case 7:
2006 if (ASIC_IS_DCE5(rdev))
2007 return 2048 * 2;
2008 else
2009 return 1920 * 2;
2010 }
2011 }
2012
2013 /* controller not enabled, so no lb used */
2014 return 0;
2015 }
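
/* Illustrative example (the numbers follow from the code above, not from
 * any particular board): on a DCE4 part, a lone display takes the whole
 * lb (tmp = 2) and gets 7680 * 2 entries, while two displays on a paired
 * group each get half, so crtc 1 of the pair ends up with tmp = 0 + 4
 * and 3840 * 2 entries.
 */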
2016
2017 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2018 {
2019 u32 tmp = RREG32(MC_SHARED_CHMAP);
2020
2021 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2022 case 0:
2023 default:
2024 return 1;
2025 case 1:
2026 return 2;
2027 case 2:
2028 return 4;
2029 case 3:
2030 return 8;
2031 }
2032 }
2033
2034 struct evergreen_wm_params {
2035 u32 dram_channels; /* number of dram channels */
2036 u32 yclk; /* bandwidth per dram data pin in kHz */
2037 u32 sclk; /* engine clock in kHz */
2038 u32 disp_clk; /* display clock in kHz */
2039 u32 src_width; /* viewport width */
2040 u32 active_time; /* active display time in ns */
2041 u32 blank_time; /* blank time in ns */
2042 bool interlaced; /* mode is interlaced */
2043 fixed20_12 vsc; /* vertical scale ratio */
2044 u32 num_heads; /* number of active crtcs */
2045 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2046 u32 lb_size; /* line buffer allocated to pipe */
2047 u32 vtaps; /* vertical scaler taps */
2048 };
2049
2050 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2051 {
2052 	/* Calculate the raw DRAM Bandwidth (0.7 efficiency) */
2053 fixed20_12 dram_efficiency; /* 0.7 */
2054 fixed20_12 yclk, dram_channels, bandwidth;
2055 fixed20_12 a;
2056
2057 a.full = dfixed_const(1000);
2058 yclk.full = dfixed_const(wm->yclk);
2059 yclk.full = dfixed_div(yclk, a);
2060 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2061 a.full = dfixed_const(10);
2062 dram_efficiency.full = dfixed_const(7);
2063 dram_efficiency.full = dfixed_div(dram_efficiency, a);
2064 bandwidth.full = dfixed_mul(dram_channels, yclk);
2065 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2066
2067 return dfixed_trunc(bandwidth);
2068 }
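
/* Rough worked example (illustrative only): yclk = 1000000 kHz and 2 dram
 * channels give 2 * 4 * (1000000 / 1000) * 0.7 = 5600 in the driver's
 * bandwidth units.
 */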
2069
2070 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2071 {
2072 /* Calculate DRAM Bandwidth and the part allocated to display. */
2073 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2074 fixed20_12 yclk, dram_channels, bandwidth;
2075 fixed20_12 a;
2076
2077 a.full = dfixed_const(1000);
2078 yclk.full = dfixed_const(wm->yclk);
2079 yclk.full = dfixed_div(yclk, a);
2080 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2081 a.full = dfixed_const(10);
2082 	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2083 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2084 bandwidth.full = dfixed_mul(dram_channels, yclk);
2085 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2086
2087 return dfixed_trunc(bandwidth);
2088 }
2089
2090 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2091 {
2092 /* Calculate the display Data return Bandwidth */
2093 fixed20_12 return_efficiency; /* 0.8 */
2094 fixed20_12 sclk, bandwidth;
2095 fixed20_12 a;
2096
2097 a.full = dfixed_const(1000);
2098 sclk.full = dfixed_const(wm->sclk);
2099 sclk.full = dfixed_div(sclk, a);
2100 a.full = dfixed_const(10);
2101 return_efficiency.full = dfixed_const(8);
2102 return_efficiency.full = dfixed_div(return_efficiency, a);
2103 a.full = dfixed_const(32);
2104 bandwidth.full = dfixed_mul(a, sclk);
2105 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2106
2107 return dfixed_trunc(bandwidth);
2108 }
2109
2110 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2111 {
2112 /* Calculate the DMIF Request Bandwidth */
2113 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2114 fixed20_12 disp_clk, bandwidth;
2115 fixed20_12 a;
2116
2117 a.full = dfixed_const(1000);
2118 disp_clk.full = dfixed_const(wm->disp_clk);
2119 disp_clk.full = dfixed_div(disp_clk, a);
2120 a.full = dfixed_const(10);
2121 disp_clk_request_efficiency.full = dfixed_const(8);
2122 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2123 a.full = dfixed_const(32);
2124 bandwidth.full = dfixed_mul(a, disp_clk);
2125 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2126
2127 return dfixed_trunc(bandwidth);
2128 }
2129
2130 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2131 {
2132 	/* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
2133 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2134 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2135 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2136
2137 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2138 }
2139
2140 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2141 {
2142 /* Calculate the display mode Average Bandwidth
2143 * DisplayMode should contain the source and destination dimensions,
2144 * timing, etc.
2145 */
2146 fixed20_12 bpp;
2147 fixed20_12 line_time;
2148 fixed20_12 src_width;
2149 fixed20_12 bandwidth;
2150 fixed20_12 a;
2151
2152 a.full = dfixed_const(1000);
2153 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2154 line_time.full = dfixed_div(line_time, a);
2155 bpp.full = dfixed_const(wm->bytes_per_pixel);
2156 src_width.full = dfixed_const(wm->src_width);
2157 bandwidth.full = dfixed_mul(src_width, bpp);
2158 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2159 bandwidth.full = dfixed_div(bandwidth, line_time);
2160
2161 return dfixed_trunc(bandwidth);
2162 }
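
/* Rough worked example (illustrative only): a 1920-wide, 4 bytes-per-pixel
 * source with vsc = 1.0 and a 14815 ns line time (1080p at 60 Hz) averages
 * 1920 * 4 / 14.815 ~= 518 in the same bandwidth units as above.
 */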
2163
2164 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2165 {
2166 	/* First calculate the latency in ns */
2167 u32 mc_latency = 2000; /* 2000 ns. */
2168 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2169 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2170 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2171 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2172 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2173 (wm->num_heads * cursor_line_pair_return_time);
2174 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2175 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2176 fixed20_12 a, b, c;
2177
2178 if (wm->num_heads == 0)
2179 return 0;
2180
2181 a.full = dfixed_const(2);
2182 b.full = dfixed_const(1);
2183 if ((wm->vsc.full > a.full) ||
2184 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2185 (wm->vtaps >= 5) ||
2186 ((wm->vsc.full >= a.full) && wm->interlaced))
2187 max_src_lines_per_dst_line = 4;
2188 else
2189 max_src_lines_per_dst_line = 2;
2190
2191 a.full = dfixed_const(available_bandwidth);
2192 b.full = dfixed_const(wm->num_heads);
2193 a.full = dfixed_div(a, b);
2194
2195 b.full = dfixed_const(1000);
2196 c.full = dfixed_const(wm->disp_clk);
2197 b.full = dfixed_div(c, b);
2198 c.full = dfixed_const(wm->bytes_per_pixel);
2199 b.full = dfixed_mul(b, c);
2200
2201 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2202
2203 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2204 b.full = dfixed_const(1000);
2205 c.full = dfixed_const(lb_fill_bw);
2206 b.full = dfixed_div(c, b);
2207 a.full = dfixed_div(a, b);
2208 line_fill_time = dfixed_trunc(a);
2209
2210 if (line_fill_time < wm->active_time)
2211 return latency;
2212 else
2213 return latency + (line_fill_time - wm->active_time);
2214
2215 }
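
/* Rough worked example (illustrative only): with one head and an available
 * bandwidth of 5600, the worst chunk return time is (512 * 8 * 1000) / 5600
 * ~= 731 ns and the cursor line pair return time is ~= 91 ns; with a
 * 100000 kHz disp_clk the dc pipe latency is 400 ns, giving a base latency
 * of about 2000 + 1553 + 400 ns before any line fill deficit is added.
 */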
2216
2217 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2218 {
2219 if (evergreen_average_bandwidth(wm) <=
2220 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2221 return true;
2222 else
2223 return false;
2224 }
2225
2226 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2227 {
2228 if (evergreen_average_bandwidth(wm) <=
2229 (evergreen_available_bandwidth(wm) / wm->num_heads))
2230 return true;
2231 else
2232 return false;
2233 }
2234
2235 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2236 {
2237 u32 lb_partitions = wm->lb_size / wm->src_width;
2238 u32 line_time = wm->active_time + wm->blank_time;
2239 u32 latency_tolerant_lines;
2240 u32 latency_hiding;
2241 fixed20_12 a;
2242
2243 a.full = dfixed_const(1);
2244 if (wm->vsc.full > a.full)
2245 latency_tolerant_lines = 1;
2246 else {
2247 if (lb_partitions <= (wm->vtaps + 1))
2248 latency_tolerant_lines = 1;
2249 else
2250 latency_tolerant_lines = 2;
2251 }
2252
2253 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2254
2255 if (evergreen_latency_watermark(wm) <= latency_hiding)
2256 return true;
2257 else
2258 return false;
2259 }
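
/* Illustrative: a 1920-wide source with a 15360-entry lb gives
 * lb_partitions = 8; with vtaps = 1 that exceeds vtaps + 1, so two
 * latency tolerant lines are assumed and the lb can hide roughly two
 * line times plus the blank time.
 */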
2260
2261 static void evergreen_program_watermarks(struct radeon_device *rdev,
2262 struct radeon_crtc *radeon_crtc,
2263 u32 lb_size, u32 num_heads)
2264 {
2265 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2266 struct evergreen_wm_params wm_low, wm_high;
2267 u32 dram_channels;
2268 u32 pixel_period;
2269 u32 line_time = 0;
2270 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2271 u32 priority_a_mark = 0, priority_b_mark = 0;
2272 u32 priority_a_cnt = PRIORITY_OFF;
2273 u32 priority_b_cnt = PRIORITY_OFF;
2274 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2275 u32 tmp, arb_control3;
2276 fixed20_12 a, b, c;
2277
2278 if (radeon_crtc->base.enabled && num_heads && mode) {
2279 pixel_period = 1000000 / (u32)mode->clock;
2280 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2281 priority_a_cnt = 0;
2282 priority_b_cnt = 0;
2283 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2284
2285 /* watermark for high clocks */
2286 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2287 wm_high.yclk =
2288 radeon_dpm_get_mclk(rdev, false) * 10;
2289 wm_high.sclk =
2290 radeon_dpm_get_sclk(rdev, false) * 10;
2291 } else {
2292 wm_high.yclk = rdev->pm.current_mclk * 10;
2293 wm_high.sclk = rdev->pm.current_sclk * 10;
2294 }
2295
2296 wm_high.disp_clk = mode->clock;
2297 wm_high.src_width = mode->crtc_hdisplay;
2298 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2299 wm_high.blank_time = line_time - wm_high.active_time;
2300 wm_high.interlaced = false;
2301 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2302 wm_high.interlaced = true;
2303 wm_high.vsc = radeon_crtc->vsc;
2304 wm_high.vtaps = 1;
2305 if (radeon_crtc->rmx_type != RMX_OFF)
2306 wm_high.vtaps = 2;
2307 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2308 wm_high.lb_size = lb_size;
2309 wm_high.dram_channels = dram_channels;
2310 wm_high.num_heads = num_heads;
2311
2312 /* watermark for low clocks */
2313 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2314 wm_low.yclk =
2315 radeon_dpm_get_mclk(rdev, true) * 10;
2316 wm_low.sclk =
2317 radeon_dpm_get_sclk(rdev, true) * 10;
2318 } else {
2319 wm_low.yclk = rdev->pm.current_mclk * 10;
2320 wm_low.sclk = rdev->pm.current_sclk * 10;
2321 }
2322
2323 wm_low.disp_clk = mode->clock;
2324 wm_low.src_width = mode->crtc_hdisplay;
2325 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2326 wm_low.blank_time = line_time - wm_low.active_time;
2327 wm_low.interlaced = false;
2328 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2329 wm_low.interlaced = true;
2330 wm_low.vsc = radeon_crtc->vsc;
2331 wm_low.vtaps = 1;
2332 if (radeon_crtc->rmx_type != RMX_OFF)
2333 wm_low.vtaps = 2;
2334 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2335 wm_low.lb_size = lb_size;
2336 wm_low.dram_channels = dram_channels;
2337 wm_low.num_heads = num_heads;
2338
2339 /* set for high clocks */
2340 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2341 /* set for low clocks */
2342 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2343
2344 /* possibly force display priority to high */
2345 /* should really do this at mode validation time... */
2346 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2347 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2348 !evergreen_check_latency_hiding(&wm_high) ||
2349 (rdev->disp_priority == 2)) {
2350 DRM_DEBUG_KMS("force priority a to high\n");
2351 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2352 }
2353 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2354 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2355 !evergreen_check_latency_hiding(&wm_low) ||
2356 (rdev->disp_priority == 2)) {
2357 DRM_DEBUG_KMS("force priority b to high\n");
2358 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2359 }
2360
2361 a.full = dfixed_const(1000);
2362 b.full = dfixed_const(mode->clock);
2363 b.full = dfixed_div(b, a);
2364 c.full = dfixed_const(latency_watermark_a);
2365 c.full = dfixed_mul(c, b);
2366 c.full = dfixed_mul(c, radeon_crtc->hsc);
2367 c.full = dfixed_div(c, a);
2368 a.full = dfixed_const(16);
2369 c.full = dfixed_div(c, a);
2370 priority_a_mark = dfixed_trunc(c);
2371 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2372
2373 a.full = dfixed_const(1000);
2374 b.full = dfixed_const(mode->clock);
2375 b.full = dfixed_div(b, a);
2376 c.full = dfixed_const(latency_watermark_b);
2377 c.full = dfixed_mul(c, b);
2378 c.full = dfixed_mul(c, radeon_crtc->hsc);
2379 c.full = dfixed_div(c, a);
2380 a.full = dfixed_const(16);
2381 c.full = dfixed_div(c, a);
2382 priority_b_mark = dfixed_trunc(c);
2383 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2384
2385 /* Save number of lines the linebuffer leads before the scanout */
2386 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2387 }
2388
2389 /* select wm A */
2390 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2391 tmp = arb_control3;
2392 tmp &= ~LATENCY_WATERMARK_MASK(3);
2393 tmp |= LATENCY_WATERMARK_MASK(1);
2394 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2395 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2396 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2397 LATENCY_HIGH_WATERMARK(line_time)));
2398 /* select wm B */
2399 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2400 tmp &= ~LATENCY_WATERMARK_MASK(3);
2401 tmp |= LATENCY_WATERMARK_MASK(2);
2402 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2403 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2404 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2405 LATENCY_HIGH_WATERMARK(line_time)));
2406 /* restore original selection */
2407 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2408
2409 /* write the priority marks */
2410 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2411 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2412
2413 /* save values for DPM */
2414 radeon_crtc->line_time = line_time;
2415 radeon_crtc->wm_high = latency_watermark_a;
2416 radeon_crtc->wm_low = latency_watermark_b;
2417 }
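
/* Rough worked example (illustrative only): latency_watermark_a = 3953 ns
 * on a 148500 kHz mode with hsc = 1.0 yields a priority mark of
 * 3953 * 148.5 / 1000 / 16 ~= 36.
 */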
2418
2419 /**
2420 * evergreen_bandwidth_update - update display watermarks callback.
2421 *
2422 * @rdev: radeon_device pointer
2423 *
2424 * Update the display watermarks based on the requested mode(s)
2425 * (evergreen+).
2426 */
2427 void evergreen_bandwidth_update(struct radeon_device *rdev)
2428 {
2429 struct drm_display_mode *mode0 = NULL;
2430 struct drm_display_mode *mode1 = NULL;
2431 u32 num_heads = 0, lb_size;
2432 int i;
2433
2434 if (!rdev->mode_info.mode_config_initialized)
2435 return;
2436
2437 radeon_update_display_priority(rdev);
2438
2439 for (i = 0; i < rdev->num_crtc; i++) {
2440 if (rdev->mode_info.crtcs[i]->base.enabled)
2441 num_heads++;
2442 }
2443 for (i = 0; i < rdev->num_crtc; i += 2) {
2444 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2445 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2446 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2447 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2448 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2449 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2450 }
2451 }
2452
2453 /**
2454 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2455 *
2456 * @rdev: radeon_device pointer
2457 *
2458 * Wait for the MC (memory controller) to be idle.
2459 * (evergreen+).
2460 * Returns 0 if the MC is idle, -1 if not.
2461 */
2462 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2463 {
2464 unsigned i;
2465 u32 tmp;
2466
2467 for (i = 0; i < rdev->usec_timeout; i++) {
2468 		/* read the MC busy bits from SRBM_STATUS */
2469 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2470 if (!tmp)
2471 return 0;
2472 udelay(1);
2473 }
2474 return -1;
2475 }
2476
2477 /*
2478 * GART
2479 */
2480 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2481 {
2482 unsigned i;
2483 u32 tmp;
2484
2485 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2486
2487 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2488 for (i = 0; i < rdev->usec_timeout; i++) {
2489 		/* read back the TLB flush response */
2490 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2491 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2492 if (tmp == 2) {
2493 			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2494 return;
2495 }
2496 if (tmp) {
2497 return;
2498 }
2499 udelay(1);
2500 }
2501 }
2502
2503 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2504 {
2505 u32 tmp;
2506 int r;
2507
2508 if (rdev->gart.robj == NULL) {
2509 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2510 return -EINVAL;
2511 }
2512 r = radeon_gart_table_vram_pin(rdev);
2513 if (r)
2514 return r;
2515 /* Setup L2 cache */
2516 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2517 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2518 EFFECTIVE_L2_QUEUE_SIZE(7));
2519 WREG32(VM_L2_CNTL2, 0);
2520 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2521 /* Setup TLB control */
2522 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2523 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2524 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2525 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2526 if (rdev->flags & RADEON_IS_IGP) {
2527 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2528 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2529 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2530 } else {
2531 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2532 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2533 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2534 if ((rdev->family == CHIP_JUNIPER) ||
2535 (rdev->family == CHIP_CYPRESS) ||
2536 (rdev->family == CHIP_HEMLOCK) ||
2537 (rdev->family == CHIP_BARTS))
2538 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2539 }
2540 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2541 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2542 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2543 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
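	/* the GART range and page table base are programmed in units of
	 * 4 KB pages, hence the >> 12 shifts below */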
2544 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2545 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2546 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2547 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2548 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2549 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2550 (u32)(rdev->dummy_page.addr >> 12));
2551 WREG32(VM_CONTEXT1_CNTL, 0);
2552
2553 evergreen_pcie_gart_tlb_flush(rdev);
2554 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2555 (unsigned)(rdev->mc.gtt_size >> 20),
2556 (unsigned long long)rdev->gart.table_addr);
2557 rdev->gart.ready = true;
2558 return 0;
2559 }
2560
2561 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2562 {
2563 u32 tmp;
2564
2565 /* Disable all tables */
2566 WREG32(VM_CONTEXT0_CNTL, 0);
2567 WREG32(VM_CONTEXT1_CNTL, 0);
2568
2569 /* Setup L2 cache */
2570 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2571 EFFECTIVE_L2_QUEUE_SIZE(7));
2572 WREG32(VM_L2_CNTL2, 0);
2573 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2574 /* Setup TLB control */
2575 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2576 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2577 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2578 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2579 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2580 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2581 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2582 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2583 radeon_gart_table_vram_unpin(rdev);
2584 }
2585
2586 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2587 {
2588 evergreen_pcie_gart_disable(rdev);
2589 radeon_gart_table_vram_free(rdev);
2590 radeon_gart_fini(rdev);
2591 }
2592
2593
2594 static void evergreen_agp_enable(struct radeon_device *rdev)
2595 {
2596 u32 tmp;
2597
2598 /* Setup L2 cache */
2599 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2600 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2601 EFFECTIVE_L2_QUEUE_SIZE(7));
2602 WREG32(VM_L2_CNTL2, 0);
2603 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2604 /* Setup TLB control */
2605 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2606 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2607 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2608 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2609 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2610 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2611 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2612 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2613 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2614 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2615 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2616 WREG32(VM_CONTEXT0_CNTL, 0);
2617 WREG32(VM_CONTEXT1_CNTL, 0);
2618 }
2619
2620 static const unsigned ni_dig_offsets[] =
2621 {
2622 NI_DIG0_REGISTER_OFFSET,
2623 NI_DIG1_REGISTER_OFFSET,
2624 NI_DIG2_REGISTER_OFFSET,
2625 NI_DIG3_REGISTER_OFFSET,
2626 NI_DIG4_REGISTER_OFFSET,
2627 NI_DIG5_REGISTER_OFFSET
2628 };
2629
2630 static const unsigned ni_tx_offsets[] =
2631 {
2632 NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2633 NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2634 NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2635 NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2636 NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2637 NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2638 };
2639
2640 static const unsigned evergreen_dp_offsets[] =
2641 {
2642 EVERGREEN_DP0_REGISTER_OFFSET,
2643 EVERGREEN_DP1_REGISTER_OFFSET,
2644 EVERGREEN_DP2_REGISTER_OFFSET,
2645 EVERGREEN_DP3_REGISTER_OFFSET,
2646 EVERGREEN_DP4_REGISTER_OFFSET,
2647 EVERGREEN_DP5_REGISTER_OFFSET
2648 };
2649
2650
2651 /*
2652  * Assumption: EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
2653  * We go from crtc to connector, which is not reliable since the mapping
2654  * really runs in the opposite direction. If the crtc is enabled, find the
2655  * dig_fe which selects this crtc and ensure that it is enabled. If such a
2656  * dig_fe is found, find the dig_be which selects that dig_fe and ensure
2657  * that it is enabled and in DP_SST mode.
2658  * If UNIPHY_PLL_CONTROL1 is enabled then we should disconnect the timing
2659  * from the dp symbol clocks.
2660  */
2661 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2662 unsigned crtc_id, unsigned *ret_dig_fe)
2663 {
2664 unsigned i;
2665 unsigned dig_fe;
2666 unsigned dig_be;
2667 unsigned dig_en_be;
2668 unsigned uniphy_pll;
2669 unsigned digs_fe_selected;
2670 unsigned dig_be_mode;
2671 unsigned dig_fe_mask;
2672 bool is_enabled = false;
2673 bool found_crtc = false;
2674
2675 /* loop through all running dig_fe to find selected crtc */
2676 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2677 dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2678 if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2679 crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2680 /* found running pipe */
2681 found_crtc = true;
2682 dig_fe_mask = 1 << i;
2683 dig_fe = i;
2684 break;
2685 }
2686 }
2687
2688 if (found_crtc) {
2689 /* loop through all running dig_be to find selected dig_fe */
2690 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2691 dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2692 			/* is this dig_fe selected by the dig_be? */
2693 digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2694 dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2695 if (dig_fe_mask & digs_fe_selected &&
2696 			    /* is the dig_be in sst mode? */
2697 dig_be_mode == NI_DIG_BE_DPSST) {
2698 dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2699 ni_dig_offsets[i]);
2700 uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2701 ni_tx_offsets[i]);
2702 				/* dig_be enabled and tx running? */
2703 if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2704 dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2705 uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2706 is_enabled = true;
2707 *ret_dig_fe = dig_fe;
2708 break;
2709 }
2710 }
2711 }
2712 }
2713
2714 return is_enabled;
2715 }
2716
2717 /*
2718  * Blank the dig when in dp sst mode;
2719  * the dig ignores crtc timing.
2720  */
2721 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2722 unsigned dig_fe)
2723 {
2724 unsigned stream_ctrl;
2725 unsigned fifo_ctrl;
2726 unsigned counter = 0;
2727
2728 if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2729 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2730 return;
2731 }
2732
2733 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2734 evergreen_dp_offsets[dig_fe]);
2735 if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2736 		DRM_ERROR("dig %d should be enabled\n", dig_fe);
2737 return;
2738 }
2739
2740 	stream_ctrl &= ~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2741 WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2742 evergreen_dp_offsets[dig_fe], stream_ctrl);
2743
2744 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2745 evergreen_dp_offsets[dig_fe]);
2746 while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2747 msleep(1);
2748 counter++;
2749 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2750 evergreen_dp_offsets[dig_fe]);
2751 }
2752 	if (counter >= 32)
2753 		DRM_ERROR("timed out %d ms waiting for DP stream to stop\n", counter);
2754
2755 fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2756 fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2757 WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2758
2759 }
2760
2761 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2762 {
2763 u32 crtc_enabled, tmp, frame_count, blackout;
2764 int i, j;
2765 unsigned dig_fe;
2766
2767 if (!ASIC_IS_NODCE(rdev)) {
2768 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2769 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2770
2771 /* disable VGA render */
2772 WREG32(VGA_RENDER_CONTROL, 0);
2773 }
2774 /* blank the display controllers */
2775 for (i = 0; i < rdev->num_crtc; i++) {
2776 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2777 if (crtc_enabled) {
2778 save->crtc_enabled[i] = true;
2779 if (ASIC_IS_DCE6(rdev)) {
2780 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2781 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2782 radeon_wait_for_vblank(rdev, i);
2783 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2784 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2785 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2786 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2787 }
2788 } else {
2789 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2790 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2791 radeon_wait_for_vblank(rdev, i);
2792 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2793 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2794 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2795 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2796 }
2797 }
2798 /* wait for the next frame */
2799 frame_count = radeon_get_vblank_counter(rdev, i);
2800 for (j = 0; j < rdev->usec_timeout; j++) {
2801 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2802 break;
2803 udelay(1);
2804 }
2805 		/* We should disable the dig if it drives dp sst, but we
2806 		 * are in radeon_device_init and the topology is unknown;
2807 		 * it only becomes available after radeon_modeset_init.
2808 		 * radeon_atom_encoder_dpms_dig would do the job if we
2809 		 * initialized it properly.
2810 		 * For now do it manually.
2811 		 */
2812 if (ASIC_IS_DCE5(rdev) &&
2813 evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
2814 evergreen_blank_dp_output(rdev, dig_fe);
2815 		/* we could remove the 6 lines below */
2816 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2817 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2818 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2819 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2820 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2821 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2822 save->crtc_enabled[i] = false;
2823 /* ***** */
2824 } else {
2825 save->crtc_enabled[i] = false;
2826 }
2827 }
2828
2829 radeon_mc_wait_for_idle(rdev);
2830
2831 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2832 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2833 /* Block CPU access */
2834 WREG32(BIF_FB_EN, 0);
2835 /* blackout the MC */
2836 blackout &= ~BLACKOUT_MODE_MASK;
2837 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2838 }
2839 /* wait for the MC to settle */
2840 udelay(100);
2841
2842 /* lock double buffered regs */
2843 for (i = 0; i < rdev->num_crtc; i++) {
2844 if (save->crtc_enabled[i]) {
2845 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2846 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2847 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2848 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2849 }
2850 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2851 if (!(tmp & 1)) {
2852 tmp |= 1;
2853 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2854 }
2855 }
2856 }
2857 }
2858
2859 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2860 {
2861 u32 tmp, frame_count;
2862 int i, j;
2863
2864 /* update crtc base addresses */
2865 for (i = 0; i < rdev->num_crtc; i++) {
2866 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2867 upper_32_bits(rdev->mc.vram_start));
2868 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2869 upper_32_bits(rdev->mc.vram_start));
2870 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2871 (u32)rdev->mc.vram_start);
2872 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2873 (u32)rdev->mc.vram_start);
2874 }
2875
2876 if (!ASIC_IS_NODCE(rdev)) {
2877 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2878 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2879 }
2880
2881 /* unlock regs and wait for update */
2882 for (i = 0; i < rdev->num_crtc; i++) {
2883 if (save->crtc_enabled[i]) {
2884 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2885 if ((tmp & 0x7) != 3) {
2886 tmp &= ~0x7;
2887 tmp |= 0x3;
2888 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2889 }
2890 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2891 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2892 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2893 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2894 }
2895 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2896 if (tmp & 1) {
2897 tmp &= ~1;
2898 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2899 }
2900 for (j = 0; j < rdev->usec_timeout; j++) {
2901 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2902 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2903 break;
2904 udelay(1);
2905 }
2906 }
2907 }
2908
2909 /* unblackout the MC */
2910 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2911 tmp &= ~BLACKOUT_MODE_MASK;
2912 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2913 /* allow CPU access */
2914 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2915
2916 for (i = 0; i < rdev->num_crtc; i++) {
2917 if (save->crtc_enabled[i]) {
2918 if (ASIC_IS_DCE6(rdev)) {
2919 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2920 tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2921 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2922 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2923 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2924 } else {
2925 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2926 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2927 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2928 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2929 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2930 }
2931 /* wait for the next frame */
2932 frame_count = radeon_get_vblank_counter(rdev, i);
2933 for (j = 0; j < rdev->usec_timeout; j++) {
2934 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2935 break;
2936 udelay(1);
2937 }
2938 }
2939 }
2940 if (!ASIC_IS_NODCE(rdev)) {
2941 /* Unlock vga access */
2942 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2943 mdelay(1);
2944 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2945 }
2946 }
2947
2948 void evergreen_mc_program(struct radeon_device *rdev)
2949 {
2950 struct evergreen_mc_save save;
2951 u32 tmp;
2952 int i, j;
2953
2954 /* Initialize HDP */
2955 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2956 WREG32((0x2c14 + j), 0x00000000);
2957 WREG32((0x2c18 + j), 0x00000000);
2958 WREG32((0x2c1c + j), 0x00000000);
2959 WREG32((0x2c20 + j), 0x00000000);
2960 WREG32((0x2c24 + j), 0x00000000);
2961 }
2962 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2963
2964 evergreen_mc_stop(rdev, &save);
2965 if (evergreen_mc_wait_for_idle(rdev)) {
2966 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2967 }
2968 	/* Lock out access through the VGA aperture */
2969 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2970 /* Update configuration */
2971 if (rdev->flags & RADEON_IS_AGP) {
2972 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2973 /* VRAM before AGP */
2974 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2975 rdev->mc.vram_start >> 12);
2976 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2977 rdev->mc.gtt_end >> 12);
2978 } else {
2979 /* VRAM after AGP */
2980 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2981 rdev->mc.gtt_start >> 12);
2982 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2983 rdev->mc.vram_end >> 12);
2984 }
2985 } else {
2986 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2987 rdev->mc.vram_start >> 12);
2988 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2989 rdev->mc.vram_end >> 12);
2990 }
2991 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2992 /* llano/ontario only */
2993 if ((rdev->family == CHIP_PALM) ||
2994 (rdev->family == CHIP_SUMO) ||
2995 (rdev->family == CHIP_SUMO2)) {
2996 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2997 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2998 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2999 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
3000 }
3001 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3002 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3003 WREG32(MC_VM_FB_LOCATION, tmp);
3004 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3005 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3006 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3007 if (rdev->flags & RADEON_IS_AGP) {
3008 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
3009 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3010 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3011 } else {
3012 WREG32(MC_VM_AGP_BASE, 0);
3013 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3014 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3015 }
3016 if (evergreen_mc_wait_for_idle(rdev)) {
3017 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3018 }
3019 evergreen_mc_resume(rdev, &save);
3020 /* we need to own VRAM, so turn off the VGA renderer here
3021 * to stop it overwriting our objects */
3022 rv515_vga_render_disable(rdev);
3023 }
3024
3025 /*
3026 * CP.
3027 */
3028 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3029 {
3030 struct radeon_ring *ring = &rdev->ring[ib->ring];
3031 u32 next_rptr;
3032
3033 /* set to DX10/11 mode */
3034 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3035 radeon_ring_write(ring, 1);
3036
3037 if (ring->rptr_save_reg) {
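		/* 3 dwords for the SET_CONFIG_REG packet below plus 4 for
		 * the INDIRECT_BUFFER packet that follows */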
3038 next_rptr = ring->wptr + 3 + 4;
3039 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3040 radeon_ring_write(ring, ((ring->rptr_save_reg -
3041 PACKET3_SET_CONFIG_REG_START) >> 2));
3042 radeon_ring_write(ring, next_rptr);
3043 } else if (rdev->wb.enabled) {
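		/* 5 dwords for the MEM_WRITE packet below plus 4 for the
		 * INDIRECT_BUFFER packet that follows */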
3044 next_rptr = ring->wptr + 5 + 4;
3045 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3046 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3047 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3048 radeon_ring_write(ring, next_rptr);
3049 radeon_ring_write(ring, 0);
3050 }
3051
3052 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3053 radeon_ring_write(ring,
3054 #ifdef __BIG_ENDIAN
3055 (2 << 0) |
3056 #endif
3057 (ib->gpu_addr & 0xFFFFFFFC));
3058 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3059 radeon_ring_write(ring, ib->length_dw);
3060 }
3061
3062
3063 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3064 {
3065 const __be32 *fw_data;
3066 int i;
3067
3068 if (!rdev->me_fw || !rdev->pfp_fw)
3069 return -EINVAL;
3070
3071 r700_cp_stop(rdev);
3072 WREG32(CP_RB_CNTL,
3073 #ifdef __BIG_ENDIAN
3074 BUF_SWAP_32BIT |
3075 #endif
3076 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3077
3078 fw_data = (const __be32 *)rdev->pfp_fw->data;
3079 WREG32(CP_PFP_UCODE_ADDR, 0);
3080 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3081 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3082 WREG32(CP_PFP_UCODE_ADDR, 0);
3083
3084 fw_data = (const __be32 *)rdev->me_fw->data;
3085 WREG32(CP_ME_RAM_WADDR, 0);
3086 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3087 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3088
3089 WREG32(CP_PFP_UCODE_ADDR, 0);
3090 WREG32(CP_ME_RAM_WADDR, 0);
3091 WREG32(CP_ME_RAM_RADDR, 0);
3092 return 0;
3093 }
3094
3095 static int evergreen_cp_start(struct radeon_device *rdev)
3096 {
3097 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3098 int r, i;
3099 uint32_t cp_me;
3100
3101 r = radeon_ring_lock(rdev, ring, 7);
3102 if (r) {
3103 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3104 return r;
3105 }
3106 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3107 radeon_ring_write(ring, 0x1);
3108 radeon_ring_write(ring, 0x0);
3109 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3110 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3111 radeon_ring_write(ring, 0);
3112 radeon_ring_write(ring, 0);
3113 radeon_ring_unlock_commit(rdev, ring, false);
3114
3115 cp_me = 0xff;
3116 WREG32(CP_ME_CNTL, cp_me);
3117
3118 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3119 if (r) {
3120 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3121 return r;
3122 }
3123
3124 /* setup clear context state */
3125 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3126 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3127
3128 for (i = 0; i < evergreen_default_size; i++)
3129 radeon_ring_write(ring, evergreen_default_state[i]);
3130
3131 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3132 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3133
3134 /* set clear context state */
3135 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3136 radeon_ring_write(ring, 0);
3137
3138 /* SQ_VTX_BASE_VTX_LOC */
3139 radeon_ring_write(ring, 0xc0026f00);
3140 radeon_ring_write(ring, 0x00000000);
3141 radeon_ring_write(ring, 0x00000000);
3142 radeon_ring_write(ring, 0x00000000);
3143
3144 /* Clear consts */
3145 radeon_ring_write(ring, 0xc0036f00);
3146 radeon_ring_write(ring, 0x00000bc4);
3147 radeon_ring_write(ring, 0xffffffff);
3148 radeon_ring_write(ring, 0xffffffff);
3149 radeon_ring_write(ring, 0xffffffff);
3150
3151 radeon_ring_write(ring, 0xc0026900);
3152 radeon_ring_write(ring, 0x00000316);
3153 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3154 	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3155
3156 radeon_ring_unlock_commit(rdev, ring, false);
3157
3158 return 0;
3159 }
3160
3161 static int evergreen_cp_resume(struct radeon_device *rdev)
3162 {
3163 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3164 u32 tmp;
3165 u32 rb_bufsz;
3166 int r;
3167
3168 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3169 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3170 SOFT_RESET_PA |
3171 SOFT_RESET_SH |
3172 SOFT_RESET_VGT |
3173 SOFT_RESET_SPI |
3174 SOFT_RESET_SX));
3175 RREG32(GRBM_SOFT_RESET);
3176 mdelay(15);
3177 WREG32(GRBM_SOFT_RESET, 0);
3178 RREG32(GRBM_SOFT_RESET);
3179
3180 /* Set ring buffer size */
3181 rb_bufsz = order_base_2(ring->ring_size / 8);
3182 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
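	/* e.g. a 1 MB ring gives rb_bufsz = order_base_2(131072) = 17; the
	 * field shifted by 8 encodes log2(RADEON_GPU_PAGE_SIZE / 8) */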
3183 #ifdef __BIG_ENDIAN
3184 tmp |= BUF_SWAP_32BIT;
3185 #endif
3186 WREG32(CP_RB_CNTL, tmp);
3187 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3188 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3189
3190 /* Set the write pointer delay */
3191 WREG32(CP_RB_WPTR_DELAY, 0);
3192
3193 /* Initialize the ring buffer's read and write pointers */
3194 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3195 WREG32(CP_RB_RPTR_WR, 0);
3196 ring->wptr = 0;
3197 WREG32(CP_RB_WPTR, ring->wptr);
3198
3199 /* set the wb address whether it's enabled or not */
3200 WREG32(CP_RB_RPTR_ADDR,
3201 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3202 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3203 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3204
3205 if (rdev->wb.enabled)
3206 WREG32(SCRATCH_UMSK, 0xff);
3207 else {
3208 tmp |= RB_NO_UPDATE;
3209 WREG32(SCRATCH_UMSK, 0);
3210 }
3211
3212 mdelay(1);
3213 WREG32(CP_RB_CNTL, tmp);
3214
3215 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3216 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3217
3218 evergreen_cp_start(rdev);
3219 ring->ready = true;
3220 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3221 if (r) {
3222 ring->ready = false;
3223 return r;
3224 }
3225 return 0;
3226 }
3227
3228 /*
3229 * Core functions
3230 */
3231 static void evergreen_gpu_init(struct radeon_device *rdev)
3232 {
3233 u32 gb_addr_config;
3234 u32 mc_shared_chmap __unused, mc_arb_ramcfg;
3235 u32 sx_debug_1;
3236 u32 smx_dc_ctl0;
3237 u32 sq_config;
3238 u32 sq_lds_resource_mgmt;
3239 u32 sq_gpr_resource_mgmt_1;
3240 u32 sq_gpr_resource_mgmt_2;
3241 u32 sq_gpr_resource_mgmt_3;
3242 u32 sq_thread_resource_mgmt;
3243 u32 sq_thread_resource_mgmt_2;
3244 u32 sq_stack_resource_mgmt_1;
3245 u32 sq_stack_resource_mgmt_2;
3246 u32 sq_stack_resource_mgmt_3;
3247 u32 vgt_cache_invalidation;
3248 u32 hdp_host_path_cntl, tmp;
3249 u32 disabled_rb_mask;
3250 int i, j, ps_thread_count;
3251
3252 switch (rdev->family) {
3253 case CHIP_CYPRESS:
3254 case CHIP_HEMLOCK:
3255 rdev->config.evergreen.num_ses = 2;
3256 rdev->config.evergreen.max_pipes = 4;
3257 rdev->config.evergreen.max_tile_pipes = 8;
3258 rdev->config.evergreen.max_simds = 10;
3259 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3260 rdev->config.evergreen.max_gprs = 256;
3261 rdev->config.evergreen.max_threads = 248;
3262 rdev->config.evergreen.max_gs_threads = 32;
3263 rdev->config.evergreen.max_stack_entries = 512;
3264 rdev->config.evergreen.sx_num_of_sets = 4;
3265 rdev->config.evergreen.sx_max_export_size = 256;
3266 rdev->config.evergreen.sx_max_export_pos_size = 64;
3267 rdev->config.evergreen.sx_max_export_smx_size = 192;
3268 rdev->config.evergreen.max_hw_contexts = 8;
3269 rdev->config.evergreen.sq_num_cf_insts = 2;
3270
3271 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3272 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3273 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3274 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3275 break;
3276 case CHIP_JUNIPER:
3277 rdev->config.evergreen.num_ses = 1;
3278 rdev->config.evergreen.max_pipes = 4;
3279 rdev->config.evergreen.max_tile_pipes = 4;
3280 rdev->config.evergreen.max_simds = 10;
3281 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3282 rdev->config.evergreen.max_gprs = 256;
3283 rdev->config.evergreen.max_threads = 248;
3284 rdev->config.evergreen.max_gs_threads = 32;
3285 rdev->config.evergreen.max_stack_entries = 512;
3286 rdev->config.evergreen.sx_num_of_sets = 4;
3287 rdev->config.evergreen.sx_max_export_size = 256;
3288 rdev->config.evergreen.sx_max_export_pos_size = 64;
3289 rdev->config.evergreen.sx_max_export_smx_size = 192;
3290 rdev->config.evergreen.max_hw_contexts = 8;
3291 rdev->config.evergreen.sq_num_cf_insts = 2;
3292
3293 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3294 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3295 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3296 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3297 break;
3298 case CHIP_REDWOOD:
3299 rdev->config.evergreen.num_ses = 1;
3300 rdev->config.evergreen.max_pipes = 4;
3301 rdev->config.evergreen.max_tile_pipes = 4;
3302 rdev->config.evergreen.max_simds = 5;
3303 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3304 rdev->config.evergreen.max_gprs = 256;
3305 rdev->config.evergreen.max_threads = 248;
3306 rdev->config.evergreen.max_gs_threads = 32;
3307 rdev->config.evergreen.max_stack_entries = 256;
3308 rdev->config.evergreen.sx_num_of_sets = 4;
3309 rdev->config.evergreen.sx_max_export_size = 256;
3310 rdev->config.evergreen.sx_max_export_pos_size = 64;
3311 rdev->config.evergreen.sx_max_export_smx_size = 192;
3312 rdev->config.evergreen.max_hw_contexts = 8;
3313 rdev->config.evergreen.sq_num_cf_insts = 2;
3314
3315 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3316 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3317 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3318 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3319 break;
3320 case CHIP_CEDAR:
3321 default:
3322 rdev->config.evergreen.num_ses = 1;
3323 rdev->config.evergreen.max_pipes = 2;
3324 rdev->config.evergreen.max_tile_pipes = 2;
3325 rdev->config.evergreen.max_simds = 2;
3326 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3327 rdev->config.evergreen.max_gprs = 256;
3328 rdev->config.evergreen.max_threads = 192;
3329 rdev->config.evergreen.max_gs_threads = 16;
3330 rdev->config.evergreen.max_stack_entries = 256;
3331 rdev->config.evergreen.sx_num_of_sets = 4;
3332 rdev->config.evergreen.sx_max_export_size = 128;
3333 rdev->config.evergreen.sx_max_export_pos_size = 32;
3334 rdev->config.evergreen.sx_max_export_smx_size = 96;
3335 rdev->config.evergreen.max_hw_contexts = 4;
3336 rdev->config.evergreen.sq_num_cf_insts = 1;
3337
3338 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3339 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3340 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3341 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3342 break;
3343 case CHIP_PALM:
3344 rdev->config.evergreen.num_ses = 1;
3345 rdev->config.evergreen.max_pipes = 2;
3346 rdev->config.evergreen.max_tile_pipes = 2;
3347 rdev->config.evergreen.max_simds = 2;
3348 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3349 rdev->config.evergreen.max_gprs = 256;
3350 rdev->config.evergreen.max_threads = 192;
3351 rdev->config.evergreen.max_gs_threads = 16;
3352 rdev->config.evergreen.max_stack_entries = 256;
3353 rdev->config.evergreen.sx_num_of_sets = 4;
3354 rdev->config.evergreen.sx_max_export_size = 128;
3355 rdev->config.evergreen.sx_max_export_pos_size = 32;
3356 rdev->config.evergreen.sx_max_export_smx_size = 96;
3357 rdev->config.evergreen.max_hw_contexts = 4;
3358 rdev->config.evergreen.sq_num_cf_insts = 1;
3359
3360 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3361 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3362 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3363 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3364 break;
3365 case CHIP_SUMO:
3366 rdev->config.evergreen.num_ses = 1;
3367 rdev->config.evergreen.max_pipes = 4;
3368 rdev->config.evergreen.max_tile_pipes = 4;
3369 if (rdev->pdev->device == 0x9648)
3370 rdev->config.evergreen.max_simds = 3;
3371 else if ((rdev->pdev->device == 0x9647) ||
3372 (rdev->pdev->device == 0x964a))
3373 rdev->config.evergreen.max_simds = 4;
3374 else
3375 rdev->config.evergreen.max_simds = 5;
3376 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3377 rdev->config.evergreen.max_gprs = 256;
3378 rdev->config.evergreen.max_threads = 248;
3379 rdev->config.evergreen.max_gs_threads = 32;
3380 rdev->config.evergreen.max_stack_entries = 256;
3381 rdev->config.evergreen.sx_num_of_sets = 4;
3382 rdev->config.evergreen.sx_max_export_size = 256;
3383 rdev->config.evergreen.sx_max_export_pos_size = 64;
3384 rdev->config.evergreen.sx_max_export_smx_size = 192;
3385 rdev->config.evergreen.max_hw_contexts = 8;
3386 rdev->config.evergreen.sq_num_cf_insts = 2;
3387
3388 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3389 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3390 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3391 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3392 break;
3393 case CHIP_SUMO2:
3394 rdev->config.evergreen.num_ses = 1;
3395 rdev->config.evergreen.max_pipes = 4;
3396 rdev->config.evergreen.max_tile_pipes = 4;
3397 rdev->config.evergreen.max_simds = 2;
3398 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3399 rdev->config.evergreen.max_gprs = 256;
3400 rdev->config.evergreen.max_threads = 248;
3401 rdev->config.evergreen.max_gs_threads = 32;
3402 rdev->config.evergreen.max_stack_entries = 512;
3403 rdev->config.evergreen.sx_num_of_sets = 4;
3404 rdev->config.evergreen.sx_max_export_size = 256;
3405 rdev->config.evergreen.sx_max_export_pos_size = 64;
3406 rdev->config.evergreen.sx_max_export_smx_size = 192;
3407 rdev->config.evergreen.max_hw_contexts = 4;
3408 rdev->config.evergreen.sq_num_cf_insts = 2;
3409
3410 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3411 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3412 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3413 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3414 break;
3415 case CHIP_BARTS:
3416 rdev->config.evergreen.num_ses = 2;
3417 rdev->config.evergreen.max_pipes = 4;
3418 rdev->config.evergreen.max_tile_pipes = 8;
3419 rdev->config.evergreen.max_simds = 7;
3420 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3421 rdev->config.evergreen.max_gprs = 256;
3422 rdev->config.evergreen.max_threads = 248;
3423 rdev->config.evergreen.max_gs_threads = 32;
3424 rdev->config.evergreen.max_stack_entries = 512;
3425 rdev->config.evergreen.sx_num_of_sets = 4;
3426 rdev->config.evergreen.sx_max_export_size = 256;
3427 rdev->config.evergreen.sx_max_export_pos_size = 64;
3428 rdev->config.evergreen.sx_max_export_smx_size = 192;
3429 rdev->config.evergreen.max_hw_contexts = 8;
3430 rdev->config.evergreen.sq_num_cf_insts = 2;
3431
3432 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3433 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3434 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3435 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3436 break;
3437 case CHIP_TURKS:
3438 rdev->config.evergreen.num_ses = 1;
3439 rdev->config.evergreen.max_pipes = 4;
3440 rdev->config.evergreen.max_tile_pipes = 4;
3441 rdev->config.evergreen.max_simds = 6;
3442 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3443 rdev->config.evergreen.max_gprs = 256;
3444 rdev->config.evergreen.max_threads = 248;
3445 rdev->config.evergreen.max_gs_threads = 32;
3446 rdev->config.evergreen.max_stack_entries = 256;
3447 rdev->config.evergreen.sx_num_of_sets = 4;
3448 rdev->config.evergreen.sx_max_export_size = 256;
3449 rdev->config.evergreen.sx_max_export_pos_size = 64;
3450 rdev->config.evergreen.sx_max_export_smx_size = 192;
3451 rdev->config.evergreen.max_hw_contexts = 8;
3452 rdev->config.evergreen.sq_num_cf_insts = 2;
3453
3454 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3455 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3456 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3457 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3458 break;
3459 case CHIP_CAICOS:
3460 rdev->config.evergreen.num_ses = 1;
3461 rdev->config.evergreen.max_pipes = 2;
3462 rdev->config.evergreen.max_tile_pipes = 2;
3463 rdev->config.evergreen.max_simds = 2;
3464 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3465 rdev->config.evergreen.max_gprs = 256;
3466 rdev->config.evergreen.max_threads = 192;
3467 rdev->config.evergreen.max_gs_threads = 16;
3468 rdev->config.evergreen.max_stack_entries = 256;
3469 rdev->config.evergreen.sx_num_of_sets = 4;
3470 rdev->config.evergreen.sx_max_export_size = 128;
3471 rdev->config.evergreen.sx_max_export_pos_size = 32;
3472 rdev->config.evergreen.sx_max_export_smx_size = 96;
3473 rdev->config.evergreen.max_hw_contexts = 4;
3474 rdev->config.evergreen.sq_num_cf_insts = 1;
3475
3476 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3477 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3478 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3479 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3480 break;
3481 }
3482
3483 /* Initialize HDP */
3484 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3485 WREG32((0x2c14 + j), 0x00000000);
3486 WREG32((0x2c18 + j), 0x00000000);
3487 WREG32((0x2c1c + j), 0x00000000);
3488 WREG32((0x2c20 + j), 0x00000000);
3489 WREG32((0x2c24 + j), 0x00000000);
3490 }
3491
3492 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3493 WREG32(SRBM_INT_CNTL, 0x1);
3494 WREG32(SRBM_INT_ACK, 0x1);
3495
3496 evergreen_fix_pci_max_read_req_size(rdev);
3497
3498 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3499 if ((rdev->family == CHIP_PALM) ||
3500 (rdev->family == CHIP_SUMO) ||
3501 (rdev->family == CHIP_SUMO2))
3502 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3503 else
3504 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3505
3506 /* setup tiling info dword. gb_addr_config is not adequate since it does
3507 * not have bank info, so create a custom tiling dword.
3508 * bits 3:0 num_pipes
3509 * bits 7:4 num_banks
3510 * bits 11:8 group_size
3511 * bits 15:12 row_size
3512 */
3513 rdev->config.evergreen.tile_config = 0;
3514 switch (rdev->config.evergreen.max_tile_pipes) {
3515 case 1:
3516 default:
3517 rdev->config.evergreen.tile_config |= (0 << 0);
3518 break;
3519 case 2:
3520 rdev->config.evergreen.tile_config |= (1 << 0);
3521 break;
3522 case 4:
3523 rdev->config.evergreen.tile_config |= (2 << 0);
3524 break;
3525 case 8:
3526 rdev->config.evergreen.tile_config |= (3 << 0);
3527 break;
3528 }
3529 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3530 if (rdev->flags & RADEON_IS_IGP)
3531 rdev->config.evergreen.tile_config |= 1 << 4;
3532 else {
3533 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3534 case 0: /* four banks */
3535 rdev->config.evergreen.tile_config |= 0 << 4;
3536 break;
3537 case 1: /* eight banks */
3538 rdev->config.evergreen.tile_config |= 1 << 4;
3539 break;
3540 case 2: /* sixteen banks */
3541 default:
3542 rdev->config.evergreen.tile_config |= 2 << 4;
3543 break;
3544 }
3545 }
3546 rdev->config.evergreen.tile_config |= 0 << 8;
3547 rdev->config.evergreen.tile_config |=
3548 ((gb_addr_config & 0x30000000) >> 28) << 12;
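	/* e.g. a 4-pipe, 8-bank part encodes as (2 << 0) | (1 << 4) == 0x12
	 * in the low byte; bits 15:12 then come from gb_addr_config's
	 * row-size field, OR'd in just above
	 */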
3549
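	/* native Evergreen parts expose the disabled-RB fuses via the RCU
	 * efuse straps; the later (Northern Islands) parts handled in the
	 * else branch read CC_RB_BACKEND_DISABLE per shader engine instead
	 */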
3550 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3551 u32 efuse_straps_4;
3552 u32 efuse_straps_3;
3553
3554 efuse_straps_4 = RREG32_RCU(0x204);
3555 efuse_straps_3 = RREG32_RCU(0x203);
3556 tmp = (((efuse_straps_4 & 0xf) << 4) |
3557 ((efuse_straps_3 & 0xf0000000) >> 28));
3558 } else {
3559 tmp = 0;
3560 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3561 u32 rb_disable_bitmap;
3562
3563 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3564 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3565 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3566 tmp <<= 4;
3567 tmp |= rb_disable_bitmap;
3568 }
3569 }
3570 	/* the enabled RBs are just the ones that are not disabled :) */
3571 disabled_rb_mask = tmp;
3572 tmp = 0;
3573 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3574 tmp |= (1 << i);
3575 /* if all the backends are disabled, fix it up here */
3576 if ((disabled_rb_mask & tmp) == tmp) {
3577 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3578 disabled_rb_mask &= ~(1 << i);
3579 }
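	/* e.g. with two backends (tmp == 0x3) both reported disabled, the
	 * loop above clears bits 0-1 so the remap below still has RBs to
	 * hand out
	 */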
3580
3581 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3582 u32 simd_disable_bitmap;
3583
3584 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3585 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3586 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3587 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3588 tmp <<= 16;
3589 tmp |= simd_disable_bitmap;
3590 }
3591 rdev->config.evergreen.active_simds = hweight32(~tmp);
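	/* e.g. one SE with 5 SIMDs and SIMD 2 fused off: the bitmap reads
	 * 0x4, the |= above pads the upper bits, ~tmp == 0x1b, and
	 * hweight32() counts the 4 SIMDs still active
	 */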
3592
3593 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3594 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3595
3596 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3597 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3598 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3599 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3600 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3601 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3602 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3603
3604 if ((rdev->config.evergreen.max_backends == 1) &&
3605 (rdev->flags & RADEON_IS_IGP)) {
3606 if ((disabled_rb_mask & 3) == 1) {
3607 /* RB0 disabled, RB1 enabled */
3608 tmp = 0x11111111;
3609 } else {
3610 /* RB1 disabled, RB0 enabled */
3611 tmp = 0x00000000;
3612 }
3613 } else {
3614 tmp = gb_addr_config & NUM_PIPES_MASK;
3615 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3616 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3617 }
3618 WREG32(GB_BACKEND_MAP, tmp);
3619
3620 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3621 WREG32(CGTS_TCC_DISABLE, 0);
3622 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3623 WREG32(CGTS_USER_TCC_DISABLE, 0);
3624
3625 /* set HW defaults for 3D engine */
3626 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3627 ROQ_IB2_START(0x2b)));
3628
3629 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3630
3631 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3632 SYNC_GRADIENT |
3633 SYNC_WALKER |
3634 SYNC_ALIGNER));
3635
3636 sx_debug_1 = RREG32(SX_DEBUG_1);
3637 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3638 WREG32(SX_DEBUG_1, sx_debug_1);
3639
3641 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3642 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3643 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3644 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3645
3646 if (rdev->family <= CHIP_SUMO2)
3647 WREG32(SMX_SAR_CTL0, 0x00010000);
3648
3649 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3650 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3651 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3652
3653 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3654 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3655 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3656
3657 WREG32(VGT_NUM_INSTANCES, 1);
3658 WREG32(SPI_CONFIG_CNTL, 0);
3659 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3660 WREG32(CP_PERFMON_CNTL, 0);
3661
3662 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3663 FETCH_FIFO_HIWATER(0x4) |
3664 DONE_FIFO_HIWATER(0xe0) |
3665 ALU_UPDATE_FIFO_HIWATER(0x8)));
3666
3667 sq_config = RREG32(SQ_CONFIG);
3668 sq_config &= ~(PS_PRIO(3) |
3669 VS_PRIO(3) |
3670 GS_PRIO(3) |
3671 ES_PRIO(3));
3672 sq_config |= (VC_ENABLE |
3673 EXPORT_SRC_C |
3674 PS_PRIO(0) |
3675 VS_PRIO(1) |
3676 GS_PRIO(2) |
3677 ES_PRIO(3));
3678
3679 switch (rdev->family) {
3680 case CHIP_CEDAR:
3681 case CHIP_PALM:
3682 case CHIP_SUMO:
3683 case CHIP_SUMO2:
3684 case CHIP_CAICOS:
3685 /* no vertex cache */
3686 sq_config &= ~VC_ENABLE;
3687 break;
3688 default:
3689 break;
3690 }
3691
3692 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3693
3694 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3695 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3696 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3697 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3698 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3699 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3700 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
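	/* worked example for max_gprs == 256: the 248 shared GPRs split as
	 * 12/6/4/4/3/3 32nds across PS/VS/GS/ES/HS/LS (93/46/31/31/23/23),
	 * which together with the reserved 4 * 2 clause temps totals 255
	 */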
3701
3702 switch (rdev->family) {
3703 case CHIP_CEDAR:
3704 case CHIP_PALM:
3705 case CHIP_SUMO:
3706 case CHIP_SUMO2:
3707 ps_thread_count = 96;
3708 break;
3709 default:
3710 ps_thread_count = 128;
3711 break;
3712 }
3713
3714 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3715 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3716 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3717 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3718 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3719 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
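	/* worked example for 248 threads with ps_thread_count == 128: the
	 * remaining 120 threads give 120 / 6 == 20, rounded down to a
	 * multiple of 8, i.e. 16 threads each for VS/GS/ES/HS/LS
	 */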
3720
3721 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3722 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3723 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3724 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3725 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3726 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3727
3728 WREG32(SQ_CONFIG, sq_config);
3729 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3730 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3731 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3732 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3733 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3734 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3735 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3736 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3737 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3738 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3739
3740 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3741 FORCE_EOV_MAX_REZ_CNT(255)));
3742
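	/* parts without a vertex cache (see the VC_ENABLE clearing above)
	 * only need their texture cache invalidated
	 */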
3743 switch (rdev->family) {
3744 case CHIP_CEDAR:
3745 case CHIP_PALM:
3746 case CHIP_SUMO:
3747 case CHIP_SUMO2:
3748 case CHIP_CAICOS:
3749 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3750 break;
3751 default:
3752 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3753 break;
3754 }
3755 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3756 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3757
3758 WREG32(VGT_GS_VERTEX_REUSE, 16);
3759 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3760 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3761
3762 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3763 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3764
3765 WREG32(CB_PERF_CTR0_SEL_0, 0);
3766 WREG32(CB_PERF_CTR0_SEL_1, 0);
3767 WREG32(CB_PERF_CTR1_SEL_0, 0);
3768 WREG32(CB_PERF_CTR1_SEL_1, 0);
3769 WREG32(CB_PERF_CTR2_SEL_0, 0);
3770 WREG32(CB_PERF_CTR2_SEL_1, 0);
3771 WREG32(CB_PERF_CTR3_SEL_0, 0);
3772 WREG32(CB_PERF_CTR3_SEL_1, 0);
3773
3774 /* clear render buffer base addresses */
3775 WREG32(CB_COLOR0_BASE, 0);
3776 WREG32(CB_COLOR1_BASE, 0);
3777 WREG32(CB_COLOR2_BASE, 0);
3778 WREG32(CB_COLOR3_BASE, 0);
3779 WREG32(CB_COLOR4_BASE, 0);
3780 WREG32(CB_COLOR5_BASE, 0);
3781 WREG32(CB_COLOR6_BASE, 0);
3782 WREG32(CB_COLOR7_BASE, 0);
3783 WREG32(CB_COLOR8_BASE, 0);
3784 WREG32(CB_COLOR9_BASE, 0);
3785 WREG32(CB_COLOR10_BASE, 0);
3786 WREG32(CB_COLOR11_BASE, 0);
3787
3788 /* set the shader const cache sizes to 0 */
3789 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3790 WREG32(i, 0);
3791 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3792 WREG32(i, 0);
3793
3794 tmp = RREG32(HDP_MISC_CNTL);
3795 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3796 WREG32(HDP_MISC_CNTL, tmp);
3797
3798 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3799 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3800
3801 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3802
3803 udelay(50);
3804
3805 }
3806
3807 int evergreen_mc_init(struct radeon_device *rdev)
3808 {
3809 u32 tmp;
3810 int chansize, numchan;
3811
3812 	/* Get VRAM information */
3813 rdev->mc.vram_is_ddr = true;
3814 if ((rdev->family == CHIP_PALM) ||
3815 (rdev->family == CHIP_SUMO) ||
3816 (rdev->family == CHIP_SUMO2))
3817 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3818 else
3819 tmp = RREG32(MC_ARB_RAMCFG);
3820 if (tmp & CHANSIZE_OVERRIDE) {
3821 chansize = 16;
3822 } else if (tmp & CHANSIZE_MASK) {
3823 chansize = 64;
3824 } else {
3825 chansize = 32;
3826 }
3827 tmp = RREG32(MC_SHARED_CHMAP);
3828 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3829 case 0:
3830 default:
3831 numchan = 1;
3832 break;
3833 case 1:
3834 numchan = 2;
3835 break;
3836 case 2:
3837 numchan = 4;
3838 break;
3839 case 3:
3840 numchan = 8;
3841 break;
3842 }
3843 rdev->mc.vram_width = numchan * chansize;
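	/* e.g. two 64-bit channels give a 128-bit effective VRAM bus */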
3844 	/* Could the aperture size report 0? */
3845 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3846 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3847 /* Setup GPU memory space */
3848 if ((rdev->family == CHIP_PALM) ||
3849 (rdev->family == CHIP_SUMO) ||
3850 (rdev->family == CHIP_SUMO2)) {
3851 /* size in bytes on fusion */
3852 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3853 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3854 } else {
3855 /* size in MB on evergreen/cayman/tn */
3856 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3857 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3858 }
3859 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3860 r700_vram_gtt_location(rdev, &rdev->mc);
3861 radeon_update_bandwidth_info(rdev);
3862
3863 return 0;
3864 }
3865
3866 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3867 {
3868 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3869 RREG32(GRBM_STATUS));
3870 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3871 RREG32(GRBM_STATUS_SE0));
3872 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3873 RREG32(GRBM_STATUS_SE1));
3874 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3875 RREG32(SRBM_STATUS));
3876 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3877 RREG32(SRBM_STATUS2));
3878 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3879 RREG32(CP_STALLED_STAT1));
3880 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3881 RREG32(CP_STALLED_STAT2));
3882 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3883 RREG32(CP_BUSY_STAT));
3884 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3885 RREG32(CP_STAT));
3886 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3887 RREG32(DMA_STATUS_REG));
3888 if (rdev->family >= CHIP_CAYMAN) {
3889 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3890 RREG32(DMA_STATUS_REG + 0x800));
3891 }
3892 }
3893
3894 bool evergreen_is_display_hung(struct radeon_device *rdev)
3895 {
3896 u32 crtc_hung = 0;
3897 u32 crtc_status[6];
3898 u32 i, j, tmp;
3899
3900 for (i = 0; i < rdev->num_crtc; i++) {
3901 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3902 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3903 crtc_hung |= (1 << i);
3904 }
3905 }
3906
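	/* poll each flagged CRTC's HV counter for roughly 1 ms; a counter
	 * that never advances suggests that display controller is hung
	 */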
3907 for (j = 0; j < 10; j++) {
3908 for (i = 0; i < rdev->num_crtc; i++) {
3909 if (crtc_hung & (1 << i)) {
3910 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3911 if (tmp != crtc_status[i])
3912 crtc_hung &= ~(1 << i);
3913 }
3914 }
3915 if (crtc_hung == 0)
3916 return false;
3917 udelay(100);
3918 }
3919
3920 return true;
3921 }
3922
3923 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3924 {
3925 u32 reset_mask = 0;
3926 u32 tmp;
3927
3928 /* GRBM_STATUS */
3929 tmp = RREG32(GRBM_STATUS);
3930 if (tmp & (PA_BUSY | SC_BUSY |
3931 SH_BUSY | SX_BUSY |
3932 TA_BUSY | VGT_BUSY |
3933 DB_BUSY | CB_BUSY |
3934 SPI_BUSY | VGT_BUSY_NO_DMA))
3935 reset_mask |= RADEON_RESET_GFX;
3936
3937 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3938 CP_BUSY | CP_COHERENCY_BUSY))
3939 reset_mask |= RADEON_RESET_CP;
3940
3941 if (tmp & GRBM_EE_BUSY)
3942 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3943
3944 /* DMA_STATUS_REG */
3945 tmp = RREG32(DMA_STATUS_REG);
3946 if (!(tmp & DMA_IDLE))
3947 reset_mask |= RADEON_RESET_DMA;
3948
3949 /* SRBM_STATUS2 */
3950 tmp = RREG32(SRBM_STATUS2);
3951 if (tmp & DMA_BUSY)
3952 reset_mask |= RADEON_RESET_DMA;
3953
3954 /* SRBM_STATUS */
3955 tmp = RREG32(SRBM_STATUS);
3956 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3957 reset_mask |= RADEON_RESET_RLC;
3958
3959 if (tmp & IH_BUSY)
3960 reset_mask |= RADEON_RESET_IH;
3961
3962 if (tmp & SEM_BUSY)
3963 reset_mask |= RADEON_RESET_SEM;
3964
3965 if (tmp & GRBM_RQ_PENDING)
3966 reset_mask |= RADEON_RESET_GRBM;
3967
3968 if (tmp & VMC_BUSY)
3969 reset_mask |= RADEON_RESET_VMC;
3970
3971 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3972 MCC_BUSY | MCD_BUSY))
3973 reset_mask |= RADEON_RESET_MC;
3974
3975 if (evergreen_is_display_hung(rdev))
3976 reset_mask |= RADEON_RESET_DISPLAY;
3977
3978 /* VM_L2_STATUS */
3979 tmp = RREG32(VM_L2_STATUS);
3980 if (tmp & L2_BUSY)
3981 reset_mask |= RADEON_RESET_VMC;
3982
3983 	/* Skip MC reset as it's most likely not hung, just busy */
3984 if (reset_mask & RADEON_RESET_MC) {
3985 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3986 reset_mask &= ~RADEON_RESET_MC;
3987 }
3988
3989 return reset_mask;
3990 }
3991
3992 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3993 {
3994 struct evergreen_mc_save save;
3995 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3996 u32 tmp;
3997
3998 if (reset_mask == 0)
3999 return;
4000
4001 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
4002
4003 evergreen_print_gpu_status_regs(rdev);
4004
4005 /* Disable CP parsing/prefetching */
4006 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4007
4008 if (reset_mask & RADEON_RESET_DMA) {
4009 /* Disable DMA */
4010 tmp = RREG32(DMA_RB_CNTL);
4011 tmp &= ~DMA_RB_ENABLE;
4012 WREG32(DMA_RB_CNTL, tmp);
4013 }
4014
4015 udelay(50);
4016
4017 evergreen_mc_stop(rdev, &save);
4018 if (evergreen_mc_wait_for_idle(rdev)) {
4019 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4020 }
4021
4022 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4023 grbm_soft_reset |= SOFT_RESET_DB |
4024 SOFT_RESET_CB |
4025 SOFT_RESET_PA |
4026 SOFT_RESET_SC |
4027 SOFT_RESET_SPI |
4028 SOFT_RESET_SX |
4029 SOFT_RESET_SH |
4030 SOFT_RESET_TC |
4031 SOFT_RESET_TA |
4032 SOFT_RESET_VC |
4033 SOFT_RESET_VGT;
4034 }
4035
4036 if (reset_mask & RADEON_RESET_CP) {
4037 grbm_soft_reset |= SOFT_RESET_CP |
4038 SOFT_RESET_VGT;
4039
4040 srbm_soft_reset |= SOFT_RESET_GRBM;
4041 }
4042
4043 if (reset_mask & RADEON_RESET_DMA)
4044 srbm_soft_reset |= SOFT_RESET_DMA;
4045
4046 if (reset_mask & RADEON_RESET_DISPLAY)
4047 srbm_soft_reset |= SOFT_RESET_DC;
4048
4049 if (reset_mask & RADEON_RESET_RLC)
4050 srbm_soft_reset |= SOFT_RESET_RLC;
4051
4052 if (reset_mask & RADEON_RESET_SEM)
4053 srbm_soft_reset |= SOFT_RESET_SEM;
4054
4055 if (reset_mask & RADEON_RESET_IH)
4056 srbm_soft_reset |= SOFT_RESET_IH;
4057
4058 if (reset_mask & RADEON_RESET_GRBM)
4059 srbm_soft_reset |= SOFT_RESET_GRBM;
4060
4061 if (reset_mask & RADEON_RESET_VMC)
4062 srbm_soft_reset |= SOFT_RESET_VMC;
4063
4064 if (!(rdev->flags & RADEON_IS_IGP)) {
4065 if (reset_mask & RADEON_RESET_MC)
4066 srbm_soft_reset |= SOFT_RESET_MC;
4067 }
4068
4069 if (grbm_soft_reset) {
4070 tmp = RREG32(GRBM_SOFT_RESET);
4071 tmp |= grbm_soft_reset;
4072 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4073 WREG32(GRBM_SOFT_RESET, tmp);
4074 tmp = RREG32(GRBM_SOFT_RESET);
4075
4076 udelay(50);
4077
4078 tmp &= ~grbm_soft_reset;
4079 WREG32(GRBM_SOFT_RESET, tmp);
4080 tmp = RREG32(GRBM_SOFT_RESET);
4081 }
4082
4083 if (srbm_soft_reset) {
4084 tmp = RREG32(SRBM_SOFT_RESET);
4085 tmp |= srbm_soft_reset;
4086 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4087 WREG32(SRBM_SOFT_RESET, tmp);
4088 tmp = RREG32(SRBM_SOFT_RESET);
4089
4090 udelay(50);
4091
4092 tmp &= ~srbm_soft_reset;
4093 WREG32(SRBM_SOFT_RESET, tmp);
4094 tmp = RREG32(SRBM_SOFT_RESET);
4095 }
4096
4097 /* Wait a little for things to settle down */
4098 udelay(50);
4099
4100 evergreen_mc_resume(rdev, &save);
4101 udelay(50);
4102
4103 evergreen_print_gpu_status_regs(rdev);
4104 }
4105
4106 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4107 {
4108 struct evergreen_mc_save save;
4109 u32 tmp, i;
4110
4111 dev_info(rdev->dev, "GPU pci config reset\n");
4112
4113 /* disable dpm? */
4114
4115 /* Disable CP parsing/prefetching */
4116 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4117 udelay(50);
4118 /* Disable DMA */
4119 tmp = RREG32(DMA_RB_CNTL);
4120 tmp &= ~DMA_RB_ENABLE;
4121 WREG32(DMA_RB_CNTL, tmp);
4122 /* XXX other engines? */
4123
4124 /* halt the rlc */
4125 r600_rlc_stop(rdev);
4126
4127 udelay(50);
4128
4129 /* set mclk/sclk to bypass */
4130 rv770_set_clk_bypass_mode(rdev);
4131 /* disable BM */
4132 pci_clear_master(rdev->pdev);
4133 /* disable mem access */
4134 evergreen_mc_stop(rdev, &save);
4135 if (evergreen_mc_wait_for_idle(rdev)) {
4136 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4137 }
4138 /* reset */
4139 radeon_pci_config_reset(rdev);
4140 /* wait for asic to come out of reset */
4141 for (i = 0; i < rdev->usec_timeout; i++) {
4142 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4143 break;
4144 udelay(1);
4145 }
4146 }
4147
4148 int evergreen_asic_reset(struct radeon_device *rdev)
4149 {
4150 u32 reset_mask;
4151
4152 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4153
4154 if (reset_mask)
4155 r600_set_bios_scratch_engine_hung(rdev, true);
4156
4157 /* try soft reset */
4158 evergreen_gpu_soft_reset(rdev, reset_mask);
4159
4160 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4161
4162 /* try pci config reset */
4163 if (reset_mask && radeon_hard_reset)
4164 evergreen_gpu_pci_config_reset(rdev);
4165
4166 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4167
4168 if (!reset_mask)
4169 r600_set_bios_scratch_engine_hung(rdev, false);
4170
4171 return 0;
4172 }
4173
4174 /**
4175 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4176 *
4177 * @rdev: radeon_device pointer
4178 * @ring: radeon_ring structure holding ring information
4179 *
4180 * Check if the GFX engine is locked up.
4181 * Returns true if the engine appears to be locked up, false if not.
4182 */
4183 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4184 {
4185 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4186
4187 if (!(reset_mask & (RADEON_RESET_GFX |
4188 RADEON_RESET_COMPUTE |
4189 RADEON_RESET_CP))) {
4190 radeon_ring_lockup_update(rdev, ring);
4191 return false;
4192 }
4193 return radeon_ring_test_lockup(rdev, ring);
4194 }
4195
4196 /*
4197 * RLC
4198 */
4199 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4200 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
4201
4202 void sumo_rlc_fini(struct radeon_device *rdev)
4203 {
4204 int r;
4205
4206 /* save restore block */
4207 if (rdev->rlc.save_restore_obj) {
4208 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4209 if (unlikely(r != 0))
4210 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4211 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4212 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4213
4214 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4215 rdev->rlc.save_restore_obj = NULL;
4216 }
4217
4218 /* clear state block */
4219 if (rdev->rlc.clear_state_obj) {
4220 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4221 if (unlikely(r != 0))
4222 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4223 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4224 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4225
4226 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4227 rdev->rlc.clear_state_obj = NULL;
4228 }
4229
4230 	/* cp table block */
4231 if (rdev->rlc.cp_table_obj) {
4232 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4233 if (unlikely(r != 0))
4234 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4235 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4236 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4237
4238 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4239 rdev->rlc.cp_table_obj = NULL;
4240 }
4241 }
4242
4243 #define CP_ME_TABLE_SIZE 96
4244
4245 int sumo_rlc_init(struct radeon_device *rdev)
4246 {
4247 const u32 *src_ptr;
4248 volatile u32 *dst_ptr;
4249 u32 dws, data, i, j, k, reg_num;
4250 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4251 u64 reg_list_mc_addr;
4252 const struct cs_section_def *cs_data;
4253 int r;
4254
4255 src_ptr = rdev->rlc.reg_list;
4256 dws = rdev->rlc.reg_list_size;
4257 if (rdev->family >= CHIP_BONAIRE) {
4258 dws += (5 * 16) + 48 + 48 + 64;
4259 }
4260 cs_data = rdev->rlc.cs_data;
4261
4262 if (src_ptr) {
4263 /* save restore block */
4264 if (rdev->rlc.save_restore_obj == NULL) {
4265 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4266 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4267 NULL, &rdev->rlc.save_restore_obj);
4268 if (r) {
4269 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4270 return r;
4271 }
4272 }
4273
4274 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4275 if (unlikely(r != 0)) {
4276 sumo_rlc_fini(rdev);
4277 return r;
4278 }
4279 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4280 &rdev->rlc.save_restore_gpu_addr);
4281 if (r) {
4282 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4283 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4284 sumo_rlc_fini(rdev);
4285 return r;
4286 }
4287
4288 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)__UNVOLATILE(&rdev->rlc.sr_ptr));
4289 if (r) {
4290 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4291 sumo_rlc_fini(rdev);
4292 return r;
4293 }
4294 /* write the sr buffer */
4295 dst_ptr = rdev->rlc.sr_ptr;
4296 if (rdev->family >= CHIP_TAHITI) {
4297 /* SI */
4298 for (i = 0; i < rdev->rlc.reg_list_size; i++)
4299 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4300 } else {
4301 /* ON/LN/TN */
4302 /* format:
4303 * dw0: (reg2 << 16) | reg1
4304 * dw1: reg1 save space
4305 * dw2: reg2 save space
4306 */
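			/* each register pair thus occupies three dwords (one
			 * packed header plus two save slots), hence the
			 * (i * 3) / 2 index stride used for j below
			 */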
4307 for (i = 0; i < dws; i++) {
4308 data = src_ptr[i] >> 2;
4309 i++;
4310 if (i < dws)
4311 data |= (src_ptr[i] >> 2) << 16;
4312 j = (((i - 1) * 3) / 2);
4313 dst_ptr[j] = cpu_to_le32(data);
4314 }
4315 j = ((i * 3) / 2);
4316 dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4317 }
4318 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4319 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4320 }
4321
4322 if (cs_data) {
4323 /* clear state block */
4324 if (rdev->family >= CHIP_BONAIRE) {
4325 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4326 } else if (rdev->family >= CHIP_TAHITI) {
4327 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4328 dws = rdev->rlc.clear_state_size + (256 / 4);
4329 } else {
4330 reg_list_num = 0;
4331 dws = 0;
4332 for (i = 0; cs_data[i].section != NULL; i++) {
4333 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4334 reg_list_num++;
4335 dws += cs_data[i].section[j].reg_count;
4336 }
4337 }
4338 reg_list_blk_index = (3 * reg_list_num + 2);
4339 dws += reg_list_blk_index;
4340 rdev->rlc.clear_state_size = dws;
4341 }
4342
4343 if (rdev->rlc.clear_state_obj == NULL) {
4344 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4345 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4346 NULL, &rdev->rlc.clear_state_obj);
4347 if (r) {
4348 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4349 sumo_rlc_fini(rdev);
4350 return r;
4351 }
4352 }
4353 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4354 if (unlikely(r != 0)) {
4355 sumo_rlc_fini(rdev);
4356 return r;
4357 }
4358 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4359 &rdev->rlc.clear_state_gpu_addr);
4360 if (r) {
4361 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4362 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4363 sumo_rlc_fini(rdev);
4364 return r;
4365 }
4366
4367 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)__UNVOLATILE(&rdev->rlc.cs_ptr));
4368 if (r) {
4369 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4370 sumo_rlc_fini(rdev);
4371 return r;
4372 }
4373 /* set up the cs buffer */
4374 dst_ptr = rdev->rlc.cs_ptr;
4375 if (rdev->family >= CHIP_BONAIRE) {
4376 cik_get_csb_buffer(rdev, dst_ptr);
4377 } else if (rdev->family >= CHIP_TAHITI) {
4378 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4379 dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4380 dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4381 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4382 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4383 } else {
4384 reg_list_hdr_blk_index = 0;
4385 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4386 data = upper_32_bits(reg_list_mc_addr);
4387 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4388 reg_list_hdr_blk_index++;
4389 for (i = 0; cs_data[i].section != NULL; i++) {
4390 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4391 reg_num = cs_data[i].section[j].reg_count;
4392 data = reg_list_mc_addr & 0xffffffff;
4393 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4394 reg_list_hdr_blk_index++;
4395
4396 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4397 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4398 reg_list_hdr_blk_index++;
4399
4400 data = 0x08000000 | (reg_num * 4);
4401 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4402 reg_list_hdr_blk_index++;
4403
4404 for (k = 0; k < reg_num; k++) {
4405 data = cs_data[i].section[j].extent[k];
4406 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4407 }
4408 reg_list_mc_addr += reg_num * 4;
4409 reg_list_blk_index += reg_num;
4410 }
4411 }
4412 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4413 }
4414 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4415 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4416 }
4417
4418 if (rdev->rlc.cp_table_size) {
4419 if (rdev->rlc.cp_table_obj == NULL) {
4420 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4421 PAGE_SIZE, true,
4422 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4423 NULL, &rdev->rlc.cp_table_obj);
4424 if (r) {
4425 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4426 sumo_rlc_fini(rdev);
4427 return r;
4428 }
4429 }
4430
4431 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4432 if (unlikely(r != 0)) {
4433 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4434 sumo_rlc_fini(rdev);
4435 return r;
4436 }
4437 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4438 &rdev->rlc.cp_table_gpu_addr);
4439 if (r) {
4440 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4441 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4442 sumo_rlc_fini(rdev);
4443 return r;
4444 }
4445 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)__UNVOLATILE(&rdev->rlc.cp_table_ptr));
4446 if (r) {
4447 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4448 sumo_rlc_fini(rdev);
4449 return r;
4450 }
4451
4452 cik_init_cp_pg_table(rdev);
4453
4454 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4455 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4456
4457 }
4458
4459 return 0;
4460 }
4461
4462 static void evergreen_rlc_start(struct radeon_device *rdev)
4463 {
4464 u32 mask = RLC_ENABLE;
4465
4466 if (rdev->flags & RADEON_IS_IGP) {
4467 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4468 }
4469
4470 WREG32(RLC_CNTL, mask);
4471 }
4472
4473 int evergreen_rlc_resume(struct radeon_device *rdev)
4474 {
4475 u32 i;
4476 const __be32 *fw_data;
4477
4478 if (!rdev->rlc_fw)
4479 return -EINVAL;
4480
4481 r600_rlc_stop(rdev);
4482
4483 WREG32(RLC_HB_CNTL, 0);
4484
4485 if (rdev->flags & RADEON_IS_IGP) {
4486 if (rdev->family == CHIP_ARUBA) {
4487 u32 always_on_bitmap =
4488 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4489 /* find out the number of active simds */
4490 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4491 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4492 tmp = hweight32(~tmp);
4493 if (tmp == rdev->config.cayman.max_simds_per_se) {
4494 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4495 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4496 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4497 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4498 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4499 }
4500 } else {
4501 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4502 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4503 }
4504 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4505 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4506 } else {
4507 WREG32(RLC_HB_BASE, 0);
4508 WREG32(RLC_HB_RPTR, 0);
4509 WREG32(RLC_HB_WPTR, 0);
4510 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4511 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4512 }
4513 WREG32(RLC_MC_CNTL, 0);
4514 WREG32(RLC_UCODE_CNTL, 0);
4515
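	/* load the RLC microcode a dword at a time; only the ucode size
	 * differs between the Aruba, Cayman and Evergreen families
	 */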
4516 fw_data = (const __be32 *)rdev->rlc_fw->data;
4517 if (rdev->family >= CHIP_ARUBA) {
4518 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4519 WREG32(RLC_UCODE_ADDR, i);
4520 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4521 }
4522 } else if (rdev->family >= CHIP_CAYMAN) {
4523 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4524 WREG32(RLC_UCODE_ADDR, i);
4525 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4526 }
4527 } else {
4528 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4529 WREG32(RLC_UCODE_ADDR, i);
4530 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4531 }
4532 }
4533 WREG32(RLC_UCODE_ADDR, 0);
4534
4535 evergreen_rlc_start(rdev);
4536
4537 return 0;
4538 }
4539
4540 /* Interrupts */
4541
4542 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4543 {
4544 if (crtc >= rdev->num_crtc)
4545 return 0;
4546 else
4547 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4548 }
4549
4550 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4551 {
4552 u32 tmp;
4553
4554 if (rdev->family >= CHIP_CAYMAN) {
4555 cayman_cp_int_cntl_setup(rdev, 0,
4556 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4557 cayman_cp_int_cntl_setup(rdev, 1, 0);
4558 cayman_cp_int_cntl_setup(rdev, 2, 0);
4559 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4560 WREG32(CAYMAN_DMA1_CNTL, tmp);
4561 } else
4562 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4563 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4564 WREG32(DMA_CNTL, tmp);
4565 WREG32(GRBM_INT_CNTL, 0);
4566 WREG32(SRBM_INT_CNTL, 0);
4567 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4568 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4569 if (rdev->num_crtc >= 4) {
4570 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4571 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4572 }
4573 if (rdev->num_crtc >= 6) {
4574 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4575 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4576 }
4577
4578 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4579 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4580 if (rdev->num_crtc >= 4) {
4581 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4582 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4583 }
4584 if (rdev->num_crtc >= 6) {
4585 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4586 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4587 }
4588
4589 /* only one DAC on DCE5 */
4590 if (!ASIC_IS_DCE5(rdev))
4591 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4592 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4593
4594 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4595 WREG32(DC_HPD1_INT_CONTROL, tmp);
4596 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4597 WREG32(DC_HPD2_INT_CONTROL, tmp);
4598 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4599 WREG32(DC_HPD3_INT_CONTROL, tmp);
4600 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4601 WREG32(DC_HPD4_INT_CONTROL, tmp);
4602 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4603 WREG32(DC_HPD5_INT_CONTROL, tmp);
4604 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4605 WREG32(DC_HPD6_INT_CONTROL, tmp);
4606
4607 }
4608
4609 int evergreen_irq_set(struct radeon_device *rdev)
4610 {
4611 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4612 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4613 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4614 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4615 u32 grbm_int_cntl = 0;
4616 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4617 u32 dma_cntl, dma_cntl1 = 0;
4618 u32 thermal_int = 0;
4619
4620 if (!rdev->irq.installed) {
4621 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4622 return -EINVAL;
4623 }
4624 /* don't enable anything if the ih is disabled */
4625 if (!rdev->ih.enabled) {
4626 r600_disable_interrupts(rdev);
4627 /* force the active interrupt state to all disabled */
4628 evergreen_disable_interrupt_state(rdev);
4629 return 0;
4630 }
4631
4632 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4633 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4634 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4635 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4636 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4637 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4638 if (rdev->family == CHIP_ARUBA)
4639 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4640 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4641 else
4642 thermal_int = RREG32(CG_THERMAL_INT) &
4643 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4644
4645 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4646 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4647 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4648 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4649 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4650 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4651
4652 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4653
4654 if (rdev->family >= CHIP_CAYMAN) {
4655 /* enable CP interrupts on all rings */
4656 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4657 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4658 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4659 }
4660 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4661 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4662 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4663 }
4664 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4665 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4666 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4667 }
4668 } else {
4669 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4670 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4671 cp_int_cntl |= RB_INT_ENABLE;
4672 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4673 }
4674 }
4675
4676 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4677 DRM_DEBUG("r600_irq_set: sw int dma\n");
4678 dma_cntl |= TRAP_ENABLE;
4679 }
4680
4681 if (rdev->family >= CHIP_CAYMAN) {
4682 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4683 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4684 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4685 dma_cntl1 |= TRAP_ENABLE;
4686 }
4687 }
4688
4689 if (rdev->irq.dpm_thermal) {
4690 DRM_DEBUG("dpm thermal\n");
4691 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4692 }
4693
4694 if (rdev->irq.crtc_vblank_int[0] ||
4695 atomic_read(&rdev->irq.pflip[0])) {
4696 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4697 crtc1 |= VBLANK_INT_MASK;
4698 }
4699 if (rdev->irq.crtc_vblank_int[1] ||
4700 atomic_read(&rdev->irq.pflip[1])) {
4701 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4702 crtc2 |= VBLANK_INT_MASK;
4703 }
4704 if (rdev->irq.crtc_vblank_int[2] ||
4705 atomic_read(&rdev->irq.pflip[2])) {
4706 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4707 crtc3 |= VBLANK_INT_MASK;
4708 }
4709 if (rdev->irq.crtc_vblank_int[3] ||
4710 atomic_read(&rdev->irq.pflip[3])) {
4711 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4712 crtc4 |= VBLANK_INT_MASK;
4713 }
4714 if (rdev->irq.crtc_vblank_int[4] ||
4715 atomic_read(&rdev->irq.pflip[4])) {
4716 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4717 crtc5 |= VBLANK_INT_MASK;
4718 }
4719 if (rdev->irq.crtc_vblank_int[5] ||
4720 atomic_read(&rdev->irq.pflip[5])) {
4721 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4722 crtc6 |= VBLANK_INT_MASK;
4723 }
4724 if (rdev->irq.hpd[0]) {
4725 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4726 hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4727 }
4728 if (rdev->irq.hpd[1]) {
4729 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4730 hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4731 }
4732 if (rdev->irq.hpd[2]) {
4733 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4734 hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4735 }
4736 if (rdev->irq.hpd[3]) {
4737 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4738 hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4739 }
4740 if (rdev->irq.hpd[4]) {
4741 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4742 hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4743 }
4744 if (rdev->irq.hpd[5]) {
4745 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4746 hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4747 }
4748 if (rdev->irq.afmt[0]) {
4749 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4750 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4751 }
4752 if (rdev->irq.afmt[1]) {
4753 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4754 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4755 }
4756 if (rdev->irq.afmt[2]) {
4757 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4758 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4759 }
4760 if (rdev->irq.afmt[3]) {
4761 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4762 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4763 }
4764 if (rdev->irq.afmt[4]) {
4765 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4766 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4767 }
4768 if (rdev->irq.afmt[5]) {
4769 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4770 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4771 }
4772
4773 if (rdev->family >= CHIP_CAYMAN) {
4774 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4775 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4776 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4777 } else
4778 WREG32(CP_INT_CNTL, cp_int_cntl);
4779
4780 WREG32(DMA_CNTL, dma_cntl);
4781
4782 if (rdev->family >= CHIP_CAYMAN)
4783 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4784
4785 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4786
4787 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4788 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4789 if (rdev->num_crtc >= 4) {
4790 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4791 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4792 }
4793 if (rdev->num_crtc >= 6) {
4794 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4795 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4796 }
4797
4798 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4799 GRPH_PFLIP_INT_MASK);
4800 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4801 GRPH_PFLIP_INT_MASK);
4802 if (rdev->num_crtc >= 4) {
4803 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4804 GRPH_PFLIP_INT_MASK);
4805 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4806 GRPH_PFLIP_INT_MASK);
4807 }
4808 if (rdev->num_crtc >= 6) {
4809 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4810 GRPH_PFLIP_INT_MASK);
4811 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4812 GRPH_PFLIP_INT_MASK);
4813 }
4814
4815 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4816 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4817 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4818 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4819 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4820 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4821 if (rdev->family == CHIP_ARUBA)
4822 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4823 else
4824 WREG32(CG_THERMAL_INT, thermal_int);
4825
4826 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4827 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4828 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4829 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4830 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4831 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4832
4833 /* posting read */
4834 RREG32(SRBM_STATUS);
4835
4836 return 0;
4837 }
4838
4839 static void evergreen_irq_ack(struct radeon_device *rdev)
4840 {
4841 u32 tmp;
4842
4843 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4844 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4845 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4846 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4847 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4848 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4849 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4850 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4851 if (rdev->num_crtc >= 4) {
4852 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4853 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4854 }
4855 if (rdev->num_crtc >= 6) {
4856 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4857 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4858 }
4859
4860 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4861 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4862 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4863 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4864 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4865 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4866
4867 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4868 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4869 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4870 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4871 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4872 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4873 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4874 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4875 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4876 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4877 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4878 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4879
4880 if (rdev->num_crtc >= 4) {
4881 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4882 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4883 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4884 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4885 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4886 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4887 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4888 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4889 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4890 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4891 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4892 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4893 }
4894
4895 if (rdev->num_crtc >= 6) {
4896 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4897 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4898 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4899 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4900 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4901 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4902 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4903 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4904 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4905 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4906 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4907 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4908 }
4909
4910 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4911 tmp = RREG32(DC_HPD1_INT_CONTROL);
4912 tmp |= DC_HPDx_INT_ACK;
4913 WREG32(DC_HPD1_INT_CONTROL, tmp);
4914 }
4915 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4916 tmp = RREG32(DC_HPD2_INT_CONTROL);
4917 tmp |= DC_HPDx_INT_ACK;
4918 WREG32(DC_HPD2_INT_CONTROL, tmp);
4919 }
4920 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4921 tmp = RREG32(DC_HPD3_INT_CONTROL);
4922 tmp |= DC_HPDx_INT_ACK;
4923 WREG32(DC_HPD3_INT_CONTROL, tmp);
4924 }
4925 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4926 tmp = RREG32(DC_HPD4_INT_CONTROL);
4927 tmp |= DC_HPDx_INT_ACK;
4928 WREG32(DC_HPD4_INT_CONTROL, tmp);
4929 }
4930 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4931 tmp = RREG32(DC_HPD5_INT_CONTROL);
4932 tmp |= DC_HPDx_INT_ACK;
4933 WREG32(DC_HPD5_INT_CONTROL, tmp);
4934 }
4935 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4936 tmp = RREG32(DC_HPD6_INT_CONTROL);
4937 tmp |= DC_HPDx_INT_ACK;
4938 WREG32(DC_HPD6_INT_CONTROL, tmp);
4939 }
4940
4941 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4942 tmp = RREG32(DC_HPD1_INT_CONTROL);
4943 tmp |= DC_HPDx_RX_INT_ACK;
4944 WREG32(DC_HPD1_INT_CONTROL, tmp);
4945 }
4946 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4947 tmp = RREG32(DC_HPD2_INT_CONTROL);
4948 tmp |= DC_HPDx_RX_INT_ACK;
4949 WREG32(DC_HPD2_INT_CONTROL, tmp);
4950 }
4951 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4952 tmp = RREG32(DC_HPD3_INT_CONTROL);
4953 tmp |= DC_HPDx_RX_INT_ACK;
4954 WREG32(DC_HPD3_INT_CONTROL, tmp);
4955 }
4956 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4957 tmp = RREG32(DC_HPD4_INT_CONTROL);
4958 tmp |= DC_HPDx_RX_INT_ACK;
4959 WREG32(DC_HPD4_INT_CONTROL, tmp);
4960 }
4961 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4962 tmp = RREG32(DC_HPD5_INT_CONTROL);
4963 tmp |= DC_HPDx_RX_INT_ACK;
4964 WREG32(DC_HPD5_INT_CONTROL, tmp);
4965 }
4966 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4967 tmp = RREG32(DC_HPD6_INT_CONTROL);
4968 tmp |= DC_HPDx_RX_INT_ACK;
4969 WREG32(DC_HPD6_INT_CONTROL, tmp);
4970 }
4971
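	/*
	 * Ack the AFMT audio format change (AZ_FORMAT_WTRIG) interrupts
	 * for each display pipe.
	 */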
4972 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4973 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4974 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4975 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4976 }
4977 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4978 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4979 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4980 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4981 }
4982 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4983 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4984 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4985 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4986 }
4987 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4988 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4989 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4990 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4991 }
4992 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4993 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4994 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4995 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4996 }
4997 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4998 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4999 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5000 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
5001 }
5002 }
5003
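/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw, wait briefly for in-flight
 * interrupts, then ack and clear any remaining interrupt state
 * (evergreen).
 */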
5004 static void evergreen_irq_disable(struct radeon_device *rdev)
5005 {
5006 r600_disable_interrupts(rdev);
5007 /* Wait and acknowledge irq */
5008 mdelay(1);
5009 evergreen_irq_ack(rdev);
5010 evergreen_disable_interrupt_state(rdev);
5011 }
5012
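/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Go through the interrupt disable sequence and stop the RLC
 * prior to suspend (evergreen).
 */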
5013 void evergreen_irq_suspend(struct radeon_device *rdev)
5014 {
5015 evergreen_irq_disable(rdev);
5016 r600_rlc_stop(rdev);
5017 }
5018
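/**
 * evergreen_get_ih_wptr - get the current IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Fetch the IH ring buffer write pointer, from the writeback
 * page if enabled, otherwise from the register.  On ring
 * overflow, skip the read pointer ahead of the overwritten
 * vectors and clear the overflow flag.
 * Returns the masked wptr.
 */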
5019 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5020 {
5021 u32 wptr, tmp;
5022
5023 if (rdev->wb.enabled)
5024 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5025 else
5026 wptr = RREG32(IH_RB_WPTR);
5027
5028 if (wptr & RB_OVERFLOW) {
5029 wptr &= ~RB_OVERFLOW;
5030 		/* When a ring buffer overflow happens, start parsing the
5031 		 * interrupts from the last not-overwritten vector (wptr + 16).
5032 		 * Hopefully this should allow us to catch up.
5033 		 */
5034 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5035 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5036 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5037 tmp = RREG32(IH_RB_CNTL);
5038 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5039 WREG32(IH_RB_CNTL, tmp);
5040 }
5041 return (wptr & rdev->ih.ptr_mask);
5042 }
5043
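/**
 * evergreen_irq_process - interrupt handler
 *
 * @rdev: radeon_device pointer
 *
 * Interrupt handler for evergreen.  Walk the IH ring,
 * ack interrupts and schedule work to handle interrupt
 * events.
 * Returns irq process return code.
 */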
5044 int evergreen_irq_process(struct radeon_device *rdev)
5045 {
5046 u32 wptr;
5047 u32 rptr;
5048 u32 src_id, src_data;
5049 u32 ring_index;
5050 bool queue_hotplug = false;
5051 bool queue_hdmi = false;
5052 bool queue_dp = false;
5053 bool queue_thermal = false;
5054 u32 status, addr;
5055
5056 if (!rdev->ih.enabled || rdev->shutdown)
5057 return IRQ_NONE;
5058
5059 wptr = evergreen_get_ih_wptr(rdev);
5060
5061 restart_ih:
5062 /* is somebody else already processing irqs? */
5063 if (atomic_xchg(&rdev->ih.lock, 1))
5064 return IRQ_NONE;
5065
5066 rptr = rdev->ih.rptr;
5067 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5068
5069 /* Order reading of wptr vs. reading of IH ring data */
5070 rmb();
5071
5072 /* display interrupts */
5073 evergreen_irq_ack(rdev);
5074
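	/*
	 * Each IH vector is 16 bytes (four dwords): dword 0 carries the
	 * 8-bit source id, dword 1 the 28-bit source data; the remaining
	 * dwords are unused here.
	 */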
5075 while (rptr != wptr) {
5076 /* wptr/rptr are in bytes! */
5077 ring_index = rptr / 4;
5078 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5079 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5080
5081 switch (src_id) {
5082 case 1: /* D1 vblank/vline */
5083 switch (src_data) {
5084 case 0: /* D1 vblank */
5085 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5086 DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5087
5088 if (rdev->irq.crtc_vblank_int[0]) {
5089 drm_handle_vblank(rdev->ddev, 0);
5090 #ifdef __NetBSD__
5091 spin_lock(&rdev->irq.vblank_lock);
5092 rdev->pm.vblank_sync = true;
5093 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5094 spin_unlock(&rdev->irq.vblank_lock);
5095 #else
5096 rdev->pm.vblank_sync = true;
5097 wake_up(&rdev->irq.vblank_queue);
5098 #endif
5099 }
5100 if (atomic_read(&rdev->irq.pflip[0]))
5101 radeon_crtc_handle_vblank(rdev, 0);
5102 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5103 DRM_DEBUG("IH: D1 vblank\n");
5104
5105 break;
5106 case 1: /* D1 vline */
5107 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5108 DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5109
5110 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5111 DRM_DEBUG("IH: D1 vline\n");
5112
5113 break;
5114 default:
5115 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5116 break;
5117 }
5118 break;
5119 case 2: /* D2 vblank/vline */
5120 switch (src_data) {
5121 case 0: /* D2 vblank */
5122 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5123 DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5124
5125 if (rdev->irq.crtc_vblank_int[1]) {
5126 drm_handle_vblank(rdev->ddev, 1);
5127 #ifdef __NetBSD__
5128 spin_lock(&rdev->irq.vblank_lock);
5129 rdev->pm.vblank_sync = true;
5130 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5131 spin_unlock(&rdev->irq.vblank_lock);
5132 #else
5133 rdev->pm.vblank_sync = true;
5134 wake_up(&rdev->irq.vblank_queue);
5135 #endif
5136 }
5137 if (atomic_read(&rdev->irq.pflip[1]))
5138 radeon_crtc_handle_vblank(rdev, 1);
5139 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5140 DRM_DEBUG("IH: D2 vblank\n");
5141
5142 break;
5143 case 1: /* D2 vline */
5144 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5145 DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5146
5147 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5148 DRM_DEBUG("IH: D2 vline\n");
5149
5150 break;
5151 default:
5152 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5153 break;
5154 }
5155 break;
5156 case 3: /* D3 vblank/vline */
5157 switch (src_data) {
5158 case 0: /* D3 vblank */
5159 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5160 DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5161
5162 if (rdev->irq.crtc_vblank_int[2]) {
5163 drm_handle_vblank(rdev->ddev, 2);
5164 #ifdef __NetBSD__
5165 spin_lock(&rdev->irq.vblank_lock);
5166 rdev->pm.vblank_sync = true;
5167 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5168 spin_unlock(&rdev->irq.vblank_lock);
5169 #else
5170 rdev->pm.vblank_sync = true;
5171 wake_up(&rdev->irq.vblank_queue);
5172 #endif
5173 }
5174 if (atomic_read(&rdev->irq.pflip[2]))
5175 radeon_crtc_handle_vblank(rdev, 2);
5176 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5177 DRM_DEBUG("IH: D3 vblank\n");
5178
5179 break;
5180 case 1: /* D3 vline */
5181 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5182 DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5183
5184 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5185 DRM_DEBUG("IH: D3 vline\n");
5186
5187 break;
5188 default:
5189 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5190 break;
5191 }
5192 break;
5193 case 4: /* D4 vblank/vline */
5194 switch (src_data) {
5195 case 0: /* D4 vblank */
5196 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5197 DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5198
5199 if (rdev->irq.crtc_vblank_int[3]) {
5200 drm_handle_vblank(rdev->ddev, 3);
5201 #ifdef __NetBSD__
5202 spin_lock(&rdev->irq.vblank_lock);
5203 rdev->pm.vblank_sync = true;
5204 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5205 spin_unlock(&rdev->irq.vblank_lock);
5206 #else
5207 rdev->pm.vblank_sync = true;
5208 wake_up(&rdev->irq.vblank_queue);
5209 #endif
5210 }
5211 if (atomic_read(&rdev->irq.pflip[3]))
5212 radeon_crtc_handle_vblank(rdev, 3);
5213 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5214 DRM_DEBUG("IH: D4 vblank\n");
5215
5216 break;
5217 case 1: /* D4 vline */
5218 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5219 DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5220
5221 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5222 DRM_DEBUG("IH: D4 vline\n");
5223
5224 break;
5225 default:
5226 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5227 break;
5228 }
5229 break;
5230 case 5: /* D5 vblank/vline */
5231 switch (src_data) {
5232 case 0: /* D5 vblank */
5233 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5234 DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5235
5236 if (rdev->irq.crtc_vblank_int[4]) {
5237 drm_handle_vblank(rdev->ddev, 4);
5238 #ifdef __NetBSD__
5239 				spin_lock(&rdev->irq.vblank_lock);
5240 				rdev->pm.vblank_sync = true;
5241 				DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5242 				spin_unlock(&rdev->irq.vblank_lock);
5243 #else
5244 				rdev->pm.vblank_sync = true;
5245 				wake_up(&rdev->irq.vblank_queue);
5246 #endif
5247 }
5248 if (atomic_read(&rdev->irq.pflip[4]))
5249 radeon_crtc_handle_vblank(rdev, 4);
5250 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5251 DRM_DEBUG("IH: D5 vblank\n");
5252
5253 break;
5254 case 1: /* D5 vline */
5255 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5256 DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5257
5258 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5259 DRM_DEBUG("IH: D5 vline\n");
5260
5261 break;
5262 default:
5263 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5264 break;
5265 }
5266 break;
5267 case 6: /* D6 vblank/vline */
5268 switch (src_data) {
5269 case 0: /* D6 vblank */
5270 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5271 DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5272
5273 if (rdev->irq.crtc_vblank_int[5]) {
5274 drm_handle_vblank(rdev->ddev, 5);
5275 #ifdef __NetBSD__
5276 spin_lock(&rdev->irq.vblank_lock);
5277 rdev->pm.vblank_sync = true;
5278 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5279 spin_unlock(&rdev->irq.vblank_lock);
5280 #else
5281 rdev->pm.vblank_sync = true;
5282 wake_up(&rdev->irq.vblank_queue);
5283 #endif
5284 }
5285 if (atomic_read(&rdev->irq.pflip[5]))
5286 radeon_crtc_handle_vblank(rdev, 5);
5287 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5288 DRM_DEBUG("IH: D6 vblank\n");
5289
5290 break;
5291 case 1: /* D6 vline */
5292 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5293 DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5294
5295 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5296 DRM_DEBUG("IH: D6 vline\n");
5297
5298 break;
5299 default:
5300 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5301 break;
5302 }
5303 break;
5304 case 8: /* D1 page flip */
5305 case 10: /* D2 page flip */
5306 case 12: /* D3 page flip */
5307 case 14: /* D4 page flip */
5308 case 16: /* D5 page flip */
5309 case 18: /* D6 page flip */
5310 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5311 if (radeon_use_pflipirq > 0)
5312 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5313 break;
5314 case 42: /* HPD hotplug */
5315 switch (src_data) {
5316 case 0:
5317 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5318 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5319
5320 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5321 queue_hotplug = true;
5322 DRM_DEBUG("IH: HPD1\n");
5323 break;
5324 case 1:
5325 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5326 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5327
5328 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5329 queue_hotplug = true;
5330 DRM_DEBUG("IH: HPD2\n");
5331 break;
5332 case 2:
5333 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5334 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5335
5336 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5337 queue_hotplug = true;
5338 DRM_DEBUG("IH: HPD3\n");
5339 break;
5340 case 3:
5341 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5342 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5343
5344 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5345 queue_hotplug = true;
5346 DRM_DEBUG("IH: HPD4\n");
5347 break;
5348 case 4:
5349 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5350 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5351
5352 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5353 queue_hotplug = true;
5354 DRM_DEBUG("IH: HPD5\n");
5355 break;
5356 case 5:
5357 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5358 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5359
5360 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5361 queue_hotplug = true;
5362 DRM_DEBUG("IH: HPD6\n");
5363 break;
5364 case 6:
5365 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5366 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5367
5368 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5369 queue_dp = true;
5370 DRM_DEBUG("IH: HPD_RX 1\n");
5371 break;
5372 case 7:
5373 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5374 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5375
5376 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5377 queue_dp = true;
5378 DRM_DEBUG("IH: HPD_RX 2\n");
5379 break;
5380 case 8:
5381 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5382 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5383
5384 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5385 queue_dp = true;
5386 DRM_DEBUG("IH: HPD_RX 3\n");
5387 break;
5388 case 9:
5389 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5390 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5391
5392 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5393 queue_dp = true;
5394 DRM_DEBUG("IH: HPD_RX 4\n");
5395 break;
5396 case 10:
5397 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5398 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5399
5400 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5401 queue_dp = true;
5402 DRM_DEBUG("IH: HPD_RX 5\n");
5403 break;
5404 case 11:
5405 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5406 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5407
5408 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5409 queue_dp = true;
5410 DRM_DEBUG("IH: HPD_RX 6\n");
5411 break;
5412 default:
5413 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5414 break;
5415 }
5416 break;
5417 case 44: /* hdmi */
5418 switch (src_data) {
5419 case 0:
5420 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5421 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5422
5423 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5424 queue_hdmi = true;
5425 DRM_DEBUG("IH: HDMI0\n");
5426 break;
5427 case 1:
5428 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5429 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5430
5431 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5432 queue_hdmi = true;
5433 DRM_DEBUG("IH: HDMI1\n");
5434 break;
5435 case 2:
5436 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5437 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5438
5439 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5440 queue_hdmi = true;
5441 DRM_DEBUG("IH: HDMI2\n");
5442 break;
5443 case 3:
5444 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5445 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5446
5447 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5448 queue_hdmi = true;
5449 DRM_DEBUG("IH: HDMI3\n");
5450 break;
5451 case 4:
5452 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5453 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5454
5455 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5456 queue_hdmi = true;
5457 DRM_DEBUG("IH: HDMI4\n");
5458 break;
5459 case 5:
5460 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5461 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5462
5463 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5464 queue_hdmi = true;
5465 DRM_DEBUG("IH: HDMI5\n");
5466 break;
5467 default:
5468 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5469 break;
5470 }
5471 break;
5472 case 96:
5473 DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5474 WREG32(SRBM_INT_ACK, 0x1);
5475 break;
5476 case 124: /* UVD */
5477 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5478 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5479 break;
5480 case 146:
5481 case 147:
5482 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5483 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5484 /* reset addr and status */
5485 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5486 if (addr == 0x0 && status == 0x0)
5487 break;
5488 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5489 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
5490 addr);
5491 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5492 status);
5493 cayman_vm_decode_fault(rdev, status, addr);
5494 break;
5495 case 176: /* CP_INT in ring buffer */
5496 case 177: /* CP_INT in IB1 */
5497 case 178: /* CP_INT in IB2 */
5498 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5499 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5500 break;
5501 case 181: /* CP EOP event */
5502 DRM_DEBUG("IH: CP EOP\n");
5503 if (rdev->family >= CHIP_CAYMAN) {
5504 switch (src_data) {
5505 case 0:
5506 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5507 break;
5508 case 1:
5509 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5510 break;
5511 case 2:
5512 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5513 break;
5514 }
5515 } else
5516 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5517 break;
5518 case 224: /* DMA trap event */
5519 DRM_DEBUG("IH: DMA trap\n");
5520 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5521 break;
5522 case 230: /* thermal low to high */
5523 DRM_DEBUG("IH: thermal low to high\n");
5524 rdev->pm.dpm.thermal.high_to_low = false;
5525 queue_thermal = true;
5526 break;
5527 case 231: /* thermal high to low */
5528 DRM_DEBUG("IH: thermal high to low\n");
5529 rdev->pm.dpm.thermal.high_to_low = true;
5530 queue_thermal = true;
5531 break;
5532 case 233: /* GUI IDLE */
5533 DRM_DEBUG("IH: GUI idle\n");
5534 break;
5535 case 244: /* DMA trap event */
5536 if (rdev->family >= CHIP_CAYMAN) {
5537 DRM_DEBUG("IH: DMA1 trap\n");
5538 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5539 }
5540 break;
5541 default:
5542 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5543 break;
5544 }
5545
5546 /* wptr/rptr are in bytes! */
5547 rptr += 16;
5548 rptr &= rdev->ih.ptr_mask;
5549 WREG32(IH_RB_RPTR, rptr);
5550 }
5551 if (queue_dp)
5552 schedule_work(&rdev->dp_work);
5553 if (queue_hotplug)
5554 schedule_delayed_work(&rdev->hotplug_work, 0);
5555 if (queue_hdmi)
5556 schedule_work(&rdev->audio_work);
5557 if (queue_thermal && rdev->pm.dpm_enabled)
5558 schedule_work(&rdev->pm.dpm.thermal.work);
5559 rdev->ih.rptr = rptr;
5560 atomic_set(&rdev->ih.lock, 0);
5561
5562 /* make sure wptr hasn't changed while processing */
5563 wptr = evergreen_get_ih_wptr(rdev);
5564 if (wptr != rptr)
5565 goto restart_ih;
5566
5567 return IRQ_HANDLED;
5568 }
5569
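/**
 * evergreen_startup - program the asic and start the rings
 *
 * @rdev: radeon_device pointer
 *
 * Program the MC and GART, load microcode, set up writeback,
 * fences and interrupts, and bring up the CP, DMA and UVD
 * rings, the IB pool and audio.  Called at init and resume time.
 * Returns 0 for success, error for failure.
 */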
5570 static int evergreen_startup(struct radeon_device *rdev)
5571 {
5572 struct radeon_ring *ring;
5573 int r;
5574
5575 /* enable pcie gen2 link */
5576 evergreen_pcie_gen2_enable(rdev);
5577 /* enable aspm */
5578 evergreen_program_aspm(rdev);
5579
5580 /* scratch needs to be initialized before MC */
5581 r = r600_vram_scratch_init(rdev);
5582 if (r)
5583 return r;
5584
5585 evergreen_mc_program(rdev);
5586
5587 if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5588 r = ni_mc_load_microcode(rdev);
5589 if (r) {
5590 DRM_ERROR("Failed to load MC firmware!\n");
5591 return r;
5592 }
5593 }
5594
5595 if (rdev->flags & RADEON_IS_AGP) {
5596 evergreen_agp_enable(rdev);
5597 } else {
5598 r = evergreen_pcie_gart_enable(rdev);
5599 if (r)
5600 return r;
5601 }
5602 evergreen_gpu_init(rdev);
5603
5604 /* allocate rlc buffers */
5605 if (rdev->flags & RADEON_IS_IGP) {
5606 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5607 rdev->rlc.reg_list_size =
5608 (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5609 rdev->rlc.cs_data = evergreen_cs_data;
5610 r = sumo_rlc_init(rdev);
5611 if (r) {
5612 DRM_ERROR("Failed to init rlc BOs!\n");
5613 return r;
5614 }
5615 }
5616
5617 /* allocate wb buffer */
5618 r = radeon_wb_init(rdev);
5619 if (r)
5620 return r;
5621
5622 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5623 if (r) {
5624 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5625 return r;
5626 }
5627
5628 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5629 if (r) {
5630 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5631 return r;
5632 }
5633
5634 r = uvd_v2_2_resume(rdev);
5635 if (!r) {
5636 r = radeon_fence_driver_start_ring(rdev,
5637 R600_RING_TYPE_UVD_INDEX);
5638 if (r)
5639 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5640 }
5641
5642 if (r)
5643 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5644
5645 /* Enable IRQ */
5646 if (!rdev->irq.installed) {
5647 r = radeon_irq_kms_init(rdev);
5648 if (r)
5649 return r;
5650 }
5651
5652 r = r600_irq_init(rdev);
5653 if (r) {
5654 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5655 radeon_irq_kms_fini(rdev);
5656 return r;
5657 }
5658 evergreen_irq_set(rdev);
5659
5660 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5661 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5662 RADEON_CP_PACKET2);
5663 if (r)
5664 return r;
5665
5666 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5667 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5668 DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5669 if (r)
5670 return r;
5671
5672 r = evergreen_cp_load_microcode(rdev);
5673 if (r)
5674 return r;
5675 r = evergreen_cp_resume(rdev);
5676 if (r)
5677 return r;
5678 r = r600_dma_resume(rdev);
5679 if (r)
5680 return r;
5681
5682 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5683 if (ring->ring_size) {
5684 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5685 RADEON_CP_PACKET2);
5686 if (!r)
5687 r = uvd_v1_0_init(rdev);
5688
5689 if (r)
5690 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5691 }
5692
5693 r = radeon_ib_pool_init(rdev);
5694 if (r) {
5695 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5696 return r;
5697 }
5698
5699 r = radeon_audio_init(rdev);
5700 if (r) {
5701 DRM_ERROR("radeon: audio init failed\n");
5702 return r;
5703 }
5704
5705 return 0;
5706 }
5707
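/**
 * evergreen_resume - resume the asic
 *
 * @rdev: radeon_device pointer
 *
 * Reset and re-post the asic, restore the golden registers and
 * bring the hw back up via evergreen_startup().
 * Returns 0 for success, error for failure.
 */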
5708 int evergreen_resume(struct radeon_device *rdev)
5709 {
5710 int r;
5711
5712 	/* Reset the asic; the gfx blocks are often in a bad state
5713 	 * after the driver is unloaded or after a resume.
5714 	 */
5715 	if (radeon_asic_reset(rdev))
5716 		dev_warn(rdev->dev, "GPU reset failed!\n");
5717 	/* Do not reset the GPU before posting: on rv770 hw, unlike on
5718 	 * r500 hw, posting will perform the necessary tasks to bring
5719 	 * the GPU back into good shape.
5720 	 */
5721 /* post card */
5722 atom_asic_init(rdev->mode_info.atom_context);
5723
5724 /* init golden registers */
5725 evergreen_init_golden_registers(rdev);
5726
5727 if (rdev->pm.pm_method == PM_METHOD_DPM)
5728 radeon_pm_resume(rdev);
5729
5730 rdev->accel_working = true;
5731 r = evergreen_startup(rdev);
5732 if (r) {
5733 DRM_ERROR("evergreen startup failed on resume\n");
5734 rdev->accel_working = false;
5735 return r;
5736 }
5737
5738 return r;
5739
5740 }
5741
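/**
 * evergreen_suspend - suspend the asic
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hw in preparation for suspend: stop the CP, DMA
 * and UVD blocks, disable interrupts, then disable writeback and
 * the GART.
 * Returns 0 (always succeeds).
 */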
5742 int evergreen_suspend(struct radeon_device *rdev)
5743 {
5744 radeon_pm_suspend(rdev);
5745 radeon_audio_fini(rdev);
5746 uvd_v1_0_fini(rdev);
5747 radeon_uvd_suspend(rdev);
5748 r700_cp_stop(rdev);
5749 r600_dma_stop(rdev);
5750 evergreen_irq_suspend(rdev);
5751 radeon_wb_disable(rdev);
5752 evergreen_pcie_gart_disable(rdev);
5753
5754 return 0;
5755 }
5756
5757 /* The plan is to move initialization into this function and to use
5758  * helper functions so that radeon_device_init does pretty much
5759  * nothing more than call asic-specific functions. This should
5760  * also allow us to remove a bunch of callback functions like
5761  * vram_info.
5762  */
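/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * Read the BIOS, reset and post the card if necessary, set up
 * the memory controller, fence driver, rings and microcode, and
 * start the hw via evergreen_startup().
 * Returns 0 for success, error for failure.
 */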
5763 int evergreen_init(struct radeon_device *rdev)
5764 {
5765 int r;
5766
5767 /* Read BIOS */
5768 if (!radeon_get_bios(rdev)) {
5769 if (ASIC_IS_AVIVO(rdev))
5770 return -EINVAL;
5771 }
5772 /* Must be an ATOMBIOS */
5773 if (!rdev->is_atom_bios) {
5774 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5775 return -EINVAL;
5776 }
5777 r = radeon_atombios_init(rdev);
5778 if (r)
5779 return r;
5780 	/* Reset the asic; the gfx blocks are often in a bad state
5781 	 * after the driver is unloaded or after a resume.
5782 	 */
5783 	if (radeon_asic_reset(rdev))
5784 		dev_warn(rdev->dev, "GPU reset failed!\n");
5785 /* Post card if necessary */
5786 if (!radeon_card_posted(rdev)) {
5787 if (!rdev->bios) {
5788 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5789 return -EINVAL;
5790 }
5791 DRM_INFO("GPU not posted. posting now...\n");
5792 atom_asic_init(rdev->mode_info.atom_context);
5793 }
5794 /* init golden registers */
5795 evergreen_init_golden_registers(rdev);
5796 /* Initialize scratch registers */
5797 r600_scratch_init(rdev);
5798 /* Initialize surface registers */
5799 radeon_surface_init(rdev);
5800 /* Initialize clocks */
5801 radeon_get_clock_info(rdev->ddev);
5802 /* Fence driver */
5803 r = radeon_fence_driver_init(rdev);
5804 if (r)
5805 return r;
5806 /* initialize AGP */
5807 if (rdev->flags & RADEON_IS_AGP) {
5808 r = radeon_agp_init(rdev);
5809 if (r)
5810 radeon_agp_disable(rdev);
5811 }
5812 /* initialize memory controller */
5813 r = evergreen_mc_init(rdev);
5814 if (r)
5815 return r;
5816 /* Memory manager */
5817 r = radeon_bo_init(rdev);
5818 if (r)
5819 return r;
5820
5821 if (ASIC_IS_DCE5(rdev)) {
5822 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5823 r = ni_init_microcode(rdev);
5824 if (r) {
5825 DRM_ERROR("Failed to load firmware!\n");
5826 return r;
5827 }
5828 }
5829 } else {
5830 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5831 r = r600_init_microcode(rdev);
5832 if (r) {
5833 DRM_ERROR("Failed to load firmware!\n");
5834 return r;
5835 }
5836 }
5837 }
5838
5839 /* Initialize power management */
5840 radeon_pm_init(rdev);
5841
5842 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5843 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5844
5845 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5846 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5847
5848 r = radeon_uvd_init(rdev);
5849 if (!r) {
5850 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5851 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5852 4096);
5853 }
5854
5855 rdev->ih.ring_obj = NULL;
5856 r600_ih_ring_init(rdev, 64 * 1024);
5857
5858 r = r600_pcie_gart_init(rdev);
5859 if (r)
5860 return r;
5861
5862 rdev->accel_working = true;
5863 r = evergreen_startup(rdev);
5864 if (r) {
5865 dev_err(rdev->dev, "disabling GPU acceleration\n");
5866 r700_cp_fini(rdev);
5867 r600_dma_fini(rdev);
5868 r600_irq_fini(rdev);
5869 if (rdev->flags & RADEON_IS_IGP)
5870 sumo_rlc_fini(rdev);
5871 radeon_wb_fini(rdev);
5872 radeon_ib_pool_fini(rdev);
5873 radeon_irq_kms_fini(rdev);
5874 evergreen_pcie_gart_fini(rdev);
5875 rdev->accel_working = false;
5876 }
5877
5878 	/* Don't start up if the MC ucode is missing on BTC parts.
5879 	 * The default clocks and voltages before the MC ucode
5880 	 * is loaded are not sufficient for advanced operations.
5881 	 */
5882 if (ASIC_IS_DCE5(rdev)) {
5883 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5884 DRM_ERROR("radeon: MC ucode required for NI+.\n");
5885 return -EINVAL;
5886 }
5887 }
5888
5889 return 0;
5890 }
5891
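/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the asic, largely in the reverse order of init: stop
 * the rings and interrupts, free the IB pool, writeback, GART and
 * buffer objects, and release the BIOS image.
 */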
5892 void evergreen_fini(struct radeon_device *rdev)
5893 {
5894 radeon_pm_fini(rdev);
5895 radeon_audio_fini(rdev);
5896 r700_cp_fini(rdev);
5897 r600_dma_fini(rdev);
5898 r600_irq_fini(rdev);
5899 if (rdev->flags & RADEON_IS_IGP)
5900 sumo_rlc_fini(rdev);
5901 radeon_wb_fini(rdev);
5902 radeon_ib_pool_fini(rdev);
5903 radeon_irq_kms_fini(rdev);
5904 uvd_v1_0_fini(rdev);
5905 radeon_uvd_fini(rdev);
5906 evergreen_pcie_gart_fini(rdev);
5907 r600_vram_scratch_fini(rdev);
5908 radeon_gem_fini(rdev);
5909 radeon_fence_driver_fini(rdev);
5910 radeon_agp_fini(rdev);
5911 radeon_bo_fini(rdev);
5912 radeon_atombios_fini(rdev);
5913 kfree(rdev->bios);
5914 rdev->bios = NULL;
5915 }
5916
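/**
 * evergreen_pcie_gen2_enable - enable pcie gen2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Switch the PCIE link to gen2 speeds where the board and the
 * upstream bridge support it.  Skipped for IGP, non-PCIE and X2
 * boards, when disabled with radeon.pcie_gen2=0, and (XXX) on
 * NetBSD, where it is stubbed out entirely.
 */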
5917 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5918 {
5919 #ifndef __NetBSD__ /* XXX radeon pcie */
5920 u32 link_width_cntl, speed_cntl;
5921
5922 if (radeon_pcie_gen2 == 0)
5923 return;
5924
5925 if (rdev->flags & RADEON_IS_IGP)
5926 return;
5927
5928 if (!(rdev->flags & RADEON_IS_PCIE))
5929 return;
5930
5931 /* x2 cards have a special sequence */
5932 if (ASIC_IS_X2(rdev))
5933 return;
5934
5935 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5936 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5937 return;
5938
5939 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5940 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5941 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5942 return;
5943 }
5944
5945 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5946
5947 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5948 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5949
5950 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5951 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5952 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5953
5954 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5955 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5956 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5957
5958 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5959 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5960 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5961
5962 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5963 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5964 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5965
5966 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5967 speed_cntl |= LC_GEN2_EN_STRAP;
5968 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5969
5970 } else {
5971 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5972 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5973 if (1)
5974 link_width_cntl |= LC_UPCONFIGURE_DIS;
5975 else
5976 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5977 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5978 }
5979 #endif
5980 }
5981
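/**
 * evergreen_program_aspm - program pcie ASPM settings
 *
 * @rdev: radeon_device pointer
 *
 * Program the PIF/PHY and link controller registers for PCIE
 * active state power management (L0s/L1), with per-family tuning
 * of the inactivity timeouts and PLL power states.  Does nothing
 * when aspm is disabled (radeon_aspm == 0) or on non-PCIE boards.
 */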
5982 void evergreen_program_aspm(struct radeon_device *rdev)
5983 {
5984 u32 data, orig;
5985 u32 pcie_lc_cntl, pcie_lc_cntl_old;
5986 bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5987 	/* fusion_platform should be true if the system is a fusion
5988 	 * system (an APU, or a dGPU in a fusion system).
5989 	 * TODO: check whether the system is actually a fusion
5990 	 * platform.
5991 	 */
5992 bool fusion_platform = false;
5993
5994 if (radeon_aspm == 0)
5995 return;
5996
5997 if (!(rdev->flags & RADEON_IS_PCIE))
5998 return;
5999
6000 switch (rdev->family) {
6001 case CHIP_CYPRESS:
6002 case CHIP_HEMLOCK:
6003 case CHIP_JUNIPER:
6004 case CHIP_REDWOOD:
6005 case CHIP_CEDAR:
6006 case CHIP_SUMO:
6007 case CHIP_SUMO2:
6008 case CHIP_PALM:
6009 case CHIP_ARUBA:
6010 disable_l0s = true;
6011 break;
6012 default:
6013 disable_l0s = false;
6014 break;
6015 }
6016
6017 if (rdev->flags & RADEON_IS_IGP)
6018 fusion_platform = true; /* XXX also dGPUs in a fusion system */
6019
6020 data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
6021 if (fusion_platform)
6022 data &= ~MULTI_PIF;
6023 else
6024 data |= MULTI_PIF;
6025 if (data != orig)
6026 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6027
6028 data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6029 if (fusion_platform)
6030 data &= ~MULTI_PIF;
6031 else
6032 data |= MULTI_PIF;
6033 if (data != orig)
6034 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6035
6036 pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6037 pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6038 if (!disable_l0s) {
6039 if (rdev->family >= CHIP_BARTS)
6040 pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6041 else
6042 pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
6043 }
6044
6045 if (!disable_l1) {
6046 if (rdev->family >= CHIP_BARTS)
6047 pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6048 else
6049 pcie_lc_cntl |= LC_L1_INACTIVITY(8);
6050
6051 if (!disable_plloff_in_l1) {
6052 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6053 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6054 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6055 if (data != orig)
6056 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6057
6058 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6059 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6060 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6061 if (data != orig)
6062 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6063
6064 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6065 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6066 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6067 if (data != orig)
6068 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6069
6070 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6071 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6072 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6073 if (data != orig)
6074 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6075
6076 if (rdev->family >= CHIP_BARTS) {
6077 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6078 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6079 data |= PLL_RAMP_UP_TIME_0(4);
6080 if (data != orig)
6081 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6082
6083 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6084 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6085 data |= PLL_RAMP_UP_TIME_1(4);
6086 if (data != orig)
6087 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6088
6089 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6090 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6091 data |= PLL_RAMP_UP_TIME_0(4);
6092 if (data != orig)
6093 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6094
6095 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6096 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6097 data |= PLL_RAMP_UP_TIME_1(4);
6098 if (data != orig)
6099 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6100 }
6101
6102 data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6103 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6104 data |= LC_DYN_LANES_PWR_STATE(3);
6105 if (data != orig)
6106 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6107
6108 if (rdev->family >= CHIP_BARTS) {
6109 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6110 data &= ~LS2_EXIT_TIME_MASK;
6111 data |= LS2_EXIT_TIME(1);
6112 if (data != orig)
6113 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6114
6115 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6116 data &= ~LS2_EXIT_TIME_MASK;
6117 data |= LS2_EXIT_TIME(1);
6118 if (data != orig)
6119 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6120 }
6121 }
6122 }
6123
6124 /* evergreen parts only */
6125 if (rdev->family < CHIP_BARTS)
6126 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
6127
6128 if (pcie_lc_cntl != pcie_lc_cntl_old)
6129 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
6130 }
6131