/*	$NetBSD: radeon_evergreen.c,v 1.1.2.2 2018/09/06 06:56:32 pgoyette Exp $	*/

/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: radeon_evergreen.c,v 1.1.2.2 2018/09/06 06:56:32 pgoyette Exp $");

#include <linux/bitops.h>
#include <linux/firmware.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

/*
 * Indirect registers accessor
 */
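/*
 * These helpers go through the hardware's index/data register pairs: the
 * register offset is written to the block's INDEX/ADDR register and the
 * payload is then moved through the matching DATA register.  The spinlock
 * keeps the two-step sequence atomic against concurrent accessors.
 */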
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

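/* Register apertures of the six display controllers, indexed by CRTC id. */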
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

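/* Registers the RLC saves and restores around dynamic power gating;
 * used when the RLC save/restore buffer is built. */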
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

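/*
 * Golden register tables are {offset, mask, value} triplets consumed by
 * radeon_program_register_sequence(): the masked bits of each register are
 * replaced with the corresponding bits of the value (a mask of 0xffffffff
 * writes the value outright).
 */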
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

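/*
 * Decode the bank width/height, macro tile aspect and tile split fields
 * packed into the tiling flags into the EVERGREEN_ADDR_SURF_* enumerants
 * the hardware expects.
 */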
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

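/*
 * Program a single UVD clock: look up PLL dividers for the requested
 * frequency in the ATOM tables, write the post divider, then poll for up to
 * a second for the status bit that signals the new divider has taken hold.
 */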
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
#ifndef __NetBSD__		/* XXX radeon pcie */
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
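	/* MAX_READ_REQUEST_SIZE encodes 128 << v bytes, so a power-of-two
	 * readrq maps to v = log2(readrq) - 7 (ffs() is 1-based). */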
	v = ffs(readrq) - 8;
	/* if BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
#endif
}

void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

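	/* wait for the start of the next vblank; again bail out if the
	 * timing generator has stopped advancing. */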
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
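		/* ASIC_T is a signed reading in half degrees C: clamp the
		 * out-of-range encodings, sign-extend 9-bit negatives, then
		 * convert to millidegrees below. */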
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

int sumo_get_temp(struct radeon_device *rdev)
{
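	/* the on-chip sensor reports degrees C with a fixed +49 bias */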
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

/**
 * evergreen_pm_prepare - pre-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Prepare for a power state change (evergreen+).
 */
void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

/**
 * evergreen_pm_finish - post-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Clean up after a power state change (evergreen+).
 */
void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

/**
 * evergreen_hpd_sense - hpd sense callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Checks if a digital monitor is connected (evergreen+).
 * Returns true if connected, false if not connected.
 */
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

/**
 * evergreen_hpd_set_polarity - hpd set polarity callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Set the polarity of the hpd pin (evergreen+).
 */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

/**
 * evergreen_hpd_init - hpd setup callback.
 *
 * @rdev: radeon_device pointer
 *
 * Setup the hpd pins used by the card (evergreen+).
 * Enable the pin, set the polarity, and enable the hpd interrupts.
 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
1836 unsigned enabled = 0;
1837 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1838 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1839
1840 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1841 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1842
1843 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1844 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS to avoid breaking the
			 * aux dp channel on imac; this helps (but does not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * and also avoids interrupt storms during dpms.
			 */
1850 continue;
1851 }
1852 switch (radeon_connector->hpd.hpd) {
1853 case RADEON_HPD_1:
1854 WREG32(DC_HPD1_CONTROL, tmp);
1855 break;
1856 case RADEON_HPD_2:
1857 WREG32(DC_HPD2_CONTROL, tmp);
1858 break;
1859 case RADEON_HPD_3:
1860 WREG32(DC_HPD3_CONTROL, tmp);
1861 break;
1862 case RADEON_HPD_4:
1863 WREG32(DC_HPD4_CONTROL, tmp);
1864 break;
1865 case RADEON_HPD_5:
1866 WREG32(DC_HPD5_CONTROL, tmp);
1867 break;
1868 case RADEON_HPD_6:
1869 WREG32(DC_HPD6_CONTROL, tmp);
1870 break;
1871 default:
1872 break;
1873 }
1874 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1875 enabled |= 1 << radeon_connector->hpd.hpd;
1876 }
1877 radeon_irq_kms_enable_hpd(rdev, enabled);
1878 }
1879
1880 /**
1881 * evergreen_hpd_fini - hpd tear down callback.
1882 *
1883 * @rdev: radeon_device pointer
1884 *
1885 * Tear down the hpd pins used by the card (evergreen+).
1886 * Disable the hpd interrupts.
1887 */
1888 void evergreen_hpd_fini(struct radeon_device *rdev)
1889 {
1890 struct drm_device *dev = rdev->ddev;
1891 struct drm_connector *connector;
1892 unsigned disabled = 0;
1893
1894 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1895 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1896 switch (radeon_connector->hpd.hpd) {
1897 case RADEON_HPD_1:
1898 WREG32(DC_HPD1_CONTROL, 0);
1899 break;
1900 case RADEON_HPD_2:
1901 WREG32(DC_HPD2_CONTROL, 0);
1902 break;
1903 case RADEON_HPD_3:
1904 WREG32(DC_HPD3_CONTROL, 0);
1905 break;
1906 case RADEON_HPD_4:
1907 WREG32(DC_HPD4_CONTROL, 0);
1908 break;
1909 case RADEON_HPD_5:
1910 WREG32(DC_HPD5_CONTROL, 0);
1911 break;
1912 case RADEON_HPD_6:
1913 WREG32(DC_HPD6_CONTROL, 0);
1914 break;
1915 default:
1916 break;
1917 }
1918 disabled |= 1 << radeon_connector->hpd.hpd;
1919 }
1920 radeon_irq_kms_disable_hpd(rdev, disabled);
1921 }
1922
1923 /* watermark setup */
1924
1925 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1926 struct radeon_crtc *radeon_crtc,
1927 struct drm_display_mode *mode,
1928 struct drm_display_mode *other_mode)
1929 {
1930 u32 tmp, buffer_alloc, i;
1931 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1932 /*
1933 * Line Buffer Setup
1934 * There are 3 line buffers, each one shared by 2 display controllers.
1935 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers. The partitioning is done via one of four
1937 * preset allocations specified in bits 2:0:
1938 * first display controller
1939 * 0 - first half of lb (3840 * 2)
1940 * 1 - first 3/4 of lb (5760 * 2)
1941 * 2 - whole lb (7680 * 2), other crtc must be disabled
1942 * 3 - first 1/4 of lb (1920 * 2)
1943 * second display controller
1944 * 4 - second half of lb (3840 * 2)
1945 * 5 - second 3/4 of lb (5760 * 2)
1946 * 6 - whole lb (7680 * 2), other crtc must be disabled
1947 * 7 - last 1/4 of lb (1920 * 2)
1948 */
1949 /* this can get tricky if we have two large displays on a paired group
1950 * of crtcs. Ideally for multiple large displays we'd assign them to
1951 * non-linked crtcs for maximum line buffer allocation.
1952 */
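	/*
	 * Worked example of the presets above: two active 1920 wide modes
	 * on a paired group get half of the lb each (presets 0/4, 3840 * 2),
	 * while a single active mode on the pair gets the whole lb
	 * (presets 2/6, 7680 * 2); this matches the tmp values chosen below.
	 */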
1953 if (radeon_crtc->base.enabled && mode) {
1954 if (other_mode) {
1955 tmp = 0; /* 1/2 */
1956 buffer_alloc = 1;
1957 } else {
1958 tmp = 2; /* whole */
1959 buffer_alloc = 2;
1960 }
1961 } else {
1962 tmp = 0;
1963 buffer_alloc = 0;
1964 }
1965
1966 /* second controller of the pair uses second half of the lb */
1967 if (radeon_crtc->crtc_id % 2)
1968 tmp += 4;
1969 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1970
1971 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1972 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1973 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1974 for (i = 0; i < rdev->usec_timeout; i++) {
1975 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1976 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1977 break;
1978 udelay(1);
1979 }
1980 }
1981
1982 if (radeon_crtc->base.enabled && mode) {
1983 switch (tmp) {
1984 case 0:
1985 case 4:
1986 default:
1987 if (ASIC_IS_DCE5(rdev))
1988 return 4096 * 2;
1989 else
1990 return 3840 * 2;
1991 case 1:
1992 case 5:
1993 if (ASIC_IS_DCE5(rdev))
1994 return 6144 * 2;
1995 else
1996 return 5760 * 2;
1997 case 2:
1998 case 6:
1999 if (ASIC_IS_DCE5(rdev))
2000 return 8192 * 2;
2001 else
2002 return 7680 * 2;
2003 case 3:
2004 case 7:
2005 if (ASIC_IS_DCE5(rdev))
2006 return 2048 * 2;
2007 else
2008 return 1920 * 2;
2009 }
2010 }
2011
2012 /* controller not enabled, so no lb used */
2013 return 0;
2014 }
2015
2016 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2017 {
2018 u32 tmp = RREG32(MC_SHARED_CHMAP);
2019
2020 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2021 case 0:
2022 default:
2023 return 1;
2024 case 1:
2025 return 2;
2026 case 2:
2027 return 4;
2028 case 3:
2029 return 8;
2030 }
2031 }
2032
2033 struct evergreen_wm_params {
2034 u32 dram_channels; /* number of dram channels */
2035 u32 yclk; /* bandwidth per dram data pin in kHz */
2036 u32 sclk; /* engine clock in kHz */
2037 u32 disp_clk; /* display clock in kHz */
2038 u32 src_width; /* viewport width */
2039 u32 active_time; /* active display time in ns */
2040 u32 blank_time; /* blank time in ns */
2041 bool interlaced; /* mode is interlaced */
2042 fixed20_12 vsc; /* vertical scale ratio */
2043 u32 num_heads; /* number of active crtcs */
2044 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2045 u32 lb_size; /* line buffer allocated to pipe */
2046 u32 vtaps; /* vertical scaler taps */
2047 };
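/*
 * Rough example of the values involved, assuming a 1080p mode with a
 * 148.5 MHz pixel clock and 32bpp: pixel_period = 1000000 / 148500 = 6 ns
 * (integer math), active_time = 1920 * 6 = 11520 ns, line_time = 2200 * 6
 * = 13200 ns, blank_time = 13200 - 11520 = 1680 ns, bytes_per_pixel = 4.
 */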
2048
2049 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2050 {
2051 /* Calculate DRAM Bandwidth and the part allocated to display. */
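	/*
	 * In effect: bandwidth [MB/s] ~= (yclk / 1000) * dram_channels * 4 * 0.7;
	 * e.g. yclk = 800000 kHz with 2 channels gives 800 * 8 * 0.7 = 4480.
	 */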
2052 fixed20_12 dram_efficiency; /* 0.7 */
2053 fixed20_12 yclk, dram_channels, bandwidth;
2054 fixed20_12 a;
2055
2056 a.full = dfixed_const(1000);
2057 yclk.full = dfixed_const(wm->yclk);
2058 yclk.full = dfixed_div(yclk, a);
2059 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2060 a.full = dfixed_const(10);
2061 dram_efficiency.full = dfixed_const(7);
2062 dram_efficiency.full = dfixed_div(dram_efficiency, a);
2063 bandwidth.full = dfixed_mul(dram_channels, yclk);
2064 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2065
2066 return dfixed_trunc(bandwidth);
2067 }
2068
2069 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2070 {
2071 /* Calculate DRAM Bandwidth and the part allocated to display. */
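	/*
	 * Same computation as evergreen_dram_bandwidth() but scaled by the
	 * worst case 0.3 display allocation instead of the 0.7 dram
	 * efficiency; e.g. 800 * 8 * 0.3 = 1920 MB/s for the display.
	 */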
2072 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2073 fixed20_12 yclk, dram_channels, bandwidth;
2074 fixed20_12 a;
2075
2076 a.full = dfixed_const(1000);
2077 yclk.full = dfixed_const(wm->yclk);
2078 yclk.full = dfixed_div(yclk, a);
2079 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2080 a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2082 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2083 bandwidth.full = dfixed_mul(dram_channels, yclk);
2084 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2085
2086 return dfixed_trunc(bandwidth);
2087 }
2088
2089 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2090 {
2091 /* Calculate the display Data return Bandwidth */
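	/*
	 * In effect: bandwidth [MB/s] ~= (sclk / 1000) * 32 * 0.8;
	 * e.g. sclk = 600000 kHz gives 600 * 32 * 0.8 = 15360.
	 */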
2092 fixed20_12 return_efficiency; /* 0.8 */
2093 fixed20_12 sclk, bandwidth;
2094 fixed20_12 a;
2095
2096 a.full = dfixed_const(1000);
2097 sclk.full = dfixed_const(wm->sclk);
2098 sclk.full = dfixed_div(sclk, a);
2099 a.full = dfixed_const(10);
2100 return_efficiency.full = dfixed_const(8);
2101 return_efficiency.full = dfixed_div(return_efficiency, a);
2102 a.full = dfixed_const(32);
2103 bandwidth.full = dfixed_mul(a, sclk);
2104 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2105
2106 return dfixed_trunc(bandwidth);
2107 }
2108
2109 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2110 {
2111 /* Calculate the DMIF Request Bandwidth */
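	/*
	 * Same shape as the data return bandwidth but based on the display
	 * clock; e.g. disp_clk = 400000 kHz gives 400 * 32 * 0.8 = 10240.
	 */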
2112 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2113 fixed20_12 disp_clk, bandwidth;
2114 fixed20_12 a;
2115
2116 a.full = dfixed_const(1000);
2117 disp_clk.full = dfixed_const(wm->disp_clk);
2118 disp_clk.full = dfixed_div(disp_clk, a);
2119 a.full = dfixed_const(10);
2120 disp_clk_request_efficiency.full = dfixed_const(8);
2121 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2122 a.full = dfixed_const(32);
2123 bandwidth.full = dfixed_mul(a, disp_clk);
2124 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2125
2126 return dfixed_trunc(bandwidth);
2127 }
2128
2129 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2130 {
	/* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
2132 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2133 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2134 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2135
2136 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2137 }
2138
2139 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2140 {
2141 /* Calculate the display mode Average Bandwidth
2142 * DisplayMode should contain the source and destination dimensions,
2143 * timing, etc.
2144 */
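	/*
	 * In effect: bandwidth [MB/s] ~= src_width * bytes_per_pixel * vsc /
	 * line_time_in_us; e.g. 1920 * 4 * 1.0 / 13.2 ~= 581 for the 1080p
	 * example above.
	 */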
2145 fixed20_12 bpp;
2146 fixed20_12 line_time;
2147 fixed20_12 src_width;
2148 fixed20_12 bandwidth;
2149 fixed20_12 a;
2150
2151 a.full = dfixed_const(1000);
2152 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2153 line_time.full = dfixed_div(line_time, a);
2154 bpp.full = dfixed_const(wm->bytes_per_pixel);
2155 src_width.full = dfixed_const(wm->src_width);
2156 bandwidth.full = dfixed_mul(src_width, bpp);
2157 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2158 bandwidth.full = dfixed_div(bandwidth, line_time);
2159
2160 return dfixed_trunc(bandwidth);
2161 }
2162
2163 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2164 {
	/* First calculate the latency in ns */
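	/*
	 * Worked example with hypothetical numbers: one head, a 148.5 MHz
	 * disp_clk and, say, 4480 MB/s of available bandwidth give
	 * worst_chunk_return_time = 512 * 8 * 1000 / 4480 = 914 ns,
	 * cursor_line_pair_return_time = 128 * 4 * 1000 / 4480 = 114 ns,
	 * dc_latency = 40000000 / 148500 = 269 ns, so
	 * latency = 2000 + (2 * 914 + 114) + 269 = 4211 ns.
	 */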
2166 u32 mc_latency = 2000; /* 2000 ns. */
2167 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2168 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2169 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2170 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2171 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2172 (wm->num_heads * cursor_line_pair_return_time);
2173 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2174 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2175 fixed20_12 a, b, c;
2176
2177 if (wm->num_heads == 0)
2178 return 0;
2179
2180 a.full = dfixed_const(2);
2181 b.full = dfixed_const(1);
2182 if ((wm->vsc.full > a.full) ||
2183 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2184 (wm->vtaps >= 5) ||
2185 ((wm->vsc.full >= a.full) && wm->interlaced))
2186 max_src_lines_per_dst_line = 4;
2187 else
2188 max_src_lines_per_dst_line = 2;
2189
2190 a.full = dfixed_const(available_bandwidth);
2191 b.full = dfixed_const(wm->num_heads);
2192 a.full = dfixed_div(a, b);
2193
2194 b.full = dfixed_const(1000);
2195 c.full = dfixed_const(wm->disp_clk);
2196 b.full = dfixed_div(c, b);
2197 c.full = dfixed_const(wm->bytes_per_pixel);
2198 b.full = dfixed_mul(b, c);
2199
2200 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2201
2202 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2203 b.full = dfixed_const(1000);
2204 c.full = dfixed_const(lb_fill_bw);
2205 b.full = dfixed_div(c, b);
2206 a.full = dfixed_div(a, b);
2207 line_fill_time = dfixed_trunc(a);
2208
2209 if (line_fill_time < wm->active_time)
2210 return latency;
2211 else
		return latency + (line_fill_time - wm->active_time);
}
2215
2216 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2217 {
2218 if (evergreen_average_bandwidth(wm) <=
2219 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2220 return true;
2221 else
2222 return false;
}
2224
2225 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2226 {
2227 if (evergreen_average_bandwidth(wm) <=
2228 (evergreen_available_bandwidth(wm) / wm->num_heads))
2229 return true;
2230 else
2231 return false;
}
2233
2234 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2235 {
2236 u32 lb_partitions = wm->lb_size / wm->src_width;
2237 u32 line_time = wm->active_time + wm->blank_time;
2238 u32 latency_tolerant_lines;
2239 u32 latency_hiding;
2240 fixed20_12 a;
2241
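	/*
	 * Worked example with hypothetical numbers: lb_size = 7680 and
	 * src_width = 1920 give 4 lb partitions; with vsc <= 1 and vtaps = 1
	 * that allows 2 latency tolerant lines, so latency_hiding =
	 * 2 * 13200 + 1680 = 28080 ns, comfortably above the ~4211 ns
	 * watermark from the example above.
	 */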
2242 a.full = dfixed_const(1);
2243 if (wm->vsc.full > a.full)
2244 latency_tolerant_lines = 1;
2245 else {
2246 if (lb_partitions <= (wm->vtaps + 1))
2247 latency_tolerant_lines = 1;
2248 else
2249 latency_tolerant_lines = 2;
2250 }
2251
2252 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2253
2254 if (evergreen_latency_watermark(wm) <= latency_hiding)
2255 return true;
2256 else
2257 return false;
2258 }
2259
2260 static void evergreen_program_watermarks(struct radeon_device *rdev,
2261 struct radeon_crtc *radeon_crtc,
2262 u32 lb_size, u32 num_heads)
2263 {
2264 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2265 struct evergreen_wm_params wm_low, wm_high;
2266 u32 dram_channels;
2267 u32 pixel_period;
2268 u32 line_time = 0;
2269 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2270 u32 priority_a_mark = 0, priority_b_mark = 0;
2271 u32 priority_a_cnt = PRIORITY_OFF;
2272 u32 priority_b_cnt = PRIORITY_OFF;
2273 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2274 u32 tmp, arb_control3;
2275 fixed20_12 a, b, c;
2276
2277 if (radeon_crtc->base.enabled && num_heads && mode) {
2278 pixel_period = 1000000 / (u32)mode->clock;
2279 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2280 priority_a_cnt = 0;
2281 priority_b_cnt = 0;
2282 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2283
2284 /* watermark for high clocks */
2285 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2286 wm_high.yclk =
2287 radeon_dpm_get_mclk(rdev, false) * 10;
2288 wm_high.sclk =
2289 radeon_dpm_get_sclk(rdev, false) * 10;
2290 } else {
2291 wm_high.yclk = rdev->pm.current_mclk * 10;
2292 wm_high.sclk = rdev->pm.current_sclk * 10;
2293 }
2294
2295 wm_high.disp_clk = mode->clock;
2296 wm_high.src_width = mode->crtc_hdisplay;
2297 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2298 wm_high.blank_time = line_time - wm_high.active_time;
2299 wm_high.interlaced = false;
2300 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2301 wm_high.interlaced = true;
2302 wm_high.vsc = radeon_crtc->vsc;
2303 wm_high.vtaps = 1;
2304 if (radeon_crtc->rmx_type != RMX_OFF)
2305 wm_high.vtaps = 2;
2306 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2307 wm_high.lb_size = lb_size;
2308 wm_high.dram_channels = dram_channels;
2309 wm_high.num_heads = num_heads;
2310
2311 /* watermark for low clocks */
2312 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2313 wm_low.yclk =
2314 radeon_dpm_get_mclk(rdev, true) * 10;
2315 wm_low.sclk =
2316 radeon_dpm_get_sclk(rdev, true) * 10;
2317 } else {
2318 wm_low.yclk = rdev->pm.current_mclk * 10;
2319 wm_low.sclk = rdev->pm.current_sclk * 10;
2320 }
2321
2322 wm_low.disp_clk = mode->clock;
2323 wm_low.src_width = mode->crtc_hdisplay;
2324 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2325 wm_low.blank_time = line_time - wm_low.active_time;
2326 wm_low.interlaced = false;
2327 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2328 wm_low.interlaced = true;
2329 wm_low.vsc = radeon_crtc->vsc;
2330 wm_low.vtaps = 1;
2331 if (radeon_crtc->rmx_type != RMX_OFF)
2332 wm_low.vtaps = 2;
2333 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2334 wm_low.lb_size = lb_size;
2335 wm_low.dram_channels = dram_channels;
2336 wm_low.num_heads = num_heads;
2337
2338 /* set for high clocks */
2339 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2340 /* set for low clocks */
2341 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2342
2343 /* possibly force display priority to high */
2344 /* should really do this at mode validation time... */
2345 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2346 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2347 !evergreen_check_latency_hiding(&wm_high) ||
2348 (rdev->disp_priority == 2)) {
2349 DRM_DEBUG_KMS("force priority a to high\n");
2350 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2351 }
2352 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2353 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2354 !evergreen_check_latency_hiding(&wm_low) ||
2355 (rdev->disp_priority == 2)) {
2356 DRM_DEBUG_KMS("force priority b to high\n");
2357 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2358 }
2359
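		/*
		 * The fixed point math below works out to roughly
		 * priority_mark = latency_watermark_ns * pixel_clock_in_MHz *
		 * hsc / 1000 / 16, i.e. the watermark expressed in units of
		 * 16 pixels; e.g. 4211 * 148.5 / 1000 / 16 ~= 39.
		 */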
2360 a.full = dfixed_const(1000);
2361 b.full = dfixed_const(mode->clock);
2362 b.full = dfixed_div(b, a);
2363 c.full = dfixed_const(latency_watermark_a);
2364 c.full = dfixed_mul(c, b);
2365 c.full = dfixed_mul(c, radeon_crtc->hsc);
2366 c.full = dfixed_div(c, a);
2367 a.full = dfixed_const(16);
2368 c.full = dfixed_div(c, a);
2369 priority_a_mark = dfixed_trunc(c);
2370 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2371
2372 a.full = dfixed_const(1000);
2373 b.full = dfixed_const(mode->clock);
2374 b.full = dfixed_div(b, a);
2375 c.full = dfixed_const(latency_watermark_b);
2376 c.full = dfixed_mul(c, b);
2377 c.full = dfixed_mul(c, radeon_crtc->hsc);
2378 c.full = dfixed_div(c, a);
2379 a.full = dfixed_const(16);
2380 c.full = dfixed_div(c, a);
2381 priority_b_mark = dfixed_trunc(c);
2382 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2383
2384 /* Save number of lines the linebuffer leads before the scanout */
2385 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2386 }
2387
2388 /* select wm A */
2389 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2390 tmp = arb_control3;
2391 tmp &= ~LATENCY_WATERMARK_MASK(3);
2392 tmp |= LATENCY_WATERMARK_MASK(1);
2393 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2394 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2395 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2396 LATENCY_HIGH_WATERMARK(line_time)));
2397 /* select wm B */
2398 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2399 tmp &= ~LATENCY_WATERMARK_MASK(3);
2400 tmp |= LATENCY_WATERMARK_MASK(2);
2401 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2402 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2403 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2404 LATENCY_HIGH_WATERMARK(line_time)));
2405 /* restore original selection */
2406 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2407
2408 /* write the priority marks */
2409 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2410 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2411
2412 /* save values for DPM */
2413 radeon_crtc->line_time = line_time;
2414 radeon_crtc->wm_high = latency_watermark_a;
2415 radeon_crtc->wm_low = latency_watermark_b;
2416 }
2417
2418 /**
2419 * evergreen_bandwidth_update - update display watermarks callback.
2420 *
2421 * @rdev: radeon_device pointer
2422 *
2423 * Update the display watermarks based on the requested mode(s)
2424 * (evergreen+).
2425 */
2426 void evergreen_bandwidth_update(struct radeon_device *rdev)
2427 {
2428 struct drm_display_mode *mode0 = NULL;
2429 struct drm_display_mode *mode1 = NULL;
2430 u32 num_heads = 0, lb_size;
2431 int i;
2432
2433 if (!rdev->mode_info.mode_config_initialized)
2434 return;
2435
2436 radeon_update_display_priority(rdev);
2437
2438 for (i = 0; i < rdev->num_crtc; i++) {
2439 if (rdev->mode_info.crtcs[i]->base.enabled)
2440 num_heads++;
2441 }
2442 for (i = 0; i < rdev->num_crtc; i += 2) {
2443 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2444 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2445 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2446 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2447 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2448 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2449 }
2450 }
2451
2452 /**
2453 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2454 *
2455 * @rdev: radeon_device pointer
2456 *
2457 * Wait for the MC (memory controller) to be idle.
2458 * (evergreen+).
2459 * Returns 0 if the MC is idle, -1 if not.
2460 */
2461 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2462 {
2463 unsigned i;
2464 u32 tmp;
2465
2466 for (i = 0; i < rdev->usec_timeout; i++) {
		/* check the MC busy bits in SRBM_STATUS */
2468 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2469 if (!tmp)
2470 return 0;
2471 udelay(1);
2472 }
2473 return -1;
2474 }
2475
2476 /*
2477 * GART
2478 */
2479 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2480 {
2481 unsigned i;
2482 u32 tmp;
2483
2484 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2485
2486 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2487 for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the VM flush request response */
2489 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2490 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2491 if (tmp == 2) {
2492 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2493 return;
2494 }
2495 if (tmp) {
2496 return;
2497 }
2498 udelay(1);
2499 }
2500 }
2501
2502 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2503 {
2504 u32 tmp;
2505 int r;
2506
2507 if (rdev->gart.robj == NULL) {
2508 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2509 return -EINVAL;
2510 }
2511 r = radeon_gart_table_vram_pin(rdev);
2512 if (r)
2513 return r;
2514 /* Setup L2 cache */
2515 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2516 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2517 EFFECTIVE_L2_QUEUE_SIZE(7));
2518 WREG32(VM_L2_CNTL2, 0);
2519 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2520 /* Setup TLB control */
2521 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2522 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2523 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2524 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2525 if (rdev->flags & RADEON_IS_IGP) {
2526 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2527 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2528 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2529 } else {
2530 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2531 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2532 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2533 if ((rdev->family == CHIP_JUNIPER) ||
2534 (rdev->family == CHIP_CYPRESS) ||
2535 (rdev->family == CHIP_HEMLOCK) ||
2536 (rdev->family == CHIP_BARTS))
2537 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2538 }
2539 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2540 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2541 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2542 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
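	/* the GART aperture bounds and the page table base are programmed
	 * as 4 KiB page frame numbers, hence the >> 12 shifts below
	 */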
2543 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2544 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2545 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2546 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2547 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2548 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2549 (u32)(rdev->dummy_page.addr >> 12));
2550 WREG32(VM_CONTEXT1_CNTL, 0);
2551
2552 evergreen_pcie_gart_tlb_flush(rdev);
2553 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2554 (unsigned)(rdev->mc.gtt_size >> 20),
2555 (unsigned long long)rdev->gart.table_addr);
2556 rdev->gart.ready = true;
2557 return 0;
2558 }
2559
2560 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2561 {
2562 u32 tmp;
2563
2564 /* Disable all tables */
2565 WREG32(VM_CONTEXT0_CNTL, 0);
2566 WREG32(VM_CONTEXT1_CNTL, 0);
2567
2568 /* Setup L2 cache */
2569 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2570 EFFECTIVE_L2_QUEUE_SIZE(7));
2571 WREG32(VM_L2_CNTL2, 0);
2572 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2573 /* Setup TLB control */
2574 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2575 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2576 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2577 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2578 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2579 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2580 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2581 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2582 radeon_gart_table_vram_unpin(rdev);
2583 }
2584
2585 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2586 {
2587 evergreen_pcie_gart_disable(rdev);
2588 radeon_gart_table_vram_free(rdev);
2589 radeon_gart_fini(rdev);
}

2593 static void evergreen_agp_enable(struct radeon_device *rdev)
2594 {
2595 u32 tmp;
2596
2597 /* Setup L2 cache */
2598 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2599 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2600 EFFECTIVE_L2_QUEUE_SIZE(7));
2601 WREG32(VM_L2_CNTL2, 0);
2602 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2603 /* Setup TLB control */
2604 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2605 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2606 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2607 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2608 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2609 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2610 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2611 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2612 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2613 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2614 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2615 WREG32(VM_CONTEXT0_CNTL, 0);
2616 WREG32(VM_CONTEXT1_CNTL, 0);
2617 }
2618
2619 static const unsigned ni_dig_offsets[] =
2620 {
2621 NI_DIG0_REGISTER_OFFSET,
2622 NI_DIG1_REGISTER_OFFSET,
2623 NI_DIG2_REGISTER_OFFSET,
2624 NI_DIG3_REGISTER_OFFSET,
2625 NI_DIG4_REGISTER_OFFSET,
2626 NI_DIG5_REGISTER_OFFSET
2627 };
2628
2629 static const unsigned ni_tx_offsets[] =
2630 {
2631 NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2632 NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2633 NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2634 NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2635 NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2636 NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2637 };
2638
2639 static const unsigned evergreen_dp_offsets[] =
2640 {
2641 EVERGREEN_DP0_REGISTER_OFFSET,
2642 EVERGREEN_DP1_REGISTER_OFFSET,
2643 EVERGREEN_DP2_REGISTER_OFFSET,
2644 EVERGREEN_DP3_REGISTER_OFFSET,
2645 EVERGREEN_DP4_REGISTER_OFFSET,
2646 EVERGREEN_DP5_REGISTER_OFFSET
};

/*
 * Assumes EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
 * We go from crtc to connector, which is not reliable since it
 * should really be the opposite direction. If the crtc is enabled,
 * find the dig_fe which selects this crtc and ensure that it is enabled.
 * If such a dig_fe is found, find the dig_be which selects the found
 * dig_fe and ensure that it is enabled and in DP_SST mode.
 * If UNIPHY_PLL_CONTROL1 is enabled, we should disconnect the timing
 * from the dp symbol clocks.
 */
2660 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2661 unsigned crtc_id, unsigned *ret_dig_fe)
2662 {
2663 unsigned i;
2664 unsigned dig_fe;
2665 unsigned dig_be;
2666 unsigned dig_en_be;
2667 unsigned uniphy_pll;
2668 unsigned digs_fe_selected;
2669 unsigned dig_be_mode;
2670 unsigned dig_fe_mask;
2671 bool is_enabled = false;
2672 bool found_crtc = false;
2673
2674 /* loop through all running dig_fe to find selected crtc */
2675 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2676 dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2677 if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2678 crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2679 /* found running pipe */
2680 found_crtc = true;
2681 dig_fe_mask = 1 << i;
2682 dig_fe = i;
2683 break;
2684 }
2685 }
2686
2687 if (found_crtc) {
2688 /* loop through all running dig_be to find selected dig_fe */
2689 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2690 dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* is this dig_fe selected by the dig_be? */
2692 digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2693 dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2694 if (dig_fe_mask & digs_fe_selected &&
			    /* and is the dig_be in sst mode? */
2696 dig_be_mode == NI_DIG_BE_DPSST) {
2697 dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2698 ni_dig_offsets[i]);
2699 uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2700 ni_tx_offsets[i]);
				/* is the dig_be enabled and the tx running? */
2702 if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2703 dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2704 uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2705 is_enabled = true;
2706 *ret_dig_fe = dig_fe;
2707 break;
2708 }
2709 }
2710 }
2711 }
2712
2713 return is_enabled;
2714 }
2715
/*
 * Blank the dig when in dp sst mode; the dig ignores the crtc timing.
 */
2720 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2721 unsigned dig_fe)
2722 {
2723 unsigned stream_ctrl;
2724 unsigned fifo_ctrl;
2725 unsigned counter = 0;
2726
2727 if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2728 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2729 return;
2730 }
2731
2732 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2733 evergreen_dp_offsets[dig_fe]);
2734 if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2735 DRM_ERROR("dig %d , should be enable\n", dig_fe);
2736 return;
2737 }
2738
2739 stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2740 WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2741 evergreen_dp_offsets[dig_fe], stream_ctrl);
2742
2743 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2744 evergreen_dp_offsets[dig_fe]);
2745 while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2746 msleep(1);
2747 counter++;
2748 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2749 evergreen_dp_offsets[dig_fe]);
2750 }
	if (counter >= 32)
		DRM_ERROR("timed out waiting for the DP stream to stop (%d)\n", counter);
2753
2754 fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2755 fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
}
2759
2760 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2761 {
2762 u32 crtc_enabled, tmp, frame_count, blackout;
2763 int i, j;
2764 unsigned dig_fe;
2765
2766 if (!ASIC_IS_NODCE(rdev)) {
2767 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2768 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2769
2770 /* disable VGA render */
2771 WREG32(VGA_RENDER_CONTROL, 0);
2772 }
2773 /* blank the display controllers */
2774 for (i = 0; i < rdev->num_crtc; i++) {
2775 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2776 if (crtc_enabled) {
2777 save->crtc_enabled[i] = true;
2778 if (ASIC_IS_DCE6(rdev)) {
2779 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2780 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2781 radeon_wait_for_vblank(rdev, i);
2782 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2783 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2784 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2785 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2786 }
2787 } else {
2788 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2789 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2790 radeon_wait_for_vblank(rdev, i);
2791 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2792 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2793 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2794 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2795 }
2796 }
2797 /* wait for the next frame */
2798 frame_count = radeon_get_vblank_counter(rdev, i);
2799 for (j = 0; j < rdev->usec_timeout; j++) {
2800 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2801 break;
2802 udelay(1);
2803 }
			/* We should disable the dig if it drives dp sst, but we
			 * are in radeon_device_init where the topology is not yet
			 * known (it only becomes available after radeon_modeset_init).
			 * radeon_atom_encoder_dpms_dig would do the job if we
			 * initialized it properly; for now we do it manually here.
			 */
2811 if (ASIC_IS_DCE5(rdev) &&
2812 evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
2813 evergreen_blank_dp_output(rdev, dig_fe);
			/* we could remove the 6 lines below */
2815 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2816 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2817 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2818 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2819 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2820 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2821 save->crtc_enabled[i] = false;
2823 } else {
2824 save->crtc_enabled[i] = false;
2825 }
2826 }
2827
2828 radeon_mc_wait_for_idle(rdev);
2829
2830 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2831 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2832 /* Block CPU access */
2833 WREG32(BIF_FB_EN, 0);
2834 /* blackout the MC */
2835 blackout &= ~BLACKOUT_MODE_MASK;
2836 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2837 }
2838 /* wait for the MC to settle */
2839 udelay(100);
2840
2841 /* lock double buffered regs */
2842 for (i = 0; i < rdev->num_crtc; i++) {
2843 if (save->crtc_enabled[i]) {
2844 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2845 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2846 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2847 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2848 }
2849 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2850 if (!(tmp & 1)) {
2851 tmp |= 1;
2852 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2853 }
2854 }
2855 }
2856 }
2857
2858 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2859 {
2860 u32 tmp, frame_count;
2861 int i, j;
2862
2863 /* update crtc base addresses */
2864 for (i = 0; i < rdev->num_crtc; i++) {
2865 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2866 upper_32_bits(rdev->mc.vram_start));
2867 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2868 upper_32_bits(rdev->mc.vram_start));
2869 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2870 (u32)rdev->mc.vram_start);
2871 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2872 (u32)rdev->mc.vram_start);
2873 }
2874
2875 if (!ASIC_IS_NODCE(rdev)) {
2876 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2877 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2878 }
2879
2880 /* unlock regs and wait for update */
2881 for (i = 0; i < rdev->num_crtc; i++) {
2882 if (save->crtc_enabled[i]) {
2883 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2884 if ((tmp & 0x7) != 3) {
2885 tmp &= ~0x7;
2886 tmp |= 0x3;
2887 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2888 }
2889 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2890 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2891 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2892 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2893 }
2894 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2895 if (tmp & 1) {
2896 tmp &= ~1;
2897 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2898 }
2899 for (j = 0; j < rdev->usec_timeout; j++) {
2900 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2901 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2902 break;
2903 udelay(1);
2904 }
2905 }
2906 }
2907
2908 /* unblackout the MC */
2909 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2910 tmp &= ~BLACKOUT_MODE_MASK;
2911 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2912 /* allow CPU access */
2913 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2914
2915 for (i = 0; i < rdev->num_crtc; i++) {
2916 if (save->crtc_enabled[i]) {
2917 if (ASIC_IS_DCE6(rdev)) {
2918 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2919 tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2920 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2921 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2922 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2923 } else {
2924 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2925 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2926 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2927 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2928 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2929 }
2930 /* wait for the next frame */
2931 frame_count = radeon_get_vblank_counter(rdev, i);
2932 for (j = 0; j < rdev->usec_timeout; j++) {
2933 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2934 break;
2935 udelay(1);
2936 }
2937 }
2938 }
2939 if (!ASIC_IS_NODCE(rdev)) {
2940 /* Unlock vga access */
2941 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2942 mdelay(1);
2943 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2944 }
2945 }
2946
2947 void evergreen_mc_program(struct radeon_device *rdev)
2948 {
2949 struct evergreen_mc_save save;
2950 u32 tmp;
2951 int i, j;
2952
2953 /* Initialize HDP */
2954 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2955 WREG32((0x2c14 + j), 0x00000000);
2956 WREG32((0x2c18 + j), 0x00000000);
2957 WREG32((0x2c1c + j), 0x00000000);
2958 WREG32((0x2c20 + j), 0x00000000);
2959 WREG32((0x2c24 + j), 0x00000000);
2960 }
2961 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2962
2963 evergreen_mc_stop(rdev, &save);
2964 if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2966 }
	/* Lock out access through the VGA aperture */
2968 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2969 /* Update configuration */
2970 if (rdev->flags & RADEON_IS_AGP) {
2971 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2972 /* VRAM before AGP */
2973 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2974 rdev->mc.vram_start >> 12);
2975 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2976 rdev->mc.gtt_end >> 12);
2977 } else {
2978 /* VRAM after AGP */
2979 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2980 rdev->mc.gtt_start >> 12);
2981 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2982 rdev->mc.vram_end >> 12);
2983 }
2984 } else {
2985 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2986 rdev->mc.vram_start >> 12);
2987 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2988 rdev->mc.vram_end >> 12);
2989 }
2990 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2991 /* llano/ontario only */
2992 if ((rdev->family == CHIP_PALM) ||
2993 (rdev->family == CHIP_SUMO) ||
2994 (rdev->family == CHIP_SUMO2)) {
2995 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2996 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2997 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2998 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2999 }
3000 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3001 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3002 WREG32(MC_VM_FB_LOCATION, tmp);
3003 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3004 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3005 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3006 if (rdev->flags & RADEON_IS_AGP) {
3007 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
3008 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3009 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3010 } else {
3011 WREG32(MC_VM_AGP_BASE, 0);
3012 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3013 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3014 }
3015 if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3017 }
3018 evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it from overwriting our objects */
3021 rv515_vga_render_disable(rdev);
3022 }
3023
3024 /*
3025 * CP.
3026 */
3027 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3028 {
3029 struct radeon_ring *ring = &rdev->ring[ib->ring];
3030 u32 next_rptr;
3031
3032 /* set to DX10/11 mode */
3033 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3034 radeon_ring_write(ring, 1);
3035
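	/* next_rptr points just past this dispatch sequence: 3 dwords for
	 * the SET_CONFIG_REG write (or 5 for the MEM_WRITE) plus 4 for the
	 * INDIRECT_BUFFER packet itself
	 */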
3036 if (ring->rptr_save_reg) {
3037 next_rptr = ring->wptr + 3 + 4;
3038 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3039 radeon_ring_write(ring, ((ring->rptr_save_reg -
3040 PACKET3_SET_CONFIG_REG_START) >> 2));
3041 radeon_ring_write(ring, next_rptr);
3042 } else if (rdev->wb.enabled) {
3043 next_rptr = ring->wptr + 5 + 4;
3044 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3045 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3046 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3047 radeon_ring_write(ring, next_rptr);
3048 radeon_ring_write(ring, 0);
3049 }
3050
3051 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3052 radeon_ring_write(ring,
3053 #ifdef __BIG_ENDIAN
3054 (2 << 0) |
3055 #endif
3056 (ib->gpu_addr & 0xFFFFFFFC));
3057 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3058 radeon_ring_write(ring, ib->length_dw);
}

3062 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3063 {
3064 const __be32 *fw_data;
3065 int i;
3066
3067 if (!rdev->me_fw || !rdev->pfp_fw)
3068 return -EINVAL;
3069
3070 r700_cp_stop(rdev);
3071 WREG32(CP_RB_CNTL,
3072 #ifdef __BIG_ENDIAN
3073 BUF_SWAP_32BIT |
3074 #endif
3075 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3076
3077 fw_data = (const __be32 *)rdev->pfp_fw->data;
3078 WREG32(CP_PFP_UCODE_ADDR, 0);
3079 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3080 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3081 WREG32(CP_PFP_UCODE_ADDR, 0);
3082
3083 fw_data = (const __be32 *)rdev->me_fw->data;
3084 WREG32(CP_ME_RAM_WADDR, 0);
3085 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3086 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3087
3088 WREG32(CP_PFP_UCODE_ADDR, 0);
3089 WREG32(CP_ME_RAM_WADDR, 0);
3090 WREG32(CP_ME_RAM_RADDR, 0);
3091 return 0;
3092 }
3093
3094 static int evergreen_cp_start(struct radeon_device *rdev)
3095 {
3096 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3097 int r, i;
3098 uint32_t cp_me;
3099
3100 r = radeon_ring_lock(rdev, ring, 7);
3101 if (r) {
3102 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3103 return r;
3104 }
3105 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3106 radeon_ring_write(ring, 0x1);
3107 radeon_ring_write(ring, 0x0);
3108 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3109 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3110 radeon_ring_write(ring, 0);
3111 radeon_ring_write(ring, 0);
3112 radeon_ring_unlock_commit(rdev, ring, false);
3113
3114 cp_me = 0xff;
3115 WREG32(CP_ME_CNTL, cp_me);
3116
3117 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3118 if (r) {
3119 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3120 return r;
3121 }
3122
3123 /* setup clear context state */
3124 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3125 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3126
3127 for (i = 0; i < evergreen_default_size; i++)
3128 radeon_ring_write(ring, evergreen_default_state[i]);
3129
3130 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3131 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3132
3133 /* set clear context state */
3134 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3135 radeon_ring_write(ring, 0);
3136
3137 /* SQ_VTX_BASE_VTX_LOC */
3138 radeon_ring_write(ring, 0xc0026f00);
3139 radeon_ring_write(ring, 0x00000000);
3140 radeon_ring_write(ring, 0x00000000);
3141 radeon_ring_write(ring, 0x00000000);
3142
3143 /* Clear consts */
3144 radeon_ring_write(ring, 0xc0036f00);
3145 radeon_ring_write(ring, 0x00000bc4);
3146 radeon_ring_write(ring, 0xffffffff);
3147 radeon_ring_write(ring, 0xffffffff);
3148 radeon_ring_write(ring, 0xffffffff);
3149
3150 radeon_ring_write(ring, 0xc0026900);
3151 radeon_ring_write(ring, 0x00000316);
3152 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3154
3155 radeon_ring_unlock_commit(rdev, ring, false);
3156
3157 return 0;
3158 }
3159
3160 static int evergreen_cp_resume(struct radeon_device *rdev)
3161 {
3162 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3163 u32 tmp;
3164 u32 rb_bufsz;
3165 int r;
3166
3167 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3168 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3169 SOFT_RESET_PA |
3170 SOFT_RESET_SH |
3171 SOFT_RESET_VGT |
3172 SOFT_RESET_SPI |
3173 SOFT_RESET_SX));
3174 RREG32(GRBM_SOFT_RESET);
3175 mdelay(15);
3176 WREG32(GRBM_SOFT_RESET, 0);
3177 RREG32(GRBM_SOFT_RESET);
3178
3179 /* Set ring buffer size */
3180 rb_bufsz = order_base_2(ring->ring_size / 8);
3181 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3182 #ifdef __BIG_ENDIAN
3183 tmp |= BUF_SWAP_32BIT;
3184 #endif
3185 WREG32(CP_RB_CNTL, tmp);
3186 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3187 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3188
3189 /* Set the write pointer delay */
3190 WREG32(CP_RB_WPTR_DELAY, 0);
3191
3192 /* Initialize the ring buffer's read and write pointers */
3193 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3194 WREG32(CP_RB_RPTR_WR, 0);
3195 ring->wptr = 0;
3196 WREG32(CP_RB_WPTR, ring->wptr);
3197
3198 /* set the wb address whether it's enabled or not */
3199 WREG32(CP_RB_RPTR_ADDR,
3200 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3201 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3202 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3203
3204 if (rdev->wb.enabled)
3205 WREG32(SCRATCH_UMSK, 0xff);
3206 else {
3207 tmp |= RB_NO_UPDATE;
3208 WREG32(SCRATCH_UMSK, 0);
3209 }
3210
3211 mdelay(1);
3212 WREG32(CP_RB_CNTL, tmp);
3213
3214 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3215 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3216
3217 evergreen_cp_start(rdev);
3218 ring->ready = true;
3219 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3220 if (r) {
3221 ring->ready = false;
3222 return r;
3223 }
3224 return 0;
3225 }
3226
3227 /*
3228 * Core functions
3229 */
3230 static void evergreen_gpu_init(struct radeon_device *rdev)
3231 {
3232 u32 gb_addr_config;
3233 u32 mc_shared_chmap __unused, mc_arb_ramcfg;
3234 u32 sx_debug_1;
3235 u32 smx_dc_ctl0;
3236 u32 sq_config;
3237 u32 sq_lds_resource_mgmt;
3238 u32 sq_gpr_resource_mgmt_1;
3239 u32 sq_gpr_resource_mgmt_2;
3240 u32 sq_gpr_resource_mgmt_3;
3241 u32 sq_thread_resource_mgmt;
3242 u32 sq_thread_resource_mgmt_2;
3243 u32 sq_stack_resource_mgmt_1;
3244 u32 sq_stack_resource_mgmt_2;
3245 u32 sq_stack_resource_mgmt_3;
3246 u32 vgt_cache_invalidation;
3247 u32 hdp_host_path_cntl, tmp;
3248 u32 disabled_rb_mask;
3249 int i, j, ps_thread_count;
3250
3251 switch (rdev->family) {
3252 case CHIP_CYPRESS:
3253 case CHIP_HEMLOCK:
3254 rdev->config.evergreen.num_ses = 2;
3255 rdev->config.evergreen.max_pipes = 4;
3256 rdev->config.evergreen.max_tile_pipes = 8;
3257 rdev->config.evergreen.max_simds = 10;
3258 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3259 rdev->config.evergreen.max_gprs = 256;
3260 rdev->config.evergreen.max_threads = 248;
3261 rdev->config.evergreen.max_gs_threads = 32;
3262 rdev->config.evergreen.max_stack_entries = 512;
3263 rdev->config.evergreen.sx_num_of_sets = 4;
3264 rdev->config.evergreen.sx_max_export_size = 256;
3265 rdev->config.evergreen.sx_max_export_pos_size = 64;
3266 rdev->config.evergreen.sx_max_export_smx_size = 192;
3267 rdev->config.evergreen.max_hw_contexts = 8;
3268 rdev->config.evergreen.sq_num_cf_insts = 2;
3269
3270 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3271 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3272 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3273 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3274 break;
3275 case CHIP_JUNIPER:
3276 rdev->config.evergreen.num_ses = 1;
3277 rdev->config.evergreen.max_pipes = 4;
3278 rdev->config.evergreen.max_tile_pipes = 4;
3279 rdev->config.evergreen.max_simds = 10;
3280 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3281 rdev->config.evergreen.max_gprs = 256;
3282 rdev->config.evergreen.max_threads = 248;
3283 rdev->config.evergreen.max_gs_threads = 32;
3284 rdev->config.evergreen.max_stack_entries = 512;
3285 rdev->config.evergreen.sx_num_of_sets = 4;
3286 rdev->config.evergreen.sx_max_export_size = 256;
3287 rdev->config.evergreen.sx_max_export_pos_size = 64;
3288 rdev->config.evergreen.sx_max_export_smx_size = 192;
3289 rdev->config.evergreen.max_hw_contexts = 8;
3290 rdev->config.evergreen.sq_num_cf_insts = 2;
3291
3292 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3293 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3294 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3295 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3296 break;
3297 case CHIP_REDWOOD:
3298 rdev->config.evergreen.num_ses = 1;
3299 rdev->config.evergreen.max_pipes = 4;
3300 rdev->config.evergreen.max_tile_pipes = 4;
3301 rdev->config.evergreen.max_simds = 5;
3302 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3303 rdev->config.evergreen.max_gprs = 256;
3304 rdev->config.evergreen.max_threads = 248;
3305 rdev->config.evergreen.max_gs_threads = 32;
3306 rdev->config.evergreen.max_stack_entries = 256;
3307 rdev->config.evergreen.sx_num_of_sets = 4;
3308 rdev->config.evergreen.sx_max_export_size = 256;
3309 rdev->config.evergreen.sx_max_export_pos_size = 64;
3310 rdev->config.evergreen.sx_max_export_smx_size = 192;
3311 rdev->config.evergreen.max_hw_contexts = 8;
3312 rdev->config.evergreen.sq_num_cf_insts = 2;
3313
3314 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3315 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3316 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3317 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3318 break;
3319 case CHIP_CEDAR:
3320 default:
3321 rdev->config.evergreen.num_ses = 1;
3322 rdev->config.evergreen.max_pipes = 2;
3323 rdev->config.evergreen.max_tile_pipes = 2;
3324 rdev->config.evergreen.max_simds = 2;
3325 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3326 rdev->config.evergreen.max_gprs = 256;
3327 rdev->config.evergreen.max_threads = 192;
3328 rdev->config.evergreen.max_gs_threads = 16;
3329 rdev->config.evergreen.max_stack_entries = 256;
3330 rdev->config.evergreen.sx_num_of_sets = 4;
3331 rdev->config.evergreen.sx_max_export_size = 128;
3332 rdev->config.evergreen.sx_max_export_pos_size = 32;
3333 rdev->config.evergreen.sx_max_export_smx_size = 96;
3334 rdev->config.evergreen.max_hw_contexts = 4;
3335 rdev->config.evergreen.sq_num_cf_insts = 1;
3336
3337 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3338 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3339 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3340 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3341 break;
3342 case CHIP_PALM:
3343 rdev->config.evergreen.num_ses = 1;
3344 rdev->config.evergreen.max_pipes = 2;
3345 rdev->config.evergreen.max_tile_pipes = 2;
3346 rdev->config.evergreen.max_simds = 2;
3347 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3348 rdev->config.evergreen.max_gprs = 256;
3349 rdev->config.evergreen.max_threads = 192;
3350 rdev->config.evergreen.max_gs_threads = 16;
3351 rdev->config.evergreen.max_stack_entries = 256;
3352 rdev->config.evergreen.sx_num_of_sets = 4;
3353 rdev->config.evergreen.sx_max_export_size = 128;
3354 rdev->config.evergreen.sx_max_export_pos_size = 32;
3355 rdev->config.evergreen.sx_max_export_smx_size = 96;
3356 rdev->config.evergreen.max_hw_contexts = 4;
3357 rdev->config.evergreen.sq_num_cf_insts = 1;
3358
3359 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3360 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3361 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3362 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3363 break;
3364 case CHIP_SUMO:
3365 rdev->config.evergreen.num_ses = 1;
3366 rdev->config.evergreen.max_pipes = 4;
3367 rdev->config.evergreen.max_tile_pipes = 4;
3368 if (rdev->pdev->device == 0x9648)
3369 rdev->config.evergreen.max_simds = 3;
3370 else if ((rdev->pdev->device == 0x9647) ||
3371 (rdev->pdev->device == 0x964a))
3372 rdev->config.evergreen.max_simds = 4;
3373 else
3374 rdev->config.evergreen.max_simds = 5;
3375 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3376 rdev->config.evergreen.max_gprs = 256;
3377 rdev->config.evergreen.max_threads = 248;
3378 rdev->config.evergreen.max_gs_threads = 32;
3379 rdev->config.evergreen.max_stack_entries = 256;
3380 rdev->config.evergreen.sx_num_of_sets = 4;
3381 rdev->config.evergreen.sx_max_export_size = 256;
3382 rdev->config.evergreen.sx_max_export_pos_size = 64;
3383 rdev->config.evergreen.sx_max_export_smx_size = 192;
3384 rdev->config.evergreen.max_hw_contexts = 8;
3385 rdev->config.evergreen.sq_num_cf_insts = 2;
3386
3387 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3388 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3389 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3390 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3391 break;
3392 case CHIP_SUMO2:
3393 rdev->config.evergreen.num_ses = 1;
3394 rdev->config.evergreen.max_pipes = 4;
3395 rdev->config.evergreen.max_tile_pipes = 4;
3396 rdev->config.evergreen.max_simds = 2;
3397 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3398 rdev->config.evergreen.max_gprs = 256;
3399 rdev->config.evergreen.max_threads = 248;
3400 rdev->config.evergreen.max_gs_threads = 32;
3401 rdev->config.evergreen.max_stack_entries = 512;
3402 rdev->config.evergreen.sx_num_of_sets = 4;
3403 rdev->config.evergreen.sx_max_export_size = 256;
3404 rdev->config.evergreen.sx_max_export_pos_size = 64;
3405 rdev->config.evergreen.sx_max_export_smx_size = 192;
3406 rdev->config.evergreen.max_hw_contexts = 4;
3407 rdev->config.evergreen.sq_num_cf_insts = 2;
3408
3409 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3410 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3411 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3412 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3413 break;
3414 case CHIP_BARTS:
3415 rdev->config.evergreen.num_ses = 2;
3416 rdev->config.evergreen.max_pipes = 4;
3417 rdev->config.evergreen.max_tile_pipes = 8;
3418 rdev->config.evergreen.max_simds = 7;
3419 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3420 rdev->config.evergreen.max_gprs = 256;
3421 rdev->config.evergreen.max_threads = 248;
3422 rdev->config.evergreen.max_gs_threads = 32;
3423 rdev->config.evergreen.max_stack_entries = 512;
3424 rdev->config.evergreen.sx_num_of_sets = 4;
3425 rdev->config.evergreen.sx_max_export_size = 256;
3426 rdev->config.evergreen.sx_max_export_pos_size = 64;
3427 rdev->config.evergreen.sx_max_export_smx_size = 192;
3428 rdev->config.evergreen.max_hw_contexts = 8;
3429 rdev->config.evergreen.sq_num_cf_insts = 2;
3430
3431 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3432 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3433 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3434 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3435 break;
3436 case CHIP_TURKS:
3437 rdev->config.evergreen.num_ses = 1;
3438 rdev->config.evergreen.max_pipes = 4;
3439 rdev->config.evergreen.max_tile_pipes = 4;
3440 rdev->config.evergreen.max_simds = 6;
3441 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3442 rdev->config.evergreen.max_gprs = 256;
3443 rdev->config.evergreen.max_threads = 248;
3444 rdev->config.evergreen.max_gs_threads = 32;
3445 rdev->config.evergreen.max_stack_entries = 256;
3446 rdev->config.evergreen.sx_num_of_sets = 4;
3447 rdev->config.evergreen.sx_max_export_size = 256;
3448 rdev->config.evergreen.sx_max_export_pos_size = 64;
3449 rdev->config.evergreen.sx_max_export_smx_size = 192;
3450 rdev->config.evergreen.max_hw_contexts = 8;
3451 rdev->config.evergreen.sq_num_cf_insts = 2;
3452
3453 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3454 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3455 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3456 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3457 break;
3458 case CHIP_CAICOS:
3459 rdev->config.evergreen.num_ses = 1;
3460 rdev->config.evergreen.max_pipes = 2;
3461 rdev->config.evergreen.max_tile_pipes = 2;
3462 rdev->config.evergreen.max_simds = 2;
3463 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3464 rdev->config.evergreen.max_gprs = 256;
3465 rdev->config.evergreen.max_threads = 192;
3466 rdev->config.evergreen.max_gs_threads = 16;
3467 rdev->config.evergreen.max_stack_entries = 256;
3468 rdev->config.evergreen.sx_num_of_sets = 4;
3469 rdev->config.evergreen.sx_max_export_size = 128;
3470 rdev->config.evergreen.sx_max_export_pos_size = 32;
3471 rdev->config.evergreen.sx_max_export_smx_size = 96;
3472 rdev->config.evergreen.max_hw_contexts = 4;
3473 rdev->config.evergreen.sq_num_cf_insts = 1;
3474
3475 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3476 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3477 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3478 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3479 break;
3480 }
3481
3482 /* Initialize HDP */
3483 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3484 WREG32((0x2c14 + j), 0x00000000);
3485 WREG32((0x2c18 + j), 0x00000000);
3486 WREG32((0x2c1c + j), 0x00000000);
3487 WREG32((0x2c20 + j), 0x00000000);
3488 WREG32((0x2c24 + j), 0x00000000);
3489 }
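	/* Each of the 32 HDP slots is 0x18 bytes wide; five dwords per slot
	 * are zeroed here. The 0x2c14..0x2c24 offsets are written raw rather
	 * than through named register macros.
	 */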
3490
3491 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3492 WREG32(SRBM_INT_CNTL, 0x1);
3493 WREG32(SRBM_INT_ACK, 0x1);
3494
3495 evergreen_fix_pci_max_read_req_size(rdev);
3496
3497 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3498 if ((rdev->family == CHIP_PALM) ||
3499 (rdev->family == CHIP_SUMO) ||
3500 (rdev->family == CHIP_SUMO2))
3501 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3502 else
3503 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3504
3505 /* setup tiling info dword. gb_addr_config is not adequate since it does
3506 * not have bank info, so create a custom tiling dword.
3507 * bits 3:0 num_pipes
3508 * bits 7:4 num_banks
3509 * bits 11:8 group_size
3510 * bits 15:12 row_size
3511 */
3512 rdev->config.evergreen.tile_config = 0;
3513 switch (rdev->config.evergreen.max_tile_pipes) {
3514 case 1:
3515 default:
3516 rdev->config.evergreen.tile_config |= (0 << 0);
3517 break;
3518 case 2:
3519 rdev->config.evergreen.tile_config |= (1 << 0);
3520 break;
3521 case 4:
3522 rdev->config.evergreen.tile_config |= (2 << 0);
3523 break;
3524 case 8:
3525 rdev->config.evergreen.tile_config |= (3 << 0);
3526 break;
3527 }
3528 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3529 if (rdev->flags & RADEON_IS_IGP)
3530 rdev->config.evergreen.tile_config |= 1 << 4;
3531 else {
3532 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3533 case 0: /* four banks */
3534 rdev->config.evergreen.tile_config |= 0 << 4;
3535 break;
3536 case 1: /* eight banks */
3537 rdev->config.evergreen.tile_config |= 1 << 4;
3538 break;
3539 case 2: /* sixteen banks */
3540 default:
3541 rdev->config.evergreen.tile_config |= 2 << 4;
3542 break;
3543 }
3544 }
3545 rdev->config.evergreen.tile_config |= 0 << 8;
3546 rdev->config.evergreen.tile_config |=
3547 ((gb_addr_config & 0x30000000) >> 28) << 12;
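	/* Bits 11:8 (group_size) stay 0, i.e. a 256-byte tile group; bits
	 * 15:12 (row_size) are copied from GB_ADDR_CONFIG bits 29:28.
	 */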
3548
3549 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3550 u32 efuse_straps_4;
3551 u32 efuse_straps_3;
3552
3553 efuse_straps_4 = RREG32_RCU(0x204);
3554 efuse_straps_3 = RREG32_RCU(0x203);
3555 tmp = (((efuse_straps_4 & 0xf) << 4) |
3556 ((efuse_straps_3 & 0xf0000000) >> 28));
3557 } else {
3558 tmp = 0;
3559 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3560 u32 rb_disable_bitmap;
3561
3562 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3563 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3564 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3565 tmp <<= 4;
3566 tmp |= rb_disable_bitmap;
3567 }
3568 }
3569 	/* enabled RBs are just the ones not disabled :) */
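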
3570 disabled_rb_mask = tmp;
3571 tmp = 0;
3572 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3573 tmp |= (1 << i);
3574 /* if all the backends are disabled, fix it up here */
3575 if ((disabled_rb_mask & tmp) == tmp) {
3576 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3577 disabled_rb_mask &= ~(1 << i);
3578 }
3579
3580 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3581 u32 simd_disable_bitmap;
3582
3583 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3584 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3585 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3586 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3587 tmp <<= 16;
3588 tmp |= simd_disable_bitmap;
3589 }
3590 rdev->config.evergreen.active_simds = hweight32(~tmp);
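	/* tmp holds the per-SE SIMD disable bitmaps, padded with ones above
	 * max_simds; counting the zero bits therefore yields the number of
	 * SIMDs that are actually active.
	 */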
3591
3592 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3593 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3594
3595 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3596 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3597 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3598 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3599 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3600 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3601 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3602
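	/* Program the render backend map: single-backend IGPs get a fixed
	 * mapping depending on which RB survived; everything else goes
	 * through the generic r6xx remap helper.
	 */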
3603 if ((rdev->config.evergreen.max_backends == 1) &&
3604 (rdev->flags & RADEON_IS_IGP)) {
3605 if ((disabled_rb_mask & 3) == 1) {
3606 /* RB0 disabled, RB1 enabled */
3607 tmp = 0x11111111;
3608 } else {
3609 /* RB1 disabled, RB0 enabled */
3610 tmp = 0x00000000;
3611 }
3612 } else {
3613 tmp = gb_addr_config & NUM_PIPES_MASK;
3614 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3615 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3616 }
3617 WREG32(GB_BACKEND_MAP, tmp);
3618
3619 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3620 WREG32(CGTS_TCC_DISABLE, 0);
3621 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3622 WREG32(CGTS_USER_TCC_DISABLE, 0);
3623
3624 /* set HW defaults for 3D engine */
3625 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3626 ROQ_IB2_START(0x2b)));
3627
3628 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3629
3630 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3631 SYNC_GRADIENT |
3632 SYNC_WALKER |
3633 SYNC_ALIGNER));
3634
3635 sx_debug_1 = RREG32(SX_DEBUG_1);
3636 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3637 WREG32(SX_DEBUG_1, sx_debug_1);
3638
3640 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3641 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3642 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3643 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3644
3645 if (rdev->family <= CHIP_SUMO2)
3646 WREG32(SMX_SAR_CTL0, 0x00010000);
3647
3648 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3649 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3650 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3651
3652 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3653 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3654 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3655
3656 WREG32(VGT_NUM_INSTANCES, 1);
3657 WREG32(SPI_CONFIG_CNTL, 0);
3658 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3659 WREG32(CP_PERFMON_CNTL, 0);
3660
3661 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3662 FETCH_FIFO_HIWATER(0x4) |
3663 DONE_FIFO_HIWATER(0xe0) |
3664 ALU_UPDATE_FIFO_HIWATER(0x8)));
3665
3666 sq_config = RREG32(SQ_CONFIG);
3667 sq_config &= ~(PS_PRIO(3) |
3668 VS_PRIO(3) |
3669 GS_PRIO(3) |
3670 ES_PRIO(3));
3671 sq_config |= (VC_ENABLE |
3672 EXPORT_SRC_C |
3673 PS_PRIO(0) |
3674 VS_PRIO(1) |
3675 GS_PRIO(2) |
3676 ES_PRIO(3));
3677
3678 switch (rdev->family) {
3679 case CHIP_CEDAR:
3680 case CHIP_PALM:
3681 case CHIP_SUMO:
3682 case CHIP_SUMO2:
3683 case CHIP_CAICOS:
3684 /* no vertex cache */
3685 sq_config &= ~VC_ENABLE;
3686 break;
3687 default:
3688 break;
3689 }
3690
3691 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3692
3693 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3694 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3695 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3696 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3697 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3698 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3699 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
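	/* The (max_gprs - 8) pool is split 12:6:4:4:3:3 (out of 32) between
	 * PS/VS/GS/ES/HS/LS; the 8 GPRs held back (4 * 2) appear to cover the
	 * clause temporaries reserved via NUM_CLAUSE_TEMP_GPRS(4) above.
	 */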
3700
3701 switch (rdev->family) {
3702 case CHIP_CEDAR:
3703 case CHIP_PALM:
3704 case CHIP_SUMO:
3705 case CHIP_SUMO2:
3706 ps_thread_count = 96;
3707 break;
3708 default:
3709 ps_thread_count = 128;
3710 break;
3711 }
3712
3713 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3714 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3715 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3716 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3717 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3718 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
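	/* PS gets a fixed thread budget; the remaining threads are divided
	 * six ways, each of VS/GS/ES/HS/LS receiving its share rounded down
	 * to a multiple of 8.
	 */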
3719
3720 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3721 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3722 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3723 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3724 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3725 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
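	/* Stack entries are split evenly: each of the six stages gets
	 * max_stack_entries / 6.
	 */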
3726
3727 WREG32(SQ_CONFIG, sq_config);
3728 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3729 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3730 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3731 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3732 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3733 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3734 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3735 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3736 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3737 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3738
3739 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3740 FORCE_EOV_MAX_REZ_CNT(255)));
3741
3742 switch (rdev->family) {
3743 case CHIP_CEDAR:
3744 case CHIP_PALM:
3745 case CHIP_SUMO:
3746 case CHIP_SUMO2:
3747 case CHIP_CAICOS:
3748 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3749 break;
3750 default:
3751 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3752 break;
3753 }
3754 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3755 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3756
3757 WREG32(VGT_GS_VERTEX_REUSE, 16);
3758 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3759 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3760
3761 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3762 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3763
3764 WREG32(CB_PERF_CTR0_SEL_0, 0);
3765 WREG32(CB_PERF_CTR0_SEL_1, 0);
3766 WREG32(CB_PERF_CTR1_SEL_0, 0);
3767 WREG32(CB_PERF_CTR1_SEL_1, 0);
3768 WREG32(CB_PERF_CTR2_SEL_0, 0);
3769 WREG32(CB_PERF_CTR2_SEL_1, 0);
3770 WREG32(CB_PERF_CTR3_SEL_0, 0);
3771 WREG32(CB_PERF_CTR3_SEL_1, 0);
3772
3773 /* clear render buffer base addresses */
3774 WREG32(CB_COLOR0_BASE, 0);
3775 WREG32(CB_COLOR1_BASE, 0);
3776 WREG32(CB_COLOR2_BASE, 0);
3777 WREG32(CB_COLOR3_BASE, 0);
3778 WREG32(CB_COLOR4_BASE, 0);
3779 WREG32(CB_COLOR5_BASE, 0);
3780 WREG32(CB_COLOR6_BASE, 0);
3781 WREG32(CB_COLOR7_BASE, 0);
3782 WREG32(CB_COLOR8_BASE, 0);
3783 WREG32(CB_COLOR9_BASE, 0);
3784 WREG32(CB_COLOR10_BASE, 0);
3785 WREG32(CB_COLOR11_BASE, 0);
3786
3787 /* set the shader const cache sizes to 0 */
3788 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3789 WREG32(i, 0);
3790 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3791 WREG32(i, 0);
3792
3793 tmp = RREG32(HDP_MISC_CNTL);
3794 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3795 WREG32(HDP_MISC_CNTL, tmp);
3796
3797 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3798 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3799
3800 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3801
3802 udelay(50);
3803
3804 }
3805
3806 int evergreen_mc_init(struct radeon_device *rdev)
3807 {
3808 u32 tmp;
3809 int chansize, numchan;
3810
3811 	/* Get VRAM information */
3812 rdev->mc.vram_is_ddr = true;
3813 if ((rdev->family == CHIP_PALM) ||
3814 (rdev->family == CHIP_SUMO) ||
3815 (rdev->family == CHIP_SUMO2))
3816 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3817 else
3818 tmp = RREG32(MC_ARB_RAMCFG);
3819 if (tmp & CHANSIZE_OVERRIDE) {
3820 chansize = 16;
3821 } else if (tmp & CHANSIZE_MASK) {
3822 chansize = 64;
3823 } else {
3824 chansize = 32;
3825 }
3826 tmp = RREG32(MC_SHARED_CHMAP);
3827 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3828 case 0:
3829 default:
3830 numchan = 1;
3831 break;
3832 case 1:
3833 numchan = 2;
3834 break;
3835 case 2:
3836 numchan = 4;
3837 break;
3838 case 3:
3839 numchan = 8;
3840 break;
3841 }
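	/* e.g. two 64-bit channels -> a 128-bit effective memory bus */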
3842 rdev->mc.vram_width = numchan * chansize;
3843 	/* Could the aperture size report 0? */
3844 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3845 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3846 /* Setup GPU memory space */
3847 if ((rdev->family == CHIP_PALM) ||
3848 (rdev->family == CHIP_SUMO) ||
3849 (rdev->family == CHIP_SUMO2)) {
3850 /* size in bytes on fusion */
3851 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3852 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3853 } else {
3854 /* size in MB on evergreen/cayman/tn */
3855 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3856 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3857 }
3858 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3859 r700_vram_gtt_location(rdev, &rdev->mc);
3860 radeon_update_bandwidth_info(rdev);
3861
3862 return 0;
3863 }
3864
3865 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3866 {
3867 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3868 RREG32(GRBM_STATUS));
3869 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3870 RREG32(GRBM_STATUS_SE0));
3871 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3872 RREG32(GRBM_STATUS_SE1));
3873 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3874 RREG32(SRBM_STATUS));
3875 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3876 RREG32(SRBM_STATUS2));
3877 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3878 RREG32(CP_STALLED_STAT1));
3879 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3880 RREG32(CP_STALLED_STAT2));
3881 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3882 RREG32(CP_BUSY_STAT));
3883 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3884 RREG32(CP_STAT));
3885 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3886 RREG32(DMA_STATUS_REG));
3887 if (rdev->family >= CHIP_CAYMAN) {
3888 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3889 RREG32(DMA_STATUS_REG + 0x800));
3890 }
3891 }
3892
3893 bool evergreen_is_display_hung(struct radeon_device *rdev)
3894 {
3895 u32 crtc_hung = 0;
3896 u32 crtc_status[6];
3897 u32 i, j, tmp;
3898
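	/* Snapshot the HV counter of every enabled CRTC, then poll up to ten
	 * times at 100us intervals; a CRTC whose counter never changes is
	 * considered hung.
	 */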
3899 for (i = 0; i < rdev->num_crtc; i++) {
3900 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3901 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3902 crtc_hung |= (1 << i);
3903 }
3904 }
3905
3906 for (j = 0; j < 10; j++) {
3907 for (i = 0; i < rdev->num_crtc; i++) {
3908 if (crtc_hung & (1 << i)) {
3909 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3910 if (tmp != crtc_status[i])
3911 crtc_hung &= ~(1 << i);
3912 }
3913 }
3914 if (crtc_hung == 0)
3915 return false;
3916 udelay(100);
3917 }
3918
3919 return true;
3920 }
3921
3922 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3923 {
3924 u32 reset_mask = 0;
3925 u32 tmp;
3926
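	/* Decode the various status registers into RADEON_RESET_* flags; the
	 * caller uses the accumulated mask to decide which blocks need a
	 * soft reset.
	 */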
3927 /* GRBM_STATUS */
3928 tmp = RREG32(GRBM_STATUS);
3929 if (tmp & (PA_BUSY | SC_BUSY |
3930 SH_BUSY | SX_BUSY |
3931 TA_BUSY | VGT_BUSY |
3932 DB_BUSY | CB_BUSY |
3933 SPI_BUSY | VGT_BUSY_NO_DMA))
3934 reset_mask |= RADEON_RESET_GFX;
3935
3936 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3937 CP_BUSY | CP_COHERENCY_BUSY))
3938 reset_mask |= RADEON_RESET_CP;
3939
3940 if (tmp & GRBM_EE_BUSY)
3941 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3942
3943 /* DMA_STATUS_REG */
3944 tmp = RREG32(DMA_STATUS_REG);
3945 if (!(tmp & DMA_IDLE))
3946 reset_mask |= RADEON_RESET_DMA;
3947
3948 /* SRBM_STATUS2 */
3949 tmp = RREG32(SRBM_STATUS2);
3950 if (tmp & DMA_BUSY)
3951 reset_mask |= RADEON_RESET_DMA;
3952
3953 /* SRBM_STATUS */
3954 tmp = RREG32(SRBM_STATUS);
3955 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3956 reset_mask |= RADEON_RESET_RLC;
3957
3958 if (tmp & IH_BUSY)
3959 reset_mask |= RADEON_RESET_IH;
3960
3961 if (tmp & SEM_BUSY)
3962 reset_mask |= RADEON_RESET_SEM;
3963
3964 if (tmp & GRBM_RQ_PENDING)
3965 reset_mask |= RADEON_RESET_GRBM;
3966
3967 if (tmp & VMC_BUSY)
3968 reset_mask |= RADEON_RESET_VMC;
3969
3970 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3971 MCC_BUSY | MCD_BUSY))
3972 reset_mask |= RADEON_RESET_MC;
3973
3974 if (evergreen_is_display_hung(rdev))
3975 reset_mask |= RADEON_RESET_DISPLAY;
3976
3977 /* VM_L2_STATUS */
3978 tmp = RREG32(VM_L2_STATUS);
3979 if (tmp & L2_BUSY)
3980 reset_mask |= RADEON_RESET_VMC;
3981
3982 	/* Skip MC reset as it's most likely not hung, just busy */
3983 if (reset_mask & RADEON_RESET_MC) {
3984 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3985 reset_mask &= ~RADEON_RESET_MC;
3986 }
3987
3988 return reset_mask;
3989 }
3990
3991 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3992 {
3993 struct evergreen_mc_save save;
3994 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3995 u32 tmp;
3996
3997 if (reset_mask == 0)
3998 return;
3999
4000 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
4001
4002 evergreen_print_gpu_status_regs(rdev);
4003
4004 /* Disable CP parsing/prefetching */
4005 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4006
4007 if (reset_mask & RADEON_RESET_DMA) {
4008 /* Disable DMA */
4009 tmp = RREG32(DMA_RB_CNTL);
4010 tmp &= ~DMA_RB_ENABLE;
4011 WREG32(DMA_RB_CNTL, tmp);
4012 }
4013
4014 udelay(50);
4015
4016 evergreen_mc_stop(rdev, &save);
4017 if (evergreen_mc_wait_for_idle(rdev)) {
4018 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4019 }
4020
4021 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4022 grbm_soft_reset |= SOFT_RESET_DB |
4023 SOFT_RESET_CB |
4024 SOFT_RESET_PA |
4025 SOFT_RESET_SC |
4026 SOFT_RESET_SPI |
4027 SOFT_RESET_SX |
4028 SOFT_RESET_SH |
4029 SOFT_RESET_TC |
4030 SOFT_RESET_TA |
4031 SOFT_RESET_VC |
4032 SOFT_RESET_VGT;
4033 }
4034
4035 if (reset_mask & RADEON_RESET_CP) {
4036 grbm_soft_reset |= SOFT_RESET_CP |
4037 SOFT_RESET_VGT;
4038
4039 srbm_soft_reset |= SOFT_RESET_GRBM;
4040 }
4041
4042 if (reset_mask & RADEON_RESET_DMA)
4043 srbm_soft_reset |= SOFT_RESET_DMA;
4044
4045 if (reset_mask & RADEON_RESET_DISPLAY)
4046 srbm_soft_reset |= SOFT_RESET_DC;
4047
4048 if (reset_mask & RADEON_RESET_RLC)
4049 srbm_soft_reset |= SOFT_RESET_RLC;
4050
4051 if (reset_mask & RADEON_RESET_SEM)
4052 srbm_soft_reset |= SOFT_RESET_SEM;
4053
4054 if (reset_mask & RADEON_RESET_IH)
4055 srbm_soft_reset |= SOFT_RESET_IH;
4056
4057 if (reset_mask & RADEON_RESET_GRBM)
4058 srbm_soft_reset |= SOFT_RESET_GRBM;
4059
4060 if (reset_mask & RADEON_RESET_VMC)
4061 srbm_soft_reset |= SOFT_RESET_VMC;
4062
4063 if (!(rdev->flags & RADEON_IS_IGP)) {
4064 if (reset_mask & RADEON_RESET_MC)
4065 srbm_soft_reset |= SOFT_RESET_MC;
4066 }
4067
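	/* Pulse the accumulated reset bits: set them, read back to post the
	 * write, hold for ~50us, then clear and read back again.
	 */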
4068 if (grbm_soft_reset) {
4069 tmp = RREG32(GRBM_SOFT_RESET);
4070 tmp |= grbm_soft_reset;
4071 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4072 WREG32(GRBM_SOFT_RESET, tmp);
4073 tmp = RREG32(GRBM_SOFT_RESET);
4074
4075 udelay(50);
4076
4077 tmp &= ~grbm_soft_reset;
4078 WREG32(GRBM_SOFT_RESET, tmp);
4079 tmp = RREG32(GRBM_SOFT_RESET);
4080 }
4081
4082 if (srbm_soft_reset) {
4083 tmp = RREG32(SRBM_SOFT_RESET);
4084 tmp |= srbm_soft_reset;
4085 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4086 WREG32(SRBM_SOFT_RESET, tmp);
4087 tmp = RREG32(SRBM_SOFT_RESET);
4088
4089 udelay(50);
4090
4091 tmp &= ~srbm_soft_reset;
4092 WREG32(SRBM_SOFT_RESET, tmp);
4093 tmp = RREG32(SRBM_SOFT_RESET);
4094 }
4095
4096 /* Wait a little for things to settle down */
4097 udelay(50);
4098
4099 evergreen_mc_resume(rdev, &save);
4100 udelay(50);
4101
4102 evergreen_print_gpu_status_regs(rdev);
4103 }
4104
4105 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4106 {
4107 struct evergreen_mc_save save;
4108 u32 tmp, i;
4109
4110 dev_info(rdev->dev, "GPU pci config reset\n");
4111
4112 /* disable dpm? */
4113
4114 /* Disable CP parsing/prefetching */
4115 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4116 udelay(50);
4117 /* Disable DMA */
4118 tmp = RREG32(DMA_RB_CNTL);
4119 tmp &= ~DMA_RB_ENABLE;
4120 WREG32(DMA_RB_CNTL, tmp);
4121 /* XXX other engines? */
4122
4123 /* halt the rlc */
4124 r600_rlc_stop(rdev);
4125
4126 udelay(50);
4127
4128 /* set mclk/sclk to bypass */
4129 rv770_set_clk_bypass_mode(rdev);
4130 /* disable BM */
4131 pci_clear_master(rdev->pdev);
4132 /* disable mem access */
4133 evergreen_mc_stop(rdev, &save);
4134 if (evergreen_mc_wait_for_idle(rdev)) {
4135 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4136 }
4137 /* reset */
4138 radeon_pci_config_reset(rdev);
4139 /* wait for asic to come out of reset */
4140 for (i = 0; i < rdev->usec_timeout; i++) {
4141 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4142 break;
4143 udelay(1);
4144 }
4145 }
4146
4147 int evergreen_asic_reset(struct radeon_device *rdev)
4148 {
4149 u32 reset_mask;
4150
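	/* Try a soft reset first; if blocks are still hung afterwards and the
	 * radeon_hard_reset option is set, escalate to a PCI config reset.
	 */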
4151 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4152
4153 if (reset_mask)
4154 r600_set_bios_scratch_engine_hung(rdev, true);
4155
4156 /* try soft reset */
4157 evergreen_gpu_soft_reset(rdev, reset_mask);
4158
4159 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4160
4161 /* try pci config reset */
4162 if (reset_mask && radeon_hard_reset)
4163 evergreen_gpu_pci_config_reset(rdev);
4164
4165 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4166
4167 if (!reset_mask)
4168 r600_set_bios_scratch_engine_hung(rdev, false);
4169
4170 return 0;
4171 }
4172
4173 /**
4174 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4175 *
4176 * @rdev: radeon_device pointer
4177 * @ring: radeon_ring structure holding ring information
4178 *
4179 * Check if the GFX engine is locked up.
4180 * Returns true if the engine appears to be locked up, false if not.
4181 */
4182 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4183 {
4184 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4185
4186 if (!(reset_mask & (RADEON_RESET_GFX |
4187 RADEON_RESET_COMPUTE |
4188 RADEON_RESET_CP))) {
4189 radeon_ring_lockup_update(rdev, ring);
4190 return false;
4191 }
4192 return radeon_ring_test_lockup(rdev, ring);
4193 }
4194
4195 /*
4196 * RLC
4197 */
4198 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4199 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
4200
4201 void sumo_rlc_fini(struct radeon_device *rdev)
4202 {
4203 int r;
4204
4205 /* save restore block */
4206 if (rdev->rlc.save_restore_obj) {
4207 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4208 if (unlikely(r != 0))
4209 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4210 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4211 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4212
4213 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4214 rdev->rlc.save_restore_obj = NULL;
4215 }
4216
4217 /* clear state block */
4218 if (rdev->rlc.clear_state_obj) {
4219 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4220 if (unlikely(r != 0))
4221 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4222 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4223 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4224
4225 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4226 rdev->rlc.clear_state_obj = NULL;
4227 }
4228
4229 	/* cp table block */
4230 if (rdev->rlc.cp_table_obj) {
4231 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4232 if (unlikely(r != 0))
4233 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4234 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4235 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4236
4237 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4238 rdev->rlc.cp_table_obj = NULL;
4239 }
4240 }
4241
4242 #define CP_ME_TABLE_SIZE 96
4243
4244 int sumo_rlc_init(struct radeon_device *rdev)
4245 {
4246 const u32 *src_ptr;
4247 volatile u32 *dst_ptr;
4248 u32 dws, data, i, j, k, reg_num;
4249 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4250 u64 reg_list_mc_addr;
4251 const struct cs_section_def *cs_data;
4252 int r;
4253
4254 src_ptr = rdev->rlc.reg_list;
4255 dws = rdev->rlc.reg_list_size;
4256 if (rdev->family >= CHIP_BONAIRE) {
4257 dws += (5 * 16) + 48 + 48 + 64;
4258 }
4259 cs_data = rdev->rlc.cs_data;
4260
4261 if (src_ptr) {
4262 /* save restore block */
4263 if (rdev->rlc.save_restore_obj == NULL) {
4264 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4265 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4266 NULL, &rdev->rlc.save_restore_obj);
4267 if (r) {
4268 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4269 return r;
4270 }
4271 }
4272
4273 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4274 if (unlikely(r != 0)) {
4275 sumo_rlc_fini(rdev);
4276 return r;
4277 }
4278 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4279 &rdev->rlc.save_restore_gpu_addr);
4280 if (r) {
4281 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4282 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4283 sumo_rlc_fini(rdev);
4284 return r;
4285 }
4286
4287 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)__UNVOLATILE(&rdev->rlc.sr_ptr));
4288 if (r) {
4289 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4290 sumo_rlc_fini(rdev);
4291 return r;
4292 }
4293 /* write the sr buffer */
4294 dst_ptr = rdev->rlc.sr_ptr;
4295 if (rdev->family >= CHIP_TAHITI) {
4296 /* SI */
4297 for (i = 0; i < rdev->rlc.reg_list_size; i++)
4298 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4299 } else {
4300 /* ON/LN/TN */
4301 /* format:
4302 * dw0: (reg2 << 16) | reg1
4303 * dw1: reg1 save space
4304 * dw2: reg2 save space
4305 */
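			/* Each pair of registers thus consumes three destination
			 * dwords (the packed offsets plus two save slots), hence
			 * the j = ((i - 1) * 3) / 2 indexing below.
			 */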
4306 for (i = 0; i < dws; i++) {
4307 data = src_ptr[i] >> 2;
4308 i++;
4309 if (i < dws)
4310 data |= (src_ptr[i] >> 2) << 16;
4311 j = (((i - 1) * 3) / 2);
4312 dst_ptr[j] = cpu_to_le32(data);
4313 }
4314 j = ((i * 3) / 2);
4315 dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4316 }
4317 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4318 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4319 }
4320
4321 if (cs_data) {
4322 /* clear state block */
4323 if (rdev->family >= CHIP_BONAIRE) {
4324 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4325 } else if (rdev->family >= CHIP_TAHITI) {
4326 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4327 dws = rdev->rlc.clear_state_size + (256 / 4);
4328 } else {
4329 reg_list_num = 0;
4330 dws = 0;
4331 for (i = 0; cs_data[i].section != NULL; i++) {
4332 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4333 reg_list_num++;
4334 dws += cs_data[i].section[j].reg_count;
4335 }
4336 }
4337 reg_list_blk_index = (3 * reg_list_num + 2);
4338 dws += reg_list_blk_index;
4339 rdev->rlc.clear_state_size = dws;
4340 }
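		/* On pre-SI parts each register extent needs three header dwords
		 * (MC address, register offset, length) plus two global dwords
		 * (upper MC address and end marker), hence 3 * reg_list_num + 2.
		 */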
4341
4342 if (rdev->rlc.clear_state_obj == NULL) {
4343 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4344 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4345 NULL, &rdev->rlc.clear_state_obj);
4346 if (r) {
4347 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4348 sumo_rlc_fini(rdev);
4349 return r;
4350 }
4351 }
4352 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4353 if (unlikely(r != 0)) {
4354 sumo_rlc_fini(rdev);
4355 return r;
4356 }
4357 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4358 &rdev->rlc.clear_state_gpu_addr);
4359 if (r) {
4360 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4361 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4362 sumo_rlc_fini(rdev);
4363 return r;
4364 }
4365
4366 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)__UNVOLATILE(&rdev->rlc.cs_ptr));
4367 if (r) {
4368 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4369 sumo_rlc_fini(rdev);
4370 return r;
4371 }
4372 /* set up the cs buffer */
4373 dst_ptr = rdev->rlc.cs_ptr;
4374 if (rdev->family >= CHIP_BONAIRE) {
4375 cik_get_csb_buffer(rdev, dst_ptr);
4376 } else if (rdev->family >= CHIP_TAHITI) {
4377 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4378 dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4379 dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4380 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4381 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4382 } else {
4383 reg_list_hdr_blk_index = 0;
4384 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4385 data = upper_32_bits(reg_list_mc_addr);
4386 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4387 reg_list_hdr_blk_index++;
4388 for (i = 0; cs_data[i].section != NULL; i++) {
4389 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4390 reg_num = cs_data[i].section[j].reg_count;
4391 data = reg_list_mc_addr & 0xffffffff;
4392 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4393 reg_list_hdr_blk_index++;
4394
4395 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4396 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4397 reg_list_hdr_blk_index++;
4398
4399 data = 0x08000000 | (reg_num * 4);
4400 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4401 reg_list_hdr_blk_index++;
4402
4403 for (k = 0; k < reg_num; k++) {
4404 data = cs_data[i].section[j].extent[k];
4405 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4406 }
4407 reg_list_mc_addr += reg_num * 4;
4408 reg_list_blk_index += reg_num;
4409 }
4410 }
4411 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4412 }
4413 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4414 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4415 }
4416
4417 if (rdev->rlc.cp_table_size) {
4418 if (rdev->rlc.cp_table_obj == NULL) {
4419 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4420 PAGE_SIZE, true,
4421 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4422 NULL, &rdev->rlc.cp_table_obj);
4423 if (r) {
4424 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4425 sumo_rlc_fini(rdev);
4426 return r;
4427 }
4428 }
4429
4430 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4431 if (unlikely(r != 0)) {
4432 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4433 sumo_rlc_fini(rdev);
4434 return r;
4435 }
4436 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4437 &rdev->rlc.cp_table_gpu_addr);
4438 if (r) {
4439 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4440 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4441 sumo_rlc_fini(rdev);
4442 return r;
4443 }
4444 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)__UNVOLATILE(&rdev->rlc.cp_table_ptr));
4445 if (r) {
4446 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4447 sumo_rlc_fini(rdev);
4448 return r;
4449 }
4450
4451 cik_init_cp_pg_table(rdev);
4452
4453 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4454 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4455
4456 }
4457
4458 return 0;
4459 }
4460
4461 static void evergreen_rlc_start(struct radeon_device *rdev)
4462 {
4463 u32 mask = RLC_ENABLE;
4464
4465 if (rdev->flags & RADEON_IS_IGP) {
4466 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4467 }
4468
4469 WREG32(RLC_CNTL, mask);
4470 }
4471
4472 int evergreen_rlc_resume(struct radeon_device *rdev)
4473 {
4474 u32 i;
4475 const __be32 *fw_data;
4476
4477 if (!rdev->rlc_fw)
4478 return -EINVAL;
4479
4480 r600_rlc_stop(rdev);
4481
4482 WREG32(RLC_HB_CNTL, 0);
4483
4484 if (rdev->flags & RADEON_IS_IGP) {
4485 if (rdev->family == CHIP_ARUBA) {
4486 u32 always_on_bitmap =
4487 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4488 /* find out the number of active simds */
4489 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4490 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4491 tmp = hweight32(~tmp);
4492 if (tmp == rdev->config.cayman.max_simds_per_se) {
4493 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4494 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4495 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4496 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4497 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4498 }
4499 } else {
4500 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4501 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4502 }
4503 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4504 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4505 } else {
4506 WREG32(RLC_HB_BASE, 0);
4507 WREG32(RLC_HB_RPTR, 0);
4508 WREG32(RLC_HB_WPTR, 0);
4509 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4510 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4511 }
4512 WREG32(RLC_MC_CNTL, 0);
4513 WREG32(RLC_UCODE_CNTL, 0);
4514
4515 fw_data = (const __be32 *)rdev->rlc_fw->data;
4516 if (rdev->family >= CHIP_ARUBA) {
4517 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4518 WREG32(RLC_UCODE_ADDR, i);
4519 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4520 }
4521 } else if (rdev->family >= CHIP_CAYMAN) {
4522 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4523 WREG32(RLC_UCODE_ADDR, i);
4524 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4525 }
4526 } else {
4527 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4528 WREG32(RLC_UCODE_ADDR, i);
4529 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4530 }
4531 }
4532 WREG32(RLC_UCODE_ADDR, 0);
4533
4534 evergreen_rlc_start(rdev);
4535
4536 return 0;
4537 }
4538
4539 /* Interrupts */
4540
4541 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4542 {
4543 if (crtc >= rdev->num_crtc)
4544 return 0;
4545 else
4546 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4547 }
4548
4549 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4550 {
4551 u32 tmp;
4552
4553 if (rdev->family >= CHIP_CAYMAN) {
4554 cayman_cp_int_cntl_setup(rdev, 0,
4555 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4556 cayman_cp_int_cntl_setup(rdev, 1, 0);
4557 cayman_cp_int_cntl_setup(rdev, 2, 0);
4558 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4559 WREG32(CAYMAN_DMA1_CNTL, tmp);
4560 } else
4561 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4562 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4563 WREG32(DMA_CNTL, tmp);
4564 WREG32(GRBM_INT_CNTL, 0);
4565 WREG32(SRBM_INT_CNTL, 0);
4566 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4567 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4568 if (rdev->num_crtc >= 4) {
4569 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4570 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4571 }
4572 if (rdev->num_crtc >= 6) {
4573 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4574 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4575 }
4576
4577 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4578 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4579 if (rdev->num_crtc >= 4) {
4580 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4581 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4582 }
4583 if (rdev->num_crtc >= 6) {
4584 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4585 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4586 }
4587
4588 /* only one DAC on DCE5 */
4589 if (!ASIC_IS_DCE5(rdev))
4590 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4591 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4592
4593 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4594 WREG32(DC_HPD1_INT_CONTROL, tmp);
4595 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4596 WREG32(DC_HPD2_INT_CONTROL, tmp);
4597 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4598 WREG32(DC_HPD3_INT_CONTROL, tmp);
4599 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4600 WREG32(DC_HPD4_INT_CONTROL, tmp);
4601 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4602 WREG32(DC_HPD5_INT_CONTROL, tmp);
4603 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4604 WREG32(DC_HPD6_INT_CONTROL, tmp);
4605
4606 }
4607
4608 int evergreen_irq_set(struct radeon_device *rdev)
4609 {
4610 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4611 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4612 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4613 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4614 u32 grbm_int_cntl = 0;
4615 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4616 u32 dma_cntl, dma_cntl1 = 0;
4617 u32 thermal_int = 0;
4618
4619 if (!rdev->irq.installed) {
4620 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4621 return -EINVAL;
4622 }
4623 /* don't enable anything if the ih is disabled */
4624 if (!rdev->ih.enabled) {
4625 r600_disable_interrupts(rdev);
4626 /* force the active interrupt state to all disabled */
4627 evergreen_disable_interrupt_state(rdev);
4628 return 0;
4629 }
4630
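	/* Read each interrupt control register with its enable bits masked
	 * off; the requested sources are OR'ed back in below and everything
	 * is written out in one pass at the end.
	 */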
4631 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4632 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4633 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4634 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4635 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4636 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4637 if (rdev->family == CHIP_ARUBA)
4638 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4639 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4640 else
4641 thermal_int = RREG32(CG_THERMAL_INT) &
4642 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4643
4644 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4645 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4646 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4647 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4648 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4649 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4650
4651 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4652
4653 if (rdev->family >= CHIP_CAYMAN) {
4654 /* enable CP interrupts on all rings */
4655 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4656 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4657 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4658 }
4659 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4660 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4661 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4662 }
4663 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4664 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4665 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4666 }
4667 } else {
4668 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4669 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4670 cp_int_cntl |= RB_INT_ENABLE;
4671 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4672 }
4673 }
4674
4675 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4676 		DRM_DEBUG("evergreen_irq_set: sw int dma\n");
4677 dma_cntl |= TRAP_ENABLE;
4678 }
4679
4680 if (rdev->family >= CHIP_CAYMAN) {
4681 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4682 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4683 			DRM_DEBUG("evergreen_irq_set: sw int dma1\n");
4684 dma_cntl1 |= TRAP_ENABLE;
4685 }
4686 }
4687
4688 if (rdev->irq.dpm_thermal) {
4689 DRM_DEBUG("dpm thermal\n");
4690 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4691 }
4692
4693 if (rdev->irq.crtc_vblank_int[0] ||
4694 atomic_read(&rdev->irq.pflip[0])) {
4695 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4696 crtc1 |= VBLANK_INT_MASK;
4697 }
4698 if (rdev->irq.crtc_vblank_int[1] ||
4699 atomic_read(&rdev->irq.pflip[1])) {
4700 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4701 crtc2 |= VBLANK_INT_MASK;
4702 }
4703 if (rdev->irq.crtc_vblank_int[2] ||
4704 atomic_read(&rdev->irq.pflip[2])) {
4705 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4706 crtc3 |= VBLANK_INT_MASK;
4707 }
4708 if (rdev->irq.crtc_vblank_int[3] ||
4709 atomic_read(&rdev->irq.pflip[3])) {
4710 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4711 crtc4 |= VBLANK_INT_MASK;
4712 }
4713 if (rdev->irq.crtc_vblank_int[4] ||
4714 atomic_read(&rdev->irq.pflip[4])) {
4715 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4716 crtc5 |= VBLANK_INT_MASK;
4717 }
4718 if (rdev->irq.crtc_vblank_int[5] ||
4719 atomic_read(&rdev->irq.pflip[5])) {
4720 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4721 crtc6 |= VBLANK_INT_MASK;
4722 }
4723 if (rdev->irq.hpd[0]) {
4724 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4725 hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4726 }
4727 if (rdev->irq.hpd[1]) {
4728 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4729 hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4730 }
4731 if (rdev->irq.hpd[2]) {
4732 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4733 hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4734 }
4735 if (rdev->irq.hpd[3]) {
4736 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4737 hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4738 }
4739 if (rdev->irq.hpd[4]) {
4740 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4741 hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4742 }
4743 if (rdev->irq.hpd[5]) {
4744 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4745 hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4746 }
4747 if (rdev->irq.afmt[0]) {
4748 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4749 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4750 }
4751 if (rdev->irq.afmt[1]) {
4752 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4753 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4754 }
4755 if (rdev->irq.afmt[2]) {
4756 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4757 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4758 }
4759 if (rdev->irq.afmt[3]) {
4760 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4761 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4762 }
4763 if (rdev->irq.afmt[4]) {
4764 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4765 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4766 }
4767 if (rdev->irq.afmt[5]) {
4768 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4769 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4770 }
4771
4772 if (rdev->family >= CHIP_CAYMAN) {
4773 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4774 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4775 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4776 } else
4777 WREG32(CP_INT_CNTL, cp_int_cntl);
4778
4779 WREG32(DMA_CNTL, dma_cntl);
4780
4781 if (rdev->family >= CHIP_CAYMAN)
4782 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4783
4784 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4785
4786 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4787 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4788 if (rdev->num_crtc >= 4) {
4789 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4790 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4791 }
4792 if (rdev->num_crtc >= 6) {
4793 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4794 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4795 }
4796
4797 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4798 GRPH_PFLIP_INT_MASK);
4799 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4800 GRPH_PFLIP_INT_MASK);
4801 if (rdev->num_crtc >= 4) {
4802 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4803 GRPH_PFLIP_INT_MASK);
4804 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4805 GRPH_PFLIP_INT_MASK);
4806 }
4807 if (rdev->num_crtc >= 6) {
4808 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4809 GRPH_PFLIP_INT_MASK);
4810 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4811 GRPH_PFLIP_INT_MASK);
4812 }
4813
4814 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4815 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4816 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4817 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4818 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4819 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4820 if (rdev->family == CHIP_ARUBA)
4821 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4822 else
4823 WREG32(CG_THERMAL_INT, thermal_int);
4824
4825 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4826 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4827 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4828 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4829 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4830 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4831
4832 /* posting read */
4833 RREG32(SRBM_STATUS);
4834
4835 return 0;
4836 }
4837
4838 static void evergreen_irq_ack(struct radeon_device *rdev)
4839 {
4840 u32 tmp;
4841
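	/* Latch every display interrupt status register first, then write
	 * the ack bit for each source that actually fired.
	 */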
4842 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4843 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4844 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4845 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4846 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4847 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4848 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4849 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4850 if (rdev->num_crtc >= 4) {
4851 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4852 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4853 }
4854 if (rdev->num_crtc >= 6) {
4855 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4856 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4857 }
4858
4859 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4860 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4861 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4862 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4863 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4864 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4865
4866 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4867 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4868 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4869 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4870 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4871 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4872 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4873 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4874 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4875 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4876 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4877 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4878
4879 if (rdev->num_crtc >= 4) {
4880 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4881 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4882 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4883 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4884 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4885 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4886 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4887 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4888 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4889 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4890 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4891 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4892 }
4893
4894 if (rdev->num_crtc >= 6) {
4895 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4896 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4897 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4898 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4899 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4900 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4901 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4902 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4903 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4904 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4905 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4906 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4907 }
4908
4909 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4910 tmp = RREG32(DC_HPD1_INT_CONTROL);
4911 tmp |= DC_HPDx_INT_ACK;
4912 WREG32(DC_HPD1_INT_CONTROL, tmp);
4913 }
4914 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4915 tmp = RREG32(DC_HPD2_INT_CONTROL);
4916 tmp |= DC_HPDx_INT_ACK;
4917 WREG32(DC_HPD2_INT_CONTROL, tmp);
4918 }
4919 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4920 tmp = RREG32(DC_HPD3_INT_CONTROL);
4921 tmp |= DC_HPDx_INT_ACK;
4922 WREG32(DC_HPD3_INT_CONTROL, tmp);
4923 }
4924 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4925 tmp = RREG32(DC_HPD4_INT_CONTROL);
4926 tmp |= DC_HPDx_INT_ACK;
4927 WREG32(DC_HPD4_INT_CONTROL, tmp);
4928 }
4929 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4930 tmp = RREG32(DC_HPD5_INT_CONTROL);
4931 tmp |= DC_HPDx_INT_ACK;
4932 WREG32(DC_HPD5_INT_CONTROL, tmp);
4933 }
4934 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4935 tmp = RREG32(DC_HPD6_INT_CONTROL);
4936 tmp |= DC_HPDx_INT_ACK;
4937 WREG32(DC_HPD6_INT_CONTROL, tmp);
4938 }
4939
4940 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4941 tmp = RREG32(DC_HPD1_INT_CONTROL);
4942 tmp |= DC_HPDx_RX_INT_ACK;
4943 WREG32(DC_HPD1_INT_CONTROL, tmp);
4944 }
4945 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4946 tmp = RREG32(DC_HPD2_INT_CONTROL);
4947 tmp |= DC_HPDx_RX_INT_ACK;
4948 WREG32(DC_HPD2_INT_CONTROL, tmp);
4949 }
4950 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4951 tmp = RREG32(DC_HPD3_INT_CONTROL);
4952 tmp |= DC_HPDx_RX_INT_ACK;
4953 WREG32(DC_HPD3_INT_CONTROL, tmp);
4954 }
4955 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4956 tmp = RREG32(DC_HPD4_INT_CONTROL);
4957 tmp |= DC_HPDx_RX_INT_ACK;
4958 WREG32(DC_HPD4_INT_CONTROL, tmp);
4959 }
4960 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4961 tmp = RREG32(DC_HPD5_INT_CONTROL);
4962 tmp |= DC_HPDx_RX_INT_ACK;
4963 WREG32(DC_HPD5_INT_CONTROL, tmp);
4964 }
4965 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4966 tmp = RREG32(DC_HPD6_INT_CONTROL);
4967 tmp |= DC_HPDx_RX_INT_ACK;
4968 WREG32(DC_HPD6_INT_CONTROL, tmp);
4969 }
4970
4971 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4972 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4973 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4974 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4975 }
4976 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4977 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4978 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4979 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4980 }
4981 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4982 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4983 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4984 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4985 }
4986 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4987 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4988 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4989 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4990 }
4991 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4992 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4993 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4994 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4995 }
4996 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4997 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4998 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4999 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
5000 }
5001 }
5002
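/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw: mask off all sources, wait
 * briefly for in-flight interrupts, then ack anything still
 * pending and reset the interrupt state (evergreen+).
 */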
5003 static void evergreen_irq_disable(struct radeon_device *rdev)
5004 {
5005 r600_disable_interrupts(rdev);
5006 /* Wait and acknowledge irq */
5007 mdelay(1);
5008 evergreen_irq_ack(rdev);
5009 evergreen_disable_interrupt_state(rdev);
5010 }
5011
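/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC (evergreen+).
 * Used for suspend.
 */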
5012 void evergreen_irq_suspend(struct radeon_device *rdev)
5013 {
5014 evergreen_irq_disable(rdev);
5015 r600_rlc_stop(rdev);
5016 }
5017
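/**
 * evergreen_get_ih_wptr - get the IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Get the IH ring buffer wptr from either the register
 * or the writeback memory buffer (evergreen+).  Also check
 * for ring buffer overflow and deal with it.
 * Used by evergreen_irq_process().
 * Returns the value of the wptr.
 */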
5018 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5019 {
5020 u32 wptr, tmp;
5021
5022 if (rdev->wb.enabled)
5023 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5024 else
5025 wptr = RREG32(IH_RB_WPTR);
5026
5027 if (wptr & RB_OVERFLOW) {
5028 wptr &= ~RB_OVERFLOW;
5029 /* When a ring buffer overflow happens, start parsing interrupts
5030 * from the last vector not yet overwritten (wptr + 16). Hopefully
5031 * this allows us to catch up.
5032 */
5033 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5034 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5035 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5036 tmp = RREG32(IH_RB_CNTL);
5037 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5038 WREG32(IH_RB_CNTL, tmp);
5039 }
5040 return (wptr & rdev->ih.ptr_mask);
5041 }
5042
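/**
 * evergreen_irq_process - interrupt handler
 *
 * @rdev: radeon_device pointer
 *
 * Interrupt handler (evergreen+).  Walk the IH ring,
 * ack interrupts and schedule work to handle interrupt
 * sources.
 * Returns irq process return code.
 */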
5043 int evergreen_irq_process(struct radeon_device *rdev)
5044 {
5045 u32 wptr;
5046 u32 rptr;
5047 u32 src_id, src_data;
5048 u32 ring_index;
5049 bool queue_hotplug = false;
5050 bool queue_hdmi = false;
5051 bool queue_dp = false;
5052 bool queue_thermal = false;
5053 u32 status, addr;
5054
5055 if (!rdev->ih.enabled || rdev->shutdown)
5056 return IRQ_NONE;
5057
5058 wptr = evergreen_get_ih_wptr(rdev);
5059
5060 restart_ih:
5061 /* is somebody else already processing irqs? */
5062 if (atomic_xchg(&rdev->ih.lock, 1))
5063 return IRQ_NONE;
5064
5065 rptr = rdev->ih.rptr;
5066 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5067
5068 /* Order reading of wptr vs. reading of IH ring data */
5069 rmb();
5070
5071 /* display interrupts */
5072 evergreen_irq_ack(rdev);
5073
5074 while (rptr != wptr) {
5075 /* wptr/rptr are in bytes! */
5076 ring_index = rptr / 4;
5077 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5078 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5079
5080 switch (src_id) {
5081 case 1: /* D1 vblank/vline */
5082 switch (src_data) {
5083 case 0: /* D1 vblank */
5084 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5085 DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5086
5087 if (rdev->irq.crtc_vblank_int[0]) {
5088 drm_handle_vblank(rdev->ddev, 0);
5089 #ifdef __NetBSD__
5090 spin_lock(&rdev->irq.vblank_lock);
5091 rdev->pm.vblank_sync = true;
5092 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5093 spin_unlock(&rdev->irq.vblank_lock);
5094 #else
5095 rdev->pm.vblank_sync = true;
5096 wake_up(&rdev->irq.vblank_queue);
5097 #endif
5098 }
5099 if (atomic_read(&rdev->irq.pflip[0]))
5100 radeon_crtc_handle_vblank(rdev, 0);
5101 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5102 DRM_DEBUG("IH: D1 vblank\n");
5103
5104 break;
5105 case 1: /* D1 vline */
5106 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5107 DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5108
5109 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5110 DRM_DEBUG("IH: D1 vline\n");
5111
5112 break;
5113 default:
5114 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5115 break;
5116 }
5117 break;
5118 case 2: /* D2 vblank/vline */
5119 switch (src_data) {
5120 case 0: /* D2 vblank */
5121 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5122 DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5123
5124 if (rdev->irq.crtc_vblank_int[1]) {
5125 drm_handle_vblank(rdev->ddev, 1);
5126 #ifdef __NetBSD__
5127 spin_lock(&rdev->irq.vblank_lock);
5128 rdev->pm.vblank_sync = true;
5129 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5130 spin_unlock(&rdev->irq.vblank_lock);
5131 #else
5132 rdev->pm.vblank_sync = true;
5133 wake_up(&rdev->irq.vblank_queue);
5134 #endif
5135 }
5136 if (atomic_read(&rdev->irq.pflip[1]))
5137 radeon_crtc_handle_vblank(rdev, 1);
5138 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5139 DRM_DEBUG("IH: D2 vblank\n");
5140
5141 break;
5142 case 1: /* D2 vline */
5143 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5144 DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5145
5146 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5147 DRM_DEBUG("IH: D2 vline\n");
5148
5149 break;
5150 default:
5151 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5152 break;
5153 }
5154 break;
5155 case 3: /* D3 vblank/vline */
5156 switch (src_data) {
5157 case 0: /* D3 vblank */
5158 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5159 DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5160
5161 if (rdev->irq.crtc_vblank_int[2]) {
5162 drm_handle_vblank(rdev->ddev, 2);
5163 #ifdef __NetBSD__
5164 spin_lock(&rdev->irq.vblank_lock);
5165 rdev->pm.vblank_sync = true;
5166 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5167 spin_unlock(&rdev->irq.vblank_lock);
5168 #else
5169 rdev->pm.vblank_sync = true;
5170 wake_up(&rdev->irq.vblank_queue);
5171 #endif
5172 }
5173 if (atomic_read(&rdev->irq.pflip[2]))
5174 radeon_crtc_handle_vblank(rdev, 2);
5175 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5176 DRM_DEBUG("IH: D3 vblank\n");
5177
5178 break;
5179 case 1: /* D3 vline */
5180 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5181 DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5182
5183 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5184 DRM_DEBUG("IH: D3 vline\n");
5185
5186 break;
5187 default:
5188 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5189 break;
5190 }
5191 break;
5192 case 4: /* D4 vblank/vline */
5193 switch (src_data) {
5194 case 0: /* D4 vblank */
5195 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5196 DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5197
5198 if (rdev->irq.crtc_vblank_int[3]) {
5199 drm_handle_vblank(rdev->ddev, 3);
5200 #ifdef __NetBSD__
5201 spin_lock(&rdev->irq.vblank_lock);
5202 rdev->pm.vblank_sync = true;
5203 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5204 spin_unlock(&rdev->irq.vblank_lock);
5205 #else
5206 rdev->pm.vblank_sync = true;
5207 wake_up(&rdev->irq.vblank_queue);
5208 #endif
5209 }
5210 if (atomic_read(&rdev->irq.pflip[3]))
5211 radeon_crtc_handle_vblank(rdev, 3);
5212 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5213 DRM_DEBUG("IH: D4 vblank\n");
5214
5215 break;
5216 case 1: /* D4 vline */
5217 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5218 DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5219
5220 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5221 DRM_DEBUG("IH: D4 vline\n");
5222
5223 break;
5224 default:
5225 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5226 break;
5227 }
5228 break;
5229 case 5: /* D5 vblank/vline */
5230 switch (src_data) {
5231 case 0: /* D5 vblank */
5232 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5233 DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5234
5235 if (rdev->irq.crtc_vblank_int[4]) {
5236 drm_handle_vblank(rdev->ddev, 4);
5237 #ifdef __NetBSD__
5238 spin_lock(&rdev->irq.vblank_lock);
5239 rdev->pm.vblank_sync = true;
5240 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5241 spin_unlock(&rdev->irq.vblank_lock);
5242 #else
5243 rdev->pm.vblank_sync = true;
5244 wake_up(&rdev->irq.vblank_queue);
5245 #endif
5246 }
5247 if (atomic_read(&rdev->irq.pflip[4]))
5248 radeon_crtc_handle_vblank(rdev, 4);
5249 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5250 DRM_DEBUG("IH: D5 vblank\n");
5251
5252 break;
5253 case 1: /* D5 vline */
5254 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5255 DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5256
5257 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5258 DRM_DEBUG("IH: D5 vline\n");
5259
5260 break;
5261 default:
5262 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5263 break;
5264 }
5265 break;
5266 case 6: /* D6 vblank/vline */
5267 switch (src_data) {
5268 case 0: /* D6 vblank */
5269 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5270 DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5271
5272 if (rdev->irq.crtc_vblank_int[5]) {
5273 drm_handle_vblank(rdev->ddev, 5);
5274 #ifdef __NetBSD__
5275 spin_lock(&rdev->irq.vblank_lock);
5276 rdev->pm.vblank_sync = true;
5277 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
5278 spin_unlock(&rdev->irq.vblank_lock);
5279 #else
5280 rdev->pm.vblank_sync = true;
5281 wake_up(&rdev->irq.vblank_queue);
5282 #endif
5283 }
5284 if (atomic_read(&rdev->irq.pflip[5]))
5285 radeon_crtc_handle_vblank(rdev, 5);
5286 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5287 DRM_DEBUG("IH: D6 vblank\n");
5288
5289 break;
5290 case 1: /* D6 vline */
5291 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5292 DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5293
5294 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5295 DRM_DEBUG("IH: D6 vline\n");
5296
5297 break;
5298 default:
5299 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5300 break;
5301 }
5302 break;
5303 case 8: /* D1 page flip */
5304 case 10: /* D2 page flip */
5305 case 12: /* D3 page flip */
5306 case 14: /* D4 page flip */
5307 case 16: /* D5 page flip */
5308 case 18: /* D6 page flip */
5309 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5310 if (radeon_use_pflipirq > 0)
5311 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5312 break;
5313 case 42: /* HPD hotplug */
5314 switch (src_data) {
5315 case 0:
5316 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5317 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5318
5319 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5320 queue_hotplug = true;
5321 DRM_DEBUG("IH: HPD1\n");
5322 break;
5323 case 1:
5324 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5325 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5326
5327 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5328 queue_hotplug = true;
5329 DRM_DEBUG("IH: HPD2\n");
5330 break;
5331 case 2:
5332 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5333 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5334
5335 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5336 queue_hotplug = true;
5337 DRM_DEBUG("IH: HPD3\n");
5338 break;
5339 case 3:
5340 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5341 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5342
5343 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5344 queue_hotplug = true;
5345 DRM_DEBUG("IH: HPD4\n");
5346 break;
5347 case 4:
5348 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5349 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5350
5351 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5352 queue_hotplug = true;
5353 DRM_DEBUG("IH: HPD5\n");
5354 break;
5355 case 5:
5356 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5357 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5358
5359 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5360 queue_hotplug = true;
5361 DRM_DEBUG("IH: HPD6\n");
5362 break;
5363 case 6:
5364 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5365 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5366
5367 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5368 queue_dp = true;
5369 DRM_DEBUG("IH: HPD_RX 1\n");
5370 break;
5371 case 7:
5372 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5373 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5374
5375 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5376 queue_dp = true;
5377 DRM_DEBUG("IH: HPD_RX 2\n");
5378 break;
5379 case 8:
5380 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5381 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5382
5383 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5384 queue_dp = true;
5385 DRM_DEBUG("IH: HPD_RX 3\n");
5386 break;
5387 case 9:
5388 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5389 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5390
5391 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5392 queue_dp = true;
5393 DRM_DEBUG("IH: HPD_RX 4\n");
5394 break;
5395 case 10:
5396 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5397 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5398
5399 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5400 queue_dp = true;
5401 DRM_DEBUG("IH: HPD_RX 5\n");
5402 break;
5403 case 11:
5404 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5405 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5406
5407 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5408 queue_dp = true;
5409 DRM_DEBUG("IH: HPD_RX 6\n");
5410 break;
5411 default:
5412 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5413 break;
5414 }
5415 break;
5416 case 44: /* hdmi */
5417 switch (src_data) {
5418 case 0:
5419 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5420 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5421
5422 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5423 queue_hdmi = true;
5424 DRM_DEBUG("IH: HDMI0\n");
5425 break;
5426 case 1:
5427 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5428 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5429
5430 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5431 queue_hdmi = true;
5432 DRM_DEBUG("IH: HDMI1\n");
5433 break;
5434 case 2:
5435 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5436 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5437
5438 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5439 queue_hdmi = true;
5440 DRM_DEBUG("IH: HDMI2\n");
5441 break;
5442 case 3:
5443 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5444 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5445
5446 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5447 queue_hdmi = true;
5448 DRM_DEBUG("IH: HDMI3\n");
5449 break;
5450 case 4:
5451 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5452 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5453
5454 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5455 queue_hdmi = true;
5456 DRM_DEBUG("IH: HDMI4\n");
5457 break;
5458 case 5:
5459 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5460 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5461
5462 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5463 queue_hdmi = true;
5464 DRM_DEBUG("IH: HDMI5\n");
5465 break;
5466 default:
5467 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5468 break;
5469 }
break;
5470 case 96:
5471 DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5472 WREG32(SRBM_INT_ACK, 0x1);
5473 break;
5474 case 124: /* UVD */
5475 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5476 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5477 break;
5478 case 146:
5479 case 147:
5480 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5481 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5482 /* reset addr and status */
5483 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5484 if (addr == 0x0 && status == 0x0)
5485 break;
5486 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5487 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
5488 addr);
5489 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5490 status);
5491 cayman_vm_decode_fault(rdev, status, addr);
5492 break;
5493 case 176: /* CP_INT in ring buffer */
5494 case 177: /* CP_INT in IB1 */
5495 case 178: /* CP_INT in IB2 */
5496 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5497 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5498 break;
5499 case 181: /* CP EOP event */
5500 DRM_DEBUG("IH: CP EOP\n");
5501 if (rdev->family >= CHIP_CAYMAN) {
5502 switch (src_data) {
5503 case 0:
5504 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5505 break;
5506 case 1:
5507 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5508 break;
5509 case 2:
5510 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5511 break;
5512 }
5513 } else
5514 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5515 break;
5516 case 224: /* DMA trap event */
5517 DRM_DEBUG("IH: DMA trap\n");
5518 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5519 break;
5520 case 230: /* thermal low to high */
5521 DRM_DEBUG("IH: thermal low to high\n");
5522 rdev->pm.dpm.thermal.high_to_low = false;
5523 queue_thermal = true;
5524 break;
5525 case 231: /* thermal high to low */
5526 DRM_DEBUG("IH: thermal high to low\n");
5527 rdev->pm.dpm.thermal.high_to_low = true;
5528 queue_thermal = true;
5529 break;
5530 case 233: /* GUI IDLE */
5531 DRM_DEBUG("IH: GUI idle\n");
5532 break;
5533 case 244: /* DMA1 trap event */
5534 if (rdev->family >= CHIP_CAYMAN) {
5535 DRM_DEBUG("IH: DMA1 trap\n");
5536 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5537 }
5538 break;
5539 default:
5540 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5541 break;
5542 }
5543
5544 /* wptr/rptr are in bytes! */
5545 rptr += 16;
5546 rptr &= rdev->ih.ptr_mask;
5547 WREG32(IH_RB_RPTR, rptr);
5548 }
5549 if (queue_dp)
5550 schedule_work(&rdev->dp_work);
5551 if (queue_hotplug)
5552 schedule_delayed_work(&rdev->hotplug_work, 0);
5553 if (queue_hdmi)
5554 schedule_work(&rdev->audio_work);
5555 if (queue_thermal && rdev->pm.dpm_enabled)
5556 schedule_work(&rdev->pm.dpm.thermal.work);
5557 rdev->ih.rptr = rptr;
5558 atomic_set(&rdev->ih.lock, 0);
5559
5560 /* make sure wptr hasn't changed while processing */
5561 wptr = evergreen_get_ih_wptr(rdev);
5562 if (wptr != rptr)
5563 goto restart_ih;
5564
5565 return IRQ_HANDLED;
5566 }
5567
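/**
 * evergreen_startup - program the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Programs the asic to a functional state (evergreen+):
 * MC, GART, rings, interrupts, IB pool and audio.
 * Called by evergreen_init() and evergreen_resume().
 * Returns 0 for success, error for failure.
 */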
5568 static int evergreen_startup(struct radeon_device *rdev)
5569 {
5570 struct radeon_ring *ring;
5571 int r;
5572
5573 /* enable pcie gen2 link */
5574 evergreen_pcie_gen2_enable(rdev);
5575 /* enable aspm */
5576 evergreen_program_aspm(rdev);
5577
5578 /* scratch needs to be initialized before MC */
5579 r = r600_vram_scratch_init(rdev);
5580 if (r)
5581 return r;
5582
5583 evergreen_mc_program(rdev);
5584
5585 if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5586 r = ni_mc_load_microcode(rdev);
5587 if (r) {
5588 DRM_ERROR("Failed to load MC firmware!\n");
5589 return r;
5590 }
5591 }
5592
5593 if (rdev->flags & RADEON_IS_AGP) {
5594 evergreen_agp_enable(rdev);
5595 } else {
5596 r = evergreen_pcie_gart_enable(rdev);
5597 if (r)
5598 return r;
5599 }
5600 evergreen_gpu_init(rdev);
5601
5602 /* allocate rlc buffers */
5603 if (rdev->flags & RADEON_IS_IGP) {
5604 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5605 rdev->rlc.reg_list_size =
5606 (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5607 rdev->rlc.cs_data = evergreen_cs_data;
5608 r = sumo_rlc_init(rdev);
5609 if (r) {
5610 DRM_ERROR("Failed to init rlc BOs!\n");
5611 return r;
5612 }
5613 }
5614
5615 /* allocate wb buffer */
5616 r = radeon_wb_init(rdev);
5617 if (r)
5618 return r;
5619
5620 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5621 if (r) {
5622 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5623 return r;
5624 }
5625
5626 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5627 if (r) {
5628 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5629 return r;
5630 }
5631
5632 r = uvd_v2_2_resume(rdev);
5633 if (!r) {
5634 r = radeon_fence_driver_start_ring(rdev,
5635 R600_RING_TYPE_UVD_INDEX);
5636 if (r)
5637 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5638 }
5639
5640 if (r)
5641 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5642
5643 /* Enable IRQ */
5644 if (!rdev->irq.installed) {
5645 r = radeon_irq_kms_init(rdev);
5646 if (r)
5647 return r;
5648 }
5649
5650 r = r600_irq_init(rdev);
5651 if (r) {
5652 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5653 radeon_irq_kms_fini(rdev);
5654 return r;
5655 }
5656 evergreen_irq_set(rdev);
5657
5658 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5659 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5660 RADEON_CP_PACKET2);
5661 if (r)
5662 return r;
5663
5664 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5665 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5666 DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5667 if (r)
5668 return r;
5669
5670 r = evergreen_cp_load_microcode(rdev);
5671 if (r)
5672 return r;
5673 r = evergreen_cp_resume(rdev);
5674 if (r)
5675 return r;
5676 r = r600_dma_resume(rdev);
5677 if (r)
5678 return r;
5679
5680 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5681 if (ring->ring_size) {
5682 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5683 RADEON_CP_PACKET2);
5684 if (!r)
5685 r = uvd_v1_0_init(rdev);
5686
5687 if (r)
5688 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5689 }
5690
5691 r = radeon_ib_pool_init(rdev);
5692 if (r) {
5693 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5694 return r;
5695 }
5696
5697 r = radeon_audio_init(rdev);
5698 if (r) {
5699 DRM_ERROR("radeon: audio init failed\n");
5700 return r;
5701 }
5702
5703 return 0;
5704 }
5705
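/**
 * evergreen_resume - resume the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Programs the asic to a functional state (evergreen+).
 * Called at resume.
 * Returns 0 for success, error for failure.
 */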
5706 int evergreen_resume(struct radeon_device *rdev)
5707 {
5708 int r;
5709
5710 /* reset the asic, the gfx blocks are often in a bad state
5711 * after the driver is unloaded or after a resume
5712 */
5713 if (radeon_asic_reset(rdev))
5714 dev_warn(rdev->dev, "GPU reset failed!\n");
5715 /* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
5716 * posting will perform the tasks necessary to bring the GPU back
5717 * into good shape.
5718 */
5719 /* post card */
5720 atom_asic_init(rdev->mode_info.atom_context);
5721
5722 /* init golden registers */
5723 evergreen_init_golden_registers(rdev);
5724
5725 if (rdev->pm.pm_method == PM_METHOD_DPM)
5726 radeon_pm_resume(rdev);
5727
5728 rdev->accel_working = true;
5729 r = evergreen_startup(rdev);
5730 if (r) {
5731 DRM_ERROR("evergreen startup failed on resume\n");
5732 rdev->accel_working = false;
5733 return r;
5734 }
5735
5736 return r;
5738 }
5739
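/**
 * evergreen_suspend - suspend the asic
 *
 * @rdev: radeon_device pointer
 *
 * Bring the chip into a state suitable for suspend (evergreen+):
 * stop the rings, disable interrupts and disable the GART.
 * Called at suspend.
 * Returns 0 for success.
 */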
5740 int evergreen_suspend(struct radeon_device *rdev)
5741 {
5742 radeon_pm_suspend(rdev);
5743 radeon_audio_fini(rdev);
5744 uvd_v1_0_fini(rdev);
5745 radeon_uvd_suspend(rdev);
5746 r700_cp_stop(rdev);
5747 r600_dma_stop(rdev);
5748 evergreen_irq_suspend(rdev);
5749 radeon_wb_disable(rdev);
5750 evergreen_pcie_gart_disable(rdev);
5751
5752 return 0;
5753 }
5754
5755 /* The plan is to move initialization into this function and to
5756 * use helper functions so that radeon_device_init does little
5757 * more than call asic-specific functions. This should also
5758 * allow us to remove a bunch of callback functions like
5759 * vram_info.
5760 */
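/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * Set up asic specific driver variables and program the hw
 * to a functional state (evergreen+).
 * Called at driver startup.
 * Returns 0 for success, errors for failure.
 */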
5761 int evergreen_init(struct radeon_device *rdev)
5762 {
5763 int r;
5764
5765 /* Read BIOS */
5766 if (!radeon_get_bios(rdev)) {
5767 if (ASIC_IS_AVIVO(rdev))
5768 return -EINVAL;
5769 }
5770 /* Must be an ATOMBIOS */
5771 if (!rdev->is_atom_bios) {
5772 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5773 return -EINVAL;
5774 }
5775 r = radeon_atombios_init(rdev);
5776 if (r)
5777 return r;
5778 /* reset the asic, the gfx blocks are often in a bad state
5779 * after the driver is unloaded or after a resume
5780 */
5781 if (radeon_asic_reset(rdev))
5782 dev_warn(rdev->dev, "GPU reset failed!\n");
5783 /* Post card if necessary */
5784 if (!radeon_card_posted(rdev)) {
5785 if (!rdev->bios) {
5786 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5787 return -EINVAL;
5788 }
5789 DRM_INFO("GPU not posted. posting now...\n");
5790 atom_asic_init(rdev->mode_info.atom_context);
5791 }
5792 /* init golden registers */
5793 evergreen_init_golden_registers(rdev);
5794 /* Initialize scratch registers */
5795 r600_scratch_init(rdev);
5796 /* Initialize surface registers */
5797 radeon_surface_init(rdev);
5798 /* Initialize clocks */
5799 radeon_get_clock_info(rdev->ddev);
5800 /* Fence driver */
5801 r = radeon_fence_driver_init(rdev);
5802 if (r)
5803 return r;
5804 /* initialize AGP */
5805 if (rdev->flags & RADEON_IS_AGP) {
5806 r = radeon_agp_init(rdev);
5807 if (r)
5808 radeon_agp_disable(rdev);
5809 }
5810 /* initialize memory controller */
5811 r = evergreen_mc_init(rdev);
5812 if (r)
5813 return r;
5814 /* Memory manager */
5815 r = radeon_bo_init(rdev);
5816 if (r)
5817 return r;
5818
5819 if (ASIC_IS_DCE5(rdev)) {
5820 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5821 r = ni_init_microcode(rdev);
5822 if (r) {
5823 DRM_ERROR("Failed to load firmware!\n");
5824 return r;
5825 }
5826 }
5827 } else {
5828 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5829 r = r600_init_microcode(rdev);
5830 if (r) {
5831 DRM_ERROR("Failed to load firmware!\n");
5832 return r;
5833 }
5834 }
5835 }
5836
5837 /* Initialize power management */
5838 radeon_pm_init(rdev);
5839
5840 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5841 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5842
5843 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5844 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5845
5846 r = radeon_uvd_init(rdev);
5847 if (!r) {
5848 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5849 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5850 4096);
5851 }
5852
5853 rdev->ih.ring_obj = NULL;
5854 r600_ih_ring_init(rdev, 64 * 1024);
5855
5856 r = r600_pcie_gart_init(rdev);
5857 if (r)
5858 return r;
5859
5860 rdev->accel_working = true;
5861 r = evergreen_startup(rdev);
5862 if (r) {
5863 dev_err(rdev->dev, "disabling GPU acceleration\n");
5864 r700_cp_fini(rdev);
5865 r600_dma_fini(rdev);
5866 r600_irq_fini(rdev);
5867 if (rdev->flags & RADEON_IS_IGP)
5868 sumo_rlc_fini(rdev);
5869 radeon_wb_fini(rdev);
5870 radeon_ib_pool_fini(rdev);
5871 radeon_irq_kms_fini(rdev);
5872 evergreen_pcie_gart_fini(rdev);
5873 rdev->accel_working = false;
5874 }
5875
5876 /* Don't start up if the MC ucode is missing on BTC parts.
5877 * The default clocks and voltages before the MC ucode
5878 * is loaded are not sufficient for advanced operations.
5879 */
5880 if (ASIC_IS_DCE5(rdev)) {
5881 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5882 DRM_ERROR("radeon: MC ucode required for NI+.\n");
5883 return -EINVAL;
5884 }
5885 }
5886
5887 return 0;
5888 }
5889
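/**
 * evergreen_fini - asic specific driver and hw fini
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the asic specific driver variables and program
 * the hw to an idle state (evergreen+).
 * Called at driver unload.
 */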
5890 void evergreen_fini(struct radeon_device *rdev)
5891 {
5892 radeon_pm_fini(rdev);
5893 radeon_audio_fini(rdev);
5894 r700_cp_fini(rdev);
5895 r600_dma_fini(rdev);
5896 r600_irq_fini(rdev);
5897 if (rdev->flags & RADEON_IS_IGP)
5898 sumo_rlc_fini(rdev);
5899 radeon_wb_fini(rdev);
5900 radeon_ib_pool_fini(rdev);
5901 radeon_irq_kms_fini(rdev);
5902 uvd_v1_0_fini(rdev);
5903 radeon_uvd_fini(rdev);
5904 evergreen_pcie_gart_fini(rdev);
5905 r600_vram_scratch_fini(rdev);
5906 radeon_gem_fini(rdev);
5907 radeon_fence_driver_fini(rdev);
5908 radeon_agp_fini(rdev);
5909 radeon_bo_fini(rdev);
5910 radeon_atombios_fini(rdev);
5911 kfree(rdev->bios);
5912 rdev->bios = NULL;
5913 }
5914
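/**
 * evergreen_pcie_gen2_enable - enable pcie gen2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Switch the pcie link to gen2 speeds when the board and the
 * upstream bridge support it; a no-op on IGP, non-pcie and X2
 * parts, when booted with radeon.pcie_gen2=0, and (for now)
 * on NetBSD.
 */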
5915 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5916 {
5917 #ifndef __NetBSD__ /* XXX radeon pcie */
5918 u32 link_width_cntl, speed_cntl;
5919
5920 if (radeon_pcie_gen2 == 0)
5921 return;
5922
5923 if (rdev->flags & RADEON_IS_IGP)
5924 return;
5925
5926 if (!(rdev->flags & RADEON_IS_PCIE))
5927 return;
5928
5929 /* x2 cards have a special sequence */
5930 if (ASIC_IS_X2(rdev))
5931 return;
5932
5933 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5934 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5935 return;
5936
5937 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5938 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5939 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5940 return;
5941 }
5942
5943 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5944
5945 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5946 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5947
5948 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5949 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5950 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5951
5952 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5953 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5954 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5955
5956 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5957 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5958 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5959
5960 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5961 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5962 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5963
5964 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5965 speed_cntl |= LC_GEN2_EN_STRAP;
5966 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5967
5968 } else {
5969 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5970 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5971 if (1)
5972 link_width_cntl |= LC_UPCONFIGURE_DIS;
5973 else
5974 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5975 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5976 }
5977 #endif
5978 }
5979
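/**
 * evergreen_program_aspm - program ASPM settings
 *
 * @rdev: radeon_device pointer
 *
 * Configure ASPM (Active State Power Management) for the pcie
 * link: L0s/L1 inactivity timeouts and PLL power-down behaviour
 * in L1 (evergreen+).  A no-op when booted with radeon.aspm=0
 * or on non-pcie parts.
 */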
5980 void evergreen_program_aspm(struct radeon_device *rdev)
5981 {
5982 u32 data, orig;
5983 u32 pcie_lc_cntl, pcie_lc_cntl_old;
5984 bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5985 /* fusion_platform should be set to true if the system is a
5986 * fusion system (APU, or a dGPU in a fusion system).
5987 * todo: check whether the system is actually a fusion platform.
5988 */
5990 bool fusion_platform = false;
5991
5992 if (radeon_aspm == 0)
5993 return;
5994
5995 if (!(rdev->flags & RADEON_IS_PCIE))
5996 return;
5997
5998 switch (rdev->family) {
5999 case CHIP_CYPRESS:
6000 case CHIP_HEMLOCK:
6001 case CHIP_JUNIPER:
6002 case CHIP_REDWOOD:
6003 case CHIP_CEDAR:
6004 case CHIP_SUMO:
6005 case CHIP_SUMO2:
6006 case CHIP_PALM:
6007 case CHIP_ARUBA:
6008 disable_l0s = true;
6009 break;
6010 default:
6011 disable_l0s = false;
6012 break;
6013 }
6014
6015 if (rdev->flags & RADEON_IS_IGP)
6016 fusion_platform = true; /* XXX also dGPUs in a fusion system */
6017
6018 data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
6019 if (fusion_platform)
6020 data &= ~MULTI_PIF;
6021 else
6022 data |= MULTI_PIF;
6023 if (data != orig)
6024 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6025
6026 data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6027 if (fusion_platform)
6028 data &= ~MULTI_PIF;
6029 else
6030 data |= MULTI_PIF;
6031 if (data != orig)
6032 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6033
6034 pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6035 pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6036 if (!disable_l0s) {
6037 if (rdev->family >= CHIP_BARTS)
6038 pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6039 else
6040 pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
6041 }
6042
6043 if (!disable_l1) {
6044 if (rdev->family >= CHIP_BARTS)
6045 pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6046 else
6047 pcie_lc_cntl |= LC_L1_INACTIVITY(8);
6048
6049 if (!disable_plloff_in_l1) {
6050 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6051 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6052 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6053 if (data != orig)
6054 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6055
6056 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6057 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6058 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6059 if (data != orig)
6060 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6061
6062 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6063 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6064 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6065 if (data != orig)
6066 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6067
6068 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6069 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6070 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6071 if (data != orig)
6072 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6073
6074 if (rdev->family >= CHIP_BARTS) {
6075 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6076 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6077 data |= PLL_RAMP_UP_TIME_0(4);
6078 if (data != orig)
6079 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6080
6081 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6082 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6083 data |= PLL_RAMP_UP_TIME_1(4);
6084 if (data != orig)
6085 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6086
6087 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6088 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6089 data |= PLL_RAMP_UP_TIME_0(4);
6090 if (data != orig)
6091 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6092
6093 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6094 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6095 data |= PLL_RAMP_UP_TIME_1(4);
6096 if (data != orig)
6097 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6098 }
6099
6100 data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6101 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6102 data |= LC_DYN_LANES_PWR_STATE(3);
6103 if (data != orig)
6104 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6105
6106 if (rdev->family >= CHIP_BARTS) {
6107 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6108 data &= ~LS2_EXIT_TIME_MASK;
6109 data |= LS2_EXIT_TIME(1);
6110 if (data != orig)
6111 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6112
6113 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6114 data &= ~LS2_EXIT_TIME_MASK;
6115 data |= LS2_EXIT_TIME(1);
6116 if (data != orig)
6117 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6118 }
6119 }
6120 }
6121
6122 /* evergreen parts only */
6123 if (rdev->family < CHIP_BARTS)
6124 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
6125
6126 if (pcie_lc_cntl != pcie_lc_cntl_old)
6127 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
6128 }
6129