/* radeon_evergreen.c revision 1.1.6.3
 * (source-browser navigation header: Home | History | Annotate | Line # | Download | only in radeon)
 */
      1 /*	$NetBSD: radeon_evergreen.c,v 1.1.6.3 2020/04/08 14:08:26 martin Exp $	*/
      2 
      3 /*
      4  * Copyright 2010 Advanced Micro Devices, Inc.
      5  *
      6  * Permission is hereby granted, free of charge, to any person obtaining a
      7  * copy of this software and associated documentation files (the "Software"),
      8  * to deal in the Software without restriction, including without limitation
      9  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
     10  * and/or sell copies of the Software, and to permit persons to whom the
     11  * Software is furnished to do so, subject to the following conditions:
     12  *
     13  * The above copyright notice and this permission notice shall be included in
     14  * all copies or substantial portions of the Software.
     15  *
     16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
     19  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
     20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
     21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
     22  * OTHER DEALINGS IN THE SOFTWARE.
     23  *
     24  * Authors: Alex Deucher
     25  */
     26 #include <sys/cdefs.h>
     27 __KERNEL_RCSID(0, "$NetBSD: radeon_evergreen.c,v 1.1.6.3 2020/04/08 14:08:26 martin Exp $");
     28 
     29 #include <linux/firmware.h>
     30 #include <linux/slab.h>
     31 #include <drm/drmP.h>
     32 #include "radeon.h"
     33 #include "radeon_asic.h"
     34 #include "radeon_audio.h"
     35 #include <drm/radeon_drm.h>
     36 #include "evergreend.h"
     37 #include "atom.h"
     38 #include "avivod.h"
     39 #include "evergreen_reg.h"
     40 #include "evergreen_blit_shaders.h"
     41 #include "radeon_ucode.h"
     42 
     43 /*
     44  * Indirect registers accessor
     45  */
     46 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
     47 {
     48 	unsigned long flags;
     49 	u32 r;
     50 
     51 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
     52 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
     53 	r = RREG32(EVERGREEN_CG_IND_DATA);
     54 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
     55 	return r;
     56 }
     57 
     58 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
     59 {
     60 	unsigned long flags;
     61 
     62 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
     63 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
     64 	WREG32(EVERGREEN_CG_IND_DATA, (v));
     65 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
     66 }
     67 
     68 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
     69 {
     70 	unsigned long flags;
     71 	u32 r;
     72 
     73 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
     74 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
     75 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
     76 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
     77 	return r;
     78 }
     79 
     80 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
     81 {
     82 	unsigned long flags;
     83 
     84 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
     85 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
     86 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
     87 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
     88 }
     89 
     90 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
     91 {
     92 	unsigned long flags;
     93 	u32 r;
     94 
     95 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
     96 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
     97 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
     98 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
     99 	return r;
    100 }
    101 
    102 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
    103 {
    104 	unsigned long flags;
    105 
    106 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
    107 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
    108 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
    109 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
    110 }
    111 
/* MMIO base offsets for the six Evergreen display controllers (CRTCs),
 * indexed by CRTC number.  Adding one of these to a per-CRTC register
 * offset addresses that controller's register block. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
    121 
    122 #include "clearstate_evergreen.h"
    123 
/* GFX register offsets saved/restored by the RLC across power-state
 * transitions on Sumo-class APUs — one register offset per entry.
 * NOTE(review): presumably consumed when building the RLC save/restore
 * buffer (sumo_rlc_init path) — confirm against the RLC setup code. */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
    208 
    209 static void evergreen_gpu_init(struct radeon_device *rdev);
    210 void evergreen_fini(struct radeon_device *rdev);
    211 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
    212 void evergreen_program_aspm(struct radeon_device *rdev);
    213 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
    214 				     int ring, u32 cp_int_cntl);
    215 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
    216 				   u32 status, u32 addr);
    217 void cik_init_cp_pg_table(struct radeon_device *rdev);
    218 
    219 extern u32 si_get_csb_size(struct radeon_device *rdev);
    220 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
    221 extern u32 cik_get_csb_size(struct radeon_device *rdev);
    222 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
    223 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
    224 
/* "Golden" register settings for Evergreen (Cypress-class) ASICs, as
 * {register offset, mask, value} triples.  NOTE(review): presumably
 * applied at init via radeon_program_register_sequence() — confirm
 * against the golden-register init path. */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
    270 
/* Second Evergreen golden-register table ({register, mask, value}
 * triples); all entries clear the full register to zero. */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
    288 
/* Medium-grain clock gating (MGCG) init sequence for Cypress, as
 * {register, mask, value} triples.  Register 0x802c is rewritten
 * several times, bracketing two passes over the 0x915c-0x929c range —
 * NOTE(review): 0x802c presumably selects a bank/mode for the
 * subsequent writes; confirm against AMD Evergreen register docs. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	/* first pass over 0x915c-0x929c */
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	/* second pass, with 0x802c set to 0x40010000 */
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
    441 
/* MGCG init sequence for Redwood ({register, mask, value} triples).
 * Same preamble as the Cypress table but with a single, shorter pass
 * over the 0x915c-0x9204 range. */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
    513 
/* Golden register settings for Cedar ({register, mask, value}
 * triples).  Differs from the Evergreen table in a few entries
 * (e.g. 0x88d4 and 0x8cf0) and drops the two extra pipe entries
 * (0x12030/0x12c30) — Cedar has fewer display/pipe resources. */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
    556 
/* MGCG init sequence for Cedar ({register, mask, value} triples);
 * shortest of the MGCG tables, with a reduced 0x915c-0x91e8 pass. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
    610 
/* MGCG init sequence for Juniper ({register, mask, value} triples).
 * Unlike the Cypress/Redwood tables, part of the common preamble
 * (0x977c, 0x3f80, 0xa210/0xa214, 0x4d8, 0x9784, 0x9698, 0x4d4,
 * 0x30cc) is written at the END of the sequence instead. */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
    708 
/* Golden register settings for SuperSumo APUs ({register, mask,
 * value} triples).  NOTE(review): presumably applied via
 * radeon_program_register_sequence() at init — confirm against the
 * golden-register init path. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
    783 
/* Additional golden register settings applied on Sumo, on top of the
 * SuperSumo table ({register, mask, value} triples). */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
    792 
/* Golden register settings for Wrestler APUs ({register, mask, value}
 * triples).  A trimmed variant of the SuperSumo table (fewer 0x91xx
 * entries), with Sumo-style tail overrides for 0x900c/0x8c18/0x8c1c. */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
    845 
/* Golden register settings for Barts (Northern Islands) as
 * {register, mask, value} triples.  Note the narrower masks compared
 * to the Evergreen tables: only the masked bits are updated. */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
    894 
/*
 * "Golden" register settings for Turks parts.
 * Same { register offset, mask, value } triple layout as the other
 * *_golden_registers tables — consumed by
 * radeon_program_register_sequence().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
    945 
/*
 * "Golden" register settings for Caicos parts.
 * Same { register offset, mask, value } triple layout as the other
 * *_golden_registers tables — consumed by
 * radeon_program_register_sequence().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
    996 
    997 static void evergreen_init_golden_registers(struct radeon_device *rdev)
    998 {
    999 	switch (rdev->family) {
   1000 	case CHIP_CYPRESS:
   1001 	case CHIP_HEMLOCK:
   1002 		radeon_program_register_sequence(rdev,
   1003 						 evergreen_golden_registers,
   1004 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
   1005 		radeon_program_register_sequence(rdev,
   1006 						 evergreen_golden_registers2,
   1007 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
   1008 		radeon_program_register_sequence(rdev,
   1009 						 cypress_mgcg_init,
   1010 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
   1011 		break;
   1012 	case CHIP_JUNIPER:
   1013 		radeon_program_register_sequence(rdev,
   1014 						 evergreen_golden_registers,
   1015 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
   1016 		radeon_program_register_sequence(rdev,
   1017 						 evergreen_golden_registers2,
   1018 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
   1019 		radeon_program_register_sequence(rdev,
   1020 						 juniper_mgcg_init,
   1021 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
   1022 		break;
   1023 	case CHIP_REDWOOD:
   1024 		radeon_program_register_sequence(rdev,
   1025 						 evergreen_golden_registers,
   1026 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
   1027 		radeon_program_register_sequence(rdev,
   1028 						 evergreen_golden_registers2,
   1029 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
   1030 		radeon_program_register_sequence(rdev,
   1031 						 redwood_mgcg_init,
   1032 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
   1033 		break;
   1034 	case CHIP_CEDAR:
   1035 		radeon_program_register_sequence(rdev,
   1036 						 cedar_golden_registers,
   1037 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
   1038 		radeon_program_register_sequence(rdev,
   1039 						 evergreen_golden_registers2,
   1040 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
   1041 		radeon_program_register_sequence(rdev,
   1042 						 cedar_mgcg_init,
   1043 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
   1044 		break;
   1045 	case CHIP_PALM:
   1046 		radeon_program_register_sequence(rdev,
   1047 						 wrestler_golden_registers,
   1048 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
   1049 		break;
   1050 	case CHIP_SUMO:
   1051 		radeon_program_register_sequence(rdev,
   1052 						 supersumo_golden_registers,
   1053 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
   1054 		break;
   1055 	case CHIP_SUMO2:
   1056 		radeon_program_register_sequence(rdev,
   1057 						 supersumo_golden_registers,
   1058 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
   1059 		radeon_program_register_sequence(rdev,
   1060 						 sumo_golden_registers,
   1061 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
   1062 		break;
   1063 	case CHIP_BARTS:
   1064 		radeon_program_register_sequence(rdev,
   1065 						 barts_golden_registers,
   1066 						 (const u32)ARRAY_SIZE(barts_golden_registers));
   1067 		break;
   1068 	case CHIP_TURKS:
   1069 		radeon_program_register_sequence(rdev,
   1070 						 turks_golden_registers,
   1071 						 (const u32)ARRAY_SIZE(turks_golden_registers));
   1072 		break;
   1073 	case CHIP_CAICOS:
   1074 		radeon_program_register_sequence(rdev,
   1075 						 caicos_golden_registers,
   1076 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
   1077 		break;
   1078 	default:
   1079 		break;
   1080 	}
   1081 }
   1082 
   1083 /**
   1084  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
   1085  *
   1086  * @rdev: radeon_device pointer
   1087  * @reg: register offset in bytes
   1088  * @val: register value
   1089  *
   1090  * Returns 0 for success or -EINVAL for an invalid register
   1091  *
   1092  */
   1093 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
   1094 					u32 reg, u32 *val)
   1095 {
   1096 	switch (reg) {
   1097 	case GRBM_STATUS:
   1098 	case GRBM_STATUS_SE0:
   1099 	case GRBM_STATUS_SE1:
   1100 	case SRBM_STATUS:
   1101 	case SRBM_STATUS2:
   1102 	case DMA_STATUS_REG:
   1103 	case UVD_STATUS:
   1104 		*val = RREG32(reg);
   1105 		return 0;
   1106 	default:
   1107 		return -EINVAL;
   1108 	}
   1109 }
   1110 
   1111 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
   1112 			     unsigned *bankh, unsigned *mtaspect,
   1113 			     unsigned *tile_split)
   1114 {
   1115 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
   1116 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
   1117 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
   1118 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
   1119 	switch (*bankw) {
   1120 	default:
   1121 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
   1122 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
   1123 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
   1124 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
   1125 	}
   1126 	switch (*bankh) {
   1127 	default:
   1128 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
   1129 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
   1130 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
   1131 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
   1132 	}
   1133 	switch (*mtaspect) {
   1134 	default:
   1135 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
   1136 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
   1137 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
   1138 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
   1139 	}
   1140 }
   1141 
   1142 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
   1143 			      u32 cntl_reg, u32 status_reg)
   1144 {
   1145 	int r, i;
   1146 	struct atom_clock_dividers dividers;
   1147 
   1148         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
   1149 					   clock, false, &dividers);
   1150 	if (r)
   1151 		return r;
   1152 
   1153 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
   1154 
   1155 	for (i = 0; i < 100; i++) {
   1156 		if (RREG32(status_reg) & DCLK_STATUS)
   1157 			break;
   1158 		mdelay(10);
   1159 	}
   1160 	if (i == 100)
   1161 		return -ETIMEDOUT;
   1162 
   1163 	return 0;
   1164 }
   1165 
   1166 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
   1167 {
   1168 	int r = 0;
   1169 	u32 cg_scratch = RREG32(CG_SCRATCH1);
   1170 
   1171 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
   1172 	if (r)
   1173 		goto done;
   1174 	cg_scratch &= 0xffff0000;
   1175 	cg_scratch |= vclk / 100; /* Mhz */
   1176 
   1177 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
   1178 	if (r)
   1179 		goto done;
   1180 	cg_scratch &= 0x0000ffff;
   1181 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
   1182 
   1183 done:
   1184 	WREG32(CG_SCRATCH1, cg_scratch);
   1185 
   1186 	return r;
   1187 }
   1188 
/**
 * evergreen_set_uvd_clocks - configure the UVD clocks via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock; 0 (with dclk) powers the PLL down
 * @dclk: requested UVD decode clock; 0 (with vclk) powers the PLL down
 *
 * Switches VCLK/DCLK to the bypass source, recomputes the UPLL dividers
 * via radeon_uvd_calc_upll_dividers(), programs them with the PLL held
 * in reset, then brings the PLL out of bypass and selects its outputs.
 * The exact write/delay ordering below is significant — do not reorder.
 *
 * Returns 0 on success or a negative error from the divider calculation
 * or the ctlreq handshake.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a divider-dependent PLL mode — NOTE(review):
	 * exact meaning of ISPARE9 not visible here; threshold kept as-is */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
   1277 
/**
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIE max read request
 *
 * @rdev: radeon_device pointer
 *
 * If the BIOS or OS left MAX_READ_REQUEST_SIZE at an invalid encoding
 * (0, 6 or 7 after the ffs() conversion below), force it to 512 bytes.
 * Compiled out on NetBSD, where the radeon pcie helpers are not wired up.
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
#ifndef __NetBSD__		/* XXX radeon pcie */
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
#endif
}
   1293 
   1294 void dce4_program_fmt(struct drm_encoder *encoder)
   1295 {
   1296 	struct drm_device *dev = encoder->dev;
   1297 	struct radeon_device *rdev = dev->dev_private;
   1298 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
   1299 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
   1300 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
   1301 	int bpc = 0;
   1302 	u32 tmp = 0;
   1303 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
   1304 
   1305 	if (connector) {
   1306 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
   1307 		bpc = radeon_get_monitor_bpc(connector);
   1308 		dither = radeon_connector->dither;
   1309 	}
   1310 
   1311 	/* LVDS/eDP FMT is set up by atom */
   1312 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
   1313 		return;
   1314 
   1315 	/* not needed for analog */
   1316 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
   1317 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
   1318 		return;
   1319 
   1320 	if (bpc == 0)
   1321 		return;
   1322 
   1323 	switch (bpc) {
   1324 	case 6:
   1325 		if (dither == RADEON_FMT_DITHER_ENABLE)
   1326 			/* XXX sort out optimal dither settings */
   1327 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
   1328 				FMT_SPATIAL_DITHER_EN);
   1329 		else
   1330 			tmp |= FMT_TRUNCATE_EN;
   1331 		break;
   1332 	case 8:
   1333 		if (dither == RADEON_FMT_DITHER_ENABLE)
   1334 			/* XXX sort out optimal dither settings */
   1335 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
   1336 				FMT_RGB_RANDOM_ENABLE |
   1337 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
   1338 		else
   1339 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
   1340 		break;
   1341 	case 10:
   1342 	default:
   1343 		/* not needed */
   1344 		break;
   1345 	}
   1346 
   1347 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
   1348 }
   1349 
   1350 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
   1351 {
   1352 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
   1353 		return true;
   1354 	else
   1355 		return false;
   1356 }
   1357 
   1358 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
   1359 {
   1360 	u32 pos1, pos2;
   1361 
   1362 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
   1363 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
   1364 
   1365 	if (pos1 != pos2)
   1366 		return true;
   1367 	else
   1368 		return false;
   1369 }
   1370 
   1371 /**
   1372  * dce4_wait_for_vblank - vblank wait asic callback.
   1373  *
   1374  * @rdev: radeon_device pointer
   1375  * @crtc: crtc to wait for vblank on
   1376  *
   1377  * Wait for vblank on the requested crtc (evergreen+).
   1378  */
   1379 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
   1380 {
   1381 	unsigned i = 0;
   1382 
   1383 	if (crtc >= rdev->num_crtc)
   1384 		return;
   1385 
   1386 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
   1387 		return;
   1388 
   1389 	/* depending on when we hit vblank, we may be close to active; if so,
   1390 	 * wait for another frame.
   1391 	 */
   1392 	while (dce4_is_in_vblank(rdev, crtc)) {
   1393 		if (i++ % 100 == 0) {
   1394 			if (!dce4_is_counter_moving(rdev, crtc))
   1395 				break;
   1396 		}
   1397 	}
   1398 
   1399 	while (!dce4_is_in_vblank(rdev, crtc)) {
   1400 		if (i++ % 100 == 0) {
   1401 			if (!dce4_is_counter_moving(rdev, crtc))
   1402 				break;
   1403 		}
   1404 	}
   1405 }
   1406 
   1407 /**
   1408  * evergreen_page_flip - pageflip callback.
   1409  *
   1410  * @rdev: radeon_device pointer
   1411  * @crtc_id: crtc to cleanup pageflip on
   1412  * @crtc_base: new address of the crtc (GPU MC address)
   1413  *
   1414  * Triggers the actual pageflip by updating the primary
   1415  * surface base address (evergreen+).
   1416  */
   1417 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
   1418 {
   1419 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
   1420 
   1421 	/* update the scanout addresses */
   1422 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
   1423 	       upper_32_bits(crtc_base));
   1424 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
   1425 	       (u32)crtc_base);
   1426 	/* post the write */
   1427 	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
   1428 }
   1429 
   1430 /**
   1431  * evergreen_page_flip_pending - check if page flip is still pending
   1432  *
   1433  * @rdev: radeon_device pointer
   1434  * @crtc_id: crtc to check
   1435  *
   1436  * Returns the current update pending status.
   1437  */
   1438 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
   1439 {
   1440 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
   1441 
   1442 	/* Return current update_pending status: */
   1443 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
   1444 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
   1445 }
   1446 
   1447 /* get temperature in millidegrees */
   1448 int evergreen_get_temp(struct radeon_device *rdev)
   1449 {
   1450 	u32 temp, toffset;
   1451 	int actual_temp = 0;
   1452 
   1453 	if (rdev->family == CHIP_JUNIPER) {
   1454 		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
   1455 			TOFFSET_SHIFT;
   1456 		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
   1457 			TS0_ADC_DOUT_SHIFT;
   1458 
   1459 		if (toffset & 0x100)
   1460 			actual_temp = temp / 2 - (0x200 - toffset);
   1461 		else
   1462 			actual_temp = temp / 2 + toffset;
   1463 
   1464 		actual_temp = actual_temp * 1000;
   1465 
   1466 	} else {
   1467 		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
   1468 			ASIC_T_SHIFT;
   1469 
   1470 		if (temp & 0x400)
   1471 			actual_temp = -256;
   1472 		else if (temp & 0x200)
   1473 			actual_temp = 255;
   1474 		else if (temp & 0x100) {
   1475 			actual_temp = temp & 0x1ff;
   1476 			actual_temp |= ~0x1ff;
   1477 		} else
   1478 			actual_temp = temp & 0xff;
   1479 
   1480 		actual_temp = (actual_temp * 1000) / 2;
   1481 	}
   1482 
   1483 	return actual_temp;
   1484 }
   1485 
   1486 int sumo_get_temp(struct radeon_device *rdev)
   1487 {
   1488 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
   1489 	int actual_temp = temp - 49;
   1490 
   1491 	return actual_temp * 1000;
   1492 }
   1493 
   1494 /**
   1495  * sumo_pm_init_profile - Initialize power profiles callback.
   1496  *
   1497  * @rdev: radeon_device pointer
   1498  *
   1499  * Initialize the power states used in profile mode
   1500  * (sumo, trinity, SI).
   1501  * Used for profile mode only.
   1502  */
   1503 void sumo_pm_init_profile(struct radeon_device *rdev)
   1504 {
   1505 	int idx;
   1506 
   1507 	/* default */
   1508 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
   1509 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
   1510 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
   1511 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
   1512 
   1513 	/* low,mid sh/mh */
   1514 	if (rdev->flags & RADEON_IS_MOBILITY)
   1515 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
   1516 	else
   1517 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
   1518 
   1519 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
   1520 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
   1521 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
   1522 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
   1523 
   1524 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
   1525 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
   1526 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
   1527 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
   1528 
   1529 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
   1530 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
   1531 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
   1532 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
   1533 
   1534 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
   1535 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
   1536 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
   1537 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
   1538 
   1539 	/* high sh/mh */
   1540 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
   1541 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
   1542 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
   1543 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
   1544 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
   1545 		rdev->pm.power_state[idx].num_clock_modes - 1;
   1546 
   1547 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
   1548 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
   1549 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
   1550 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
   1551 		rdev->pm.power_state[idx].num_clock_modes - 1;
   1552 }
   1553 
   1554 /**
   1555  * btc_pm_init_profile - Initialize power profiles callback.
   1556  *
   1557  * @rdev: radeon_device pointer
   1558  *
   1559  * Initialize the power states used in profile mode
   1560  * (BTC, cayman).
   1561  * Used for profile mode only.
   1562  */
   1563 void btc_pm_init_profile(struct radeon_device *rdev)
   1564 {
   1565 	int idx;
   1566 
   1567 	/* default */
   1568 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
   1569 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
   1570 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
   1571 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
   1572 	/* starting with BTC, there is one state that is used for both
   1573 	 * MH and SH.  Difference is that we always use the high clock index for
   1574 	 * mclk.
   1575 	 */
   1576 	if (rdev->flags & RADEON_IS_MOBILITY)
   1577 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
   1578 	else
   1579 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
   1580 	/* low sh */
   1581 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
   1582 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
   1583 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
   1584 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
   1585 	/* mid sh */
   1586 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
   1587 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
   1588 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
   1589 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
   1590 	/* high sh */
   1591 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
   1592 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
   1593 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
   1594 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
   1595 	/* low mh */
   1596 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
   1597 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
   1598 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
   1599 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
   1600 	/* mid mh */
   1601 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
   1602 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
   1603 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
   1604 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
   1605 	/* high mh */
   1606 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
   1607 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
   1608 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
   1609 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
   1610 }
   1611 
   1612 /**
   1613  * evergreen_pm_misc - set additional pm hw parameters callback.
   1614  *
   1615  * @rdev: radeon_device pointer
   1616  *
   1617  * Set non-clock parameters associated with a power state
   1618  * (voltage, etc.) (evergreen+).
   1619  */
   1620 void evergreen_pm_misc(struct radeon_device *rdev)
   1621 {
   1622 	int req_ps_idx = rdev->pm.requested_power_state_index;
   1623 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
   1624 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
   1625 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
   1626 
   1627 	if (voltage->type == VOLTAGE_SW) {
   1628 		/* 0xff0x are flags rather then an actual voltage */
   1629 		if ((voltage->voltage & 0xff00) == 0xff00)
   1630 			return;
   1631 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
   1632 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
   1633 			rdev->pm.current_vddc = voltage->voltage;
   1634 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
   1635 		}
   1636 
   1637 		/* starting with BTC, there is one state that is used for both
   1638 		 * MH and SH.  Difference is that we always use the high clock index for
   1639 		 * mclk and vddci.
   1640 		 */
   1641 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
   1642 		    (rdev->family >= CHIP_BARTS) &&
   1643 		    rdev->pm.active_crtc_count &&
   1644 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
   1645 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
   1646 			voltage = &rdev->pm.power_state[req_ps_idx].
   1647 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
   1648 
   1649 		/* 0xff0x are flags rather then an actual voltage */
   1650 		if ((voltage->vddci & 0xff00) == 0xff00)
   1651 			return;
   1652 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
   1653 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
   1654 			rdev->pm.current_vddci = voltage->vddci;
   1655 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
   1656 		}
   1657 	}
   1658 }
   1659 
   1660 /**
   1661  * evergreen_pm_prepare - pre-power state change callback.
   1662  *
   1663  * @rdev: radeon_device pointer
   1664  *
   1665  * Prepare for a power state change (evergreen+).
   1666  */
   1667 void evergreen_pm_prepare(struct radeon_device *rdev)
   1668 {
   1669 	struct drm_device *ddev = rdev->ddev;
   1670 	struct drm_crtc *crtc;
   1671 	struct radeon_crtc *radeon_crtc;
   1672 	u32 tmp;
   1673 
   1674 	/* disable any active CRTCs */
   1675 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
   1676 		radeon_crtc = to_radeon_crtc(crtc);
   1677 		if (radeon_crtc->enabled) {
   1678 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
   1679 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
   1680 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
   1681 		}
   1682 	}
   1683 }
   1684 
   1685 /**
   1686  * evergreen_pm_finish - post-power state change callback.
   1687  *
   1688  * @rdev: radeon_device pointer
   1689  *
   1690  * Clean up after a power state change (evergreen+).
   1691  */
   1692 void evergreen_pm_finish(struct radeon_device *rdev)
   1693 {
   1694 	struct drm_device *ddev = rdev->ddev;
   1695 	struct drm_crtc *crtc;
   1696 	struct radeon_crtc *radeon_crtc;
   1697 	u32 tmp;
   1698 
   1699 	/* enable any active CRTCs */
   1700 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
   1701 		radeon_crtc = to_radeon_crtc(crtc);
   1702 		if (radeon_crtc->enabled) {
   1703 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
   1704 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
   1705 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
   1706 		}
   1707 	}
   1708 }
   1709 
   1710 /**
   1711  * evergreen_hpd_sense - hpd sense callback.
   1712  *
   1713  * @rdev: radeon_device pointer
   1714  * @hpd: hpd (hotplug detect) pin
   1715  *
   1716  * Checks if a digital monitor is connected (evergreen+).
   1717  * Returns true if connected, false if not connected.
   1718  */
   1719 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
   1720 {
   1721 	bool connected = false;
   1722 
   1723 	switch (hpd) {
   1724 	case RADEON_HPD_1:
   1725 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
   1726 			connected = true;
   1727 		break;
   1728 	case RADEON_HPD_2:
   1729 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
   1730 			connected = true;
   1731 		break;
   1732 	case RADEON_HPD_3:
   1733 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
   1734 			connected = true;
   1735 		break;
   1736 	case RADEON_HPD_4:
   1737 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
   1738 			connected = true;
   1739 		break;
   1740 	case RADEON_HPD_5:
   1741 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
   1742 			connected = true;
   1743 		break;
   1744 	case RADEON_HPD_6:
   1745 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
   1746 			connected = true;
   1747 		break;
   1748 	default:
   1749 		break;
   1750 	}
   1751 
   1752 	return connected;
   1753 }
   1754 
   1755 /**
   1756  * evergreen_hpd_set_polarity - hpd set polarity callback.
   1757  *
   1758  * @rdev: radeon_device pointer
   1759  * @hpd: hpd (hotplug detect) pin
   1760  *
   1761  * Set the polarity of the hpd pin (evergreen+).
   1762  */
   1763 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
   1764 				enum radeon_hpd_id hpd)
   1765 {
   1766 	u32 tmp;
   1767 	bool connected = evergreen_hpd_sense(rdev, hpd);
   1768 
   1769 	switch (hpd) {
   1770 	case RADEON_HPD_1:
   1771 		tmp = RREG32(DC_HPD1_INT_CONTROL);
   1772 		if (connected)
   1773 			tmp &= ~DC_HPDx_INT_POLARITY;
   1774 		else
   1775 			tmp |= DC_HPDx_INT_POLARITY;
   1776 		WREG32(DC_HPD1_INT_CONTROL, tmp);
   1777 		break;
   1778 	case RADEON_HPD_2:
   1779 		tmp = RREG32(DC_HPD2_INT_CONTROL);
   1780 		if (connected)
   1781 			tmp &= ~DC_HPDx_INT_POLARITY;
   1782 		else
   1783 			tmp |= DC_HPDx_INT_POLARITY;
   1784 		WREG32(DC_HPD2_INT_CONTROL, tmp);
   1785 		break;
   1786 	case RADEON_HPD_3:
   1787 		tmp = RREG32(DC_HPD3_INT_CONTROL);
   1788 		if (connected)
   1789 			tmp &= ~DC_HPDx_INT_POLARITY;
   1790 		else
   1791 			tmp |= DC_HPDx_INT_POLARITY;
   1792 		WREG32(DC_HPD3_INT_CONTROL, tmp);
   1793 		break;
   1794 	case RADEON_HPD_4:
   1795 		tmp = RREG32(DC_HPD4_INT_CONTROL);
   1796 		if (connected)
   1797 			tmp &= ~DC_HPDx_INT_POLARITY;
   1798 		else
   1799 			tmp |= DC_HPDx_INT_POLARITY;
   1800 		WREG32(DC_HPD4_INT_CONTROL, tmp);
   1801 		break;
   1802 	case RADEON_HPD_5:
   1803 		tmp = RREG32(DC_HPD5_INT_CONTROL);
   1804 		if (connected)
   1805 			tmp &= ~DC_HPDx_INT_POLARITY;
   1806 		else
   1807 			tmp |= DC_HPDx_INT_POLARITY;
   1808 		WREG32(DC_HPD5_INT_CONTROL, tmp);
   1809 			break;
   1810 	case RADEON_HPD_6:
   1811 		tmp = RREG32(DC_HPD6_INT_CONTROL);
   1812 		if (connected)
   1813 			tmp &= ~DC_HPDx_INT_POLARITY;
   1814 		else
   1815 			tmp |= DC_HPDx_INT_POLARITY;
   1816 		WREG32(DC_HPD6_INT_CONTROL, tmp);
   1817 		break;
   1818 	default:
   1819 		break;
   1820 	}
   1821 }
   1822 
   1823 /**
   1824  * evergreen_hpd_init - hpd setup callback.
   1825  *
   1826  * @rdev: radeon_device pointer
   1827  *
   1828  * Setup the hpd pins used by the card (evergreen+).
   1829  * Enable the pin, set the polarity, and enable the hpd interrupts.
   1830  */
   1831 void evergreen_hpd_init(struct radeon_device *rdev)
   1832 {
   1833 	struct drm_device *dev = rdev->ddev;
   1834 	struct drm_connector *connector;
   1835 	unsigned enabled = 0;
   1836 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
   1837 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
   1838 
   1839 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
   1840 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
   1841 
   1842 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
   1843 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
   1844 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
   1845 			 * aux dp channel on imac and help (but not completely fix)
   1846 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
   1847 			 * also avoid interrupt storms during dpms.
   1848 			 */
   1849 			continue;
   1850 		}
   1851 		switch (radeon_connector->hpd.hpd) {
   1852 		case RADEON_HPD_1:
   1853 			WREG32(DC_HPD1_CONTROL, tmp);
   1854 			break;
   1855 		case RADEON_HPD_2:
   1856 			WREG32(DC_HPD2_CONTROL, tmp);
   1857 			break;
   1858 		case RADEON_HPD_3:
   1859 			WREG32(DC_HPD3_CONTROL, tmp);
   1860 			break;
   1861 		case RADEON_HPD_4:
   1862 			WREG32(DC_HPD4_CONTROL, tmp);
   1863 			break;
   1864 		case RADEON_HPD_5:
   1865 			WREG32(DC_HPD5_CONTROL, tmp);
   1866 			break;
   1867 		case RADEON_HPD_6:
   1868 			WREG32(DC_HPD6_CONTROL, tmp);
   1869 			break;
   1870 		default:
   1871 			break;
   1872 		}
   1873 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
   1874 		enabled |= 1 << radeon_connector->hpd.hpd;
   1875 	}
   1876 	radeon_irq_kms_enable_hpd(rdev, enabled);
   1877 }
   1878 
   1879 /**
   1880  * evergreen_hpd_fini - hpd tear down callback.
   1881  *
   1882  * @rdev: radeon_device pointer
   1883  *
   1884  * Tear down the hpd pins used by the card (evergreen+).
   1885  * Disable the hpd interrupts.
   1886  */
   1887 void evergreen_hpd_fini(struct radeon_device *rdev)
   1888 {
   1889 	struct drm_device *dev = rdev->ddev;
   1890 	struct drm_connector *connector;
   1891 	unsigned disabled = 0;
   1892 
   1893 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
   1894 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
   1895 		switch (radeon_connector->hpd.hpd) {
   1896 		case RADEON_HPD_1:
   1897 			WREG32(DC_HPD1_CONTROL, 0);
   1898 			break;
   1899 		case RADEON_HPD_2:
   1900 			WREG32(DC_HPD2_CONTROL, 0);
   1901 			break;
   1902 		case RADEON_HPD_3:
   1903 			WREG32(DC_HPD3_CONTROL, 0);
   1904 			break;
   1905 		case RADEON_HPD_4:
   1906 			WREG32(DC_HPD4_CONTROL, 0);
   1907 			break;
   1908 		case RADEON_HPD_5:
   1909 			WREG32(DC_HPD5_CONTROL, 0);
   1910 			break;
   1911 		case RADEON_HPD_6:
   1912 			WREG32(DC_HPD6_CONTROL, 0);
   1913 			break;
   1914 		default:
   1915 			break;
   1916 		}
   1917 		disabled |= 1 << radeon_connector->hpd.hpd;
   1918 	}
   1919 	radeon_irq_kms_disable_hpd(rdev, disabled);
   1920 }
   1921 
   1922 /* watermark setup */
   1923 
/**
 * evergreen_line_buffer_adjust - set up the line buffer split for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to configure
 * @mode: mode on this crtc (NULL/disabled means no lb needed)
 * @other_mode: mode on the paired crtc sharing the same line buffer
 *
 * Programs DC_LB_MEMORY_SPLIT (and, on DCE4.1/DCE5, the DMIF buffer
 * allocation) for the crtc and returns the line buffer size (in entries)
 * granted to it; returns 0 when the crtc is disabled.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need the DMIF buffers allocated per pipe; poll
	 * until the hardware acknowledges the allocation */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the programmed split back into an lb size; DCE5 line
	 * buffers are larger than DCE4 ones */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
   2014 
   2015 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
   2016 {
   2017 	u32 tmp = RREG32(MC_SHARED_CHMAP);
   2018 
   2019 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
   2020 	case 0:
   2021 	default:
   2022 		return 1;
   2023 	case 1:
   2024 		return 2;
   2025 	case 2:
   2026 		return 4;
   2027 	case 3:
   2028 		return 8;
   2029 	}
   2030 }
   2031 
/* Input parameters for the evergreen watermark calculations below;
 * filled in per-head by evergreen_program_watermarks(). */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
   2047 
   2048 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
   2049 {
   2050 	/* Calculate DRAM Bandwidth and the part allocated to display. */
   2051 	fixed20_12 dram_efficiency; /* 0.7 */
   2052 	fixed20_12 yclk, dram_channels, bandwidth;
   2053 	fixed20_12 a;
   2054 
   2055 	a.full = dfixed_const(1000);
   2056 	yclk.full = dfixed_const(wm->yclk);
   2057 	yclk.full = dfixed_div(yclk, a);
   2058 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
   2059 	a.full = dfixed_const(10);
   2060 	dram_efficiency.full = dfixed_const(7);
   2061 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
   2062 	bandwidth.full = dfixed_mul(dram_channels, yclk);
   2063 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
   2064 
   2065 	return dfixed_trunc(bandwidth);
   2066 }
   2067 
   2068 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
   2069 {
   2070 	/* Calculate DRAM Bandwidth and the part allocated to display. */
   2071 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
   2072 	fixed20_12 yclk, dram_channels, bandwidth;
   2073 	fixed20_12 a;
   2074 
   2075 	a.full = dfixed_const(1000);
   2076 	yclk.full = dfixed_const(wm->yclk);
   2077 	yclk.full = dfixed_div(yclk, a);
   2078 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
   2079 	a.full = dfixed_const(10);
   2080 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
   2081 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
   2082 	bandwidth.full = dfixed_mul(dram_channels, yclk);
   2083 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
   2084 
   2085 	return dfixed_trunc(bandwidth);
   2086 }
   2087 
   2088 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
   2089 {
   2090 	/* Calculate the display Data return Bandwidth */
   2091 	fixed20_12 return_efficiency; /* 0.8 */
   2092 	fixed20_12 sclk, bandwidth;
   2093 	fixed20_12 a;
   2094 
   2095 	a.full = dfixed_const(1000);
   2096 	sclk.full = dfixed_const(wm->sclk);
   2097 	sclk.full = dfixed_div(sclk, a);
   2098 	a.full = dfixed_const(10);
   2099 	return_efficiency.full = dfixed_const(8);
   2100 	return_efficiency.full = dfixed_div(return_efficiency, a);
   2101 	a.full = dfixed_const(32);
   2102 	bandwidth.full = dfixed_mul(a, sclk);
   2103 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
   2104 
   2105 	return dfixed_trunc(bandwidth);
   2106 }
   2107 
   2108 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
   2109 {
   2110 	/* Calculate the DMIF Request Bandwidth */
   2111 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
   2112 	fixed20_12 disp_clk, bandwidth;
   2113 	fixed20_12 a;
   2114 
   2115 	a.full = dfixed_const(1000);
   2116 	disp_clk.full = dfixed_const(wm->disp_clk);
   2117 	disp_clk.full = dfixed_div(disp_clk, a);
   2118 	a.full = dfixed_const(10);
   2119 	disp_clk_request_efficiency.full = dfixed_const(8);
   2120 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
   2121 	a.full = dfixed_const(32);
   2122 	bandwidth.full = dfixed_mul(a, disp_clk);
   2123 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
   2124 
   2125 	return dfixed_trunc(bandwidth);
   2126 }
   2127 
   2128 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
   2129 {
   2130 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
   2131 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
   2132 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
   2133 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
   2134 
   2135 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
   2136 }
   2137 
   2138 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
   2139 {
   2140 	/* Calculate the display mode Average Bandwidth
   2141 	 * DisplayMode should contain the source and destination dimensions,
   2142 	 * timing, etc.
   2143 	 */
   2144 	fixed20_12 bpp;
   2145 	fixed20_12 line_time;
   2146 	fixed20_12 src_width;
   2147 	fixed20_12 bandwidth;
   2148 	fixed20_12 a;
   2149 
   2150 	a.full = dfixed_const(1000);
   2151 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
   2152 	line_time.full = dfixed_div(line_time, a);
   2153 	bpp.full = dfixed_const(wm->bytes_per_pixel);
   2154 	src_width.full = dfixed_const(wm->src_width);
   2155 	bandwidth.full = dfixed_mul(src_width, bpp);
   2156 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
   2157 	bandwidth.full = dfixed_div(bandwidth, line_time);
   2158 
   2159 	return dfixed_trunc(bandwidth);
   2160 }
   2161 
/**
 * evergreen_latency_watermark - compute the latency watermark for a head
 *
 * @wm: watermark parameters for this head
 *
 * First calculate the worst-case data-return latency in ns (memory
 * controller + other heads + dc pipe), then extend it by any shortfall
 * in line buffer fill time versus the active display time.
 * Returns 0 when no heads are active (avoids dividing by num_heads).
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling / many vtaps / interlace need more source lines
	 * fetched per destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill bandwidth: the smaller of this head's share of the
	 * available bandwidth and disp_clk * bytes_per_pixel */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the source lines needed for one destination line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
   2214 
   2215 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
   2216 {
   2217 	if (evergreen_average_bandwidth(wm) <=
   2218 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
   2219 		return true;
   2220 	else
   2221 		return false;
   2222 };
   2223 
   2224 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
   2225 {
   2226 	if (evergreen_average_bandwidth(wm) <=
   2227 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
   2228 		return true;
   2229 	else
   2230 		return false;
   2231 };
   2232 
   2233 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
   2234 {
   2235 	u32 lb_partitions = wm->lb_size / wm->src_width;
   2236 	u32 line_time = wm->active_time + wm->blank_time;
   2237 	u32 latency_tolerant_lines;
   2238 	u32 latency_hiding;
   2239 	fixed20_12 a;
   2240 
   2241 	a.full = dfixed_const(1);
   2242 	if (wm->vsc.full > a.full)
   2243 		latency_tolerant_lines = 1;
   2244 	else {
   2245 		if (lb_partitions <= (wm->vtaps + 1))
   2246 			latency_tolerant_lines = 1;
   2247 		else
   2248 			latency_tolerant_lines = 2;
   2249 	}
   2250 
   2251 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
   2252 
   2253 	if (evergreen_latency_watermark(wm) <= latency_hiding)
   2254 		return true;
   2255 	else
   2256 		return false;
   2257 }
   2258 
/**
 * evergreen_program_watermarks - program the display watermarks for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer size granted to this crtc (from
 *	evergreen_line_buffer_adjust())
 * @num_heads: number of active display heads
 *
 * Computes latency watermark A (high clocks) and B (low clocks) for the
 * crtc's current mode and writes them, along with the priority marks,
 * into the pipe's arbitration registers.  Results are also cached in
 * @radeon_crtc for later use by DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line_time is clamped to the 16-bit
		 * LATENCY_HIGH_WATERMARK field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark_a * clock(MHz) * hsc / 16,
		 * all in fixed20.12 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same formula with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
   2416 
   2417 /**
   2418  * evergreen_bandwidth_update - update display watermarks callback.
   2419  *
   2420  * @rdev: radeon_device pointer
   2421  *
   2422  * Update the display watermarks based on the requested mode(s)
   2423  * (evergreen+).
   2424  */
   2425 void evergreen_bandwidth_update(struct radeon_device *rdev)
   2426 {
   2427 	struct drm_display_mode *mode0 = NULL;
   2428 	struct drm_display_mode *mode1 = NULL;
   2429 	u32 num_heads = 0, lb_size;
   2430 	int i;
   2431 
   2432 	if (!rdev->mode_info.mode_config_initialized)
   2433 		return;
   2434 
   2435 	radeon_update_display_priority(rdev);
   2436 
   2437 	for (i = 0; i < rdev->num_crtc; i++) {
   2438 		if (rdev->mode_info.crtcs[i]->base.enabled)
   2439 			num_heads++;
   2440 	}
   2441 	for (i = 0; i < rdev->num_crtc; i += 2) {
   2442 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
   2443 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
   2444 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
   2445 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
   2446 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
   2447 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
   2448 	}
   2449 }
   2450 
   2451 /**
   2452  * evergreen_mc_wait_for_idle - wait for MC idle callback.
   2453  *
   2454  * @rdev: radeon_device pointer
   2455  *
   2456  * Wait for the MC (memory controller) to be idle.
   2457  * (evergreen+).
   2458  * Returns 0 if the MC is idle, -1 if not.
   2459  */
   2460 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
   2461 {
   2462 	unsigned i;
   2463 	u32 tmp;
   2464 
   2465 	for (i = 0; i < rdev->usec_timeout; i++) {
   2466 		/* read MC_STATUS */
   2467 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
   2468 		if (!tmp)
   2469 			return 0;
   2470 		udelay(1);
   2471 	}
   2472 	return -1;
   2473 }
   2474 
   2475 /*
   2476  * GART
   2477  */
   2478 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
   2479 {
   2480 	unsigned i;
   2481 	u32 tmp;
   2482 
   2483 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
   2484 
   2485 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
   2486 	for (i = 0; i < rdev->usec_timeout; i++) {
   2487 		/* read MC_STATUS */
   2488 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
   2489 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
   2490 		if (tmp == 2) {
   2491 			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
   2492 			return;
   2493 		}
   2494 		if (tmp) {
   2495 			return;
   2496 		}
   2497 		udelay(1);
   2498 	}
   2499 }
   2500 
/**
 * evergreen_pcie_gart_enable - bring up the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache, L1 TLBs
 * and context0 page table registers, then flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control; IGPs use the FUS_* register variants */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the GTT range; faults fall back to the dummy page */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
   2558 
/*
 * Disable the PCIE GART: turn off both VM contexts, drop the L1/L2
 * TLB settings back to minimal values and unpin the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
   2583 
/*
 * Full GART teardown: disable translation, free the page table
 * buffer object and release the GART bookkeeping.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
   2590 
   2591 
/*
 * Configure the MC for AGP operation: enable the L2 cache and L1
 * TLBs but leave both VM contexts disabled, so no page table
 * translation takes place.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* VM contexts stay off: AGP does not use GPU page tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
   2617 
/* Register offsets of the DIG blocks, indexed by DIG instance. */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};

/* UNIPHY TX control register offsets, one per PHY instance. */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};

/* Register offsets of the DP blocks, indexed by DIG front end. */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
   2647 
   2648 
   2649 /*
   2650  * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
   2651  * We go from crtc to connector and it is not relible  since it
   2652  * should be an opposite direction .If crtc is enable then
   2653  * find the dig_fe which selects this crtc and insure that it enable.
   2654  * if such dig_fe is found then find dig_be which selects found dig_be and
   2655  * insure that it enable and in DP_SST mode.
   2656  * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
   2657  * from dp symbols clocks .
   2658  */
   2659 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
   2660 					       unsigned crtc_id, unsigned *ret_dig_fe)
   2661 {
   2662 	unsigned i;
   2663 	unsigned dig_fe;
   2664 	unsigned dig_be;
   2665 	unsigned dig_en_be;
   2666 	unsigned uniphy_pll;
   2667 	unsigned digs_fe_selected;
   2668 	unsigned dig_be_mode;
   2669 	unsigned dig_fe_mask;
   2670 	bool is_enabled = false;
   2671 	bool found_crtc = false;
   2672 
   2673 	/* loop through all running dig_fe to find selected crtc */
   2674 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
   2675 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
   2676 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
   2677 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
   2678 			/* found running pipe */
   2679 			found_crtc = true;
   2680 			dig_fe_mask = 1 << i;
   2681 			dig_fe = i;
   2682 			break;
   2683 		}
   2684 	}
   2685 
   2686 	if (found_crtc) {
   2687 		/* loop through all running dig_be to find selected dig_fe */
   2688 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
   2689 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
   2690 			/* if dig_fe_selected by dig_be? */
   2691 			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
   2692 			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
   2693 			if (dig_fe_mask &  digs_fe_selected &&
   2694 			    /* if dig_be in sst mode? */
   2695 			    dig_be_mode == NI_DIG_BE_DPSST) {
   2696 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
   2697 						   ni_dig_offsets[i]);
   2698 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
   2699 						    ni_tx_offsets[i]);
   2700 				/* dig_be enable and tx is running */
   2701 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
   2702 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
   2703 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
   2704 					is_enabled = true;
   2705 					*ret_dig_fe = dig_fe;
   2706 					break;
   2707 				}
   2708 			}
   2709 		}
   2710 	}
   2711 
   2712 	return is_enabled;
   2713 }
   2714 
   2715 /*
   2716  * Blank dig when in dp sst mode
   2717  * Dig ignores crtc timing
   2718  */
   2719 static void evergreen_blank_dp_output(struct radeon_device *rdev,
   2720 				      unsigned dig_fe)
   2721 {
   2722 	unsigned stream_ctrl;
   2723 	unsigned fifo_ctrl;
   2724 	unsigned counter = 0;
   2725 
   2726 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
   2727 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
   2728 		return;
   2729 	}
   2730 
   2731 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
   2732 			     evergreen_dp_offsets[dig_fe]);
   2733 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
   2734 		DRM_ERROR("dig %d , should be enable\n", dig_fe);
   2735 		return;
   2736 	}
   2737 
   2738 	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
   2739 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
   2740 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
   2741 
   2742 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
   2743 			     evergreen_dp_offsets[dig_fe]);
   2744 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
   2745 		msleep(1);
   2746 		counter++;
   2747 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
   2748 				     evergreen_dp_offsets[dig_fe]);
   2749 	}
   2750 	if (counter >= 32 )
   2751 		DRM_ERROR("counter exceeds %d\n", counter);
   2752 
   2753 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
   2754 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
   2755 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
   2756 
   2757 }
   2758 
/*
 * Stop the memory controller prior to reprogramming it: disable VGA
 * rendering, blank every active display controller (and any DP SST
 * output on DCE5), black out the MC and lock the double buffered
 * display registers.  The state needed to undo all this is recorded
 * in @save for evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via the BLANK_CONTROL register */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/*
			 * We should disable the dig if it drives a dp sst
			 * stream, but we are in radeon_device_init and the
			 * topology is unknown (it only becomes available
			 * after radeon_modeset_init).
			 * radeon_atom_encoder_dpms_dig would do the job if
			 * we could initialize it properly, so for now do it
			 * manually here.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/* we could remove the 6 lines below */
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	/* black out the MC so display traffic stops while we reprogram it */
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
   2856 
/*
 * Undo evergreen_mc_stop(): repoint all crtcs at the start of VRAM,
 * unlock the double buffered registers, lift the MC blackout,
 * re-enable CPU framebuffer access, then unblank the displays and
 * restore VGA state from @save.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* poll until the pending surface update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: clear the blank bit set by mc_stop */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
   2945 
/*
 * Program the memory controller's apertures while the displays are
 * stopped: system aperture, FB location, HDP non-surface range and
 * the AGP aperture.  Display state is saved/restored around the
 * reprogramming via evergreen_mc_stop()/evergreen_mc_resume().
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must span both VRAM and the AGP window */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: end in [31:16], start in [15:0], 16MB granularity */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: collapse the aperture (BOT above TOP disables it) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
   3022 
   3023 /*
   3024  * CP.
   3025  */
/*
 * Emit an indirect buffer on the gfx ring: force DX10/11 packet
 * mode, optionally record the post-IB read pointer (via the rptr
 * save register, or via a writeback memory write when WB is
 * enabled), then emit the INDIRECT_BUFFER packet for the IB.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
   3059 
   3060 
/*
 * Load the PFP and ME microcode images into the CP with the CP
 * stopped.  The firmware words are stored big-endian in the blobs
 * and byteswapped on the way in.  Returns -EINVAL if either
 * firmware image has not been loaded yet.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload PFP microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload ME microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode address registers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
   3092 
/*
 * Start the CP: emit ME_INITIALIZE, un-halt the micro engines and
 * emit the golden (clear state) register defaults plus a minimal
 * static setup on the gfx ring.  Returns 0 on success or the error
 * from radeon_ring_lock().
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* clear the halt bits to let the micro engines run */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	/* emit the golden register state */
	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
   3158 
/*
 * (Re)start the CP ring buffer: soft reset the gfx blocks, program
 * the ring size, read/write pointers and writeback addresses, start
 * the CP and run a ring test.  Returns 0 on success or a negative
 * error from the ring test.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the dword count) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: disable rptr updates and scratch writes */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
   3225 
   3226 /*
   3227  * Core functions
   3228  */
   3229 static void evergreen_gpu_init(struct radeon_device *rdev)
   3230 {
   3231 	u32 gb_addr_config;
   3232 	u32 mc_shared_chmap __unused, mc_arb_ramcfg;
   3233 	u32 sx_debug_1;
   3234 	u32 smx_dc_ctl0;
   3235 	u32 sq_config;
   3236 	u32 sq_lds_resource_mgmt;
   3237 	u32 sq_gpr_resource_mgmt_1;
   3238 	u32 sq_gpr_resource_mgmt_2;
   3239 	u32 sq_gpr_resource_mgmt_3;
   3240 	u32 sq_thread_resource_mgmt;
   3241 	u32 sq_thread_resource_mgmt_2;
   3242 	u32 sq_stack_resource_mgmt_1;
   3243 	u32 sq_stack_resource_mgmt_2;
   3244 	u32 sq_stack_resource_mgmt_3;
   3245 	u32 vgt_cache_invalidation;
   3246 	u32 hdp_host_path_cntl, tmp;
   3247 	u32 disabled_rb_mask;
   3248 	int i, j, ps_thread_count;
   3249 
   3250 	switch (rdev->family) {
   3251 	case CHIP_CYPRESS:
   3252 	case CHIP_HEMLOCK:
   3253 		rdev->config.evergreen.num_ses = 2;
   3254 		rdev->config.evergreen.max_pipes = 4;
   3255 		rdev->config.evergreen.max_tile_pipes = 8;
   3256 		rdev->config.evergreen.max_simds = 10;
   3257 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
   3258 		rdev->config.evergreen.max_gprs = 256;
   3259 		rdev->config.evergreen.max_threads = 248;
   3260 		rdev->config.evergreen.max_gs_threads = 32;
   3261 		rdev->config.evergreen.max_stack_entries = 512;
   3262 		rdev->config.evergreen.sx_num_of_sets = 4;
   3263 		rdev->config.evergreen.sx_max_export_size = 256;
   3264 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3265 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3266 		rdev->config.evergreen.max_hw_contexts = 8;
   3267 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3268 
   3269 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
   3270 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3271 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3272 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
   3273 		break;
   3274 	case CHIP_JUNIPER:
   3275 		rdev->config.evergreen.num_ses = 1;
   3276 		rdev->config.evergreen.max_pipes = 4;
   3277 		rdev->config.evergreen.max_tile_pipes = 4;
   3278 		rdev->config.evergreen.max_simds = 10;
   3279 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
   3280 		rdev->config.evergreen.max_gprs = 256;
   3281 		rdev->config.evergreen.max_threads = 248;
   3282 		rdev->config.evergreen.max_gs_threads = 32;
   3283 		rdev->config.evergreen.max_stack_entries = 512;
   3284 		rdev->config.evergreen.sx_num_of_sets = 4;
   3285 		rdev->config.evergreen.sx_max_export_size = 256;
   3286 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3287 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3288 		rdev->config.evergreen.max_hw_contexts = 8;
   3289 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3290 
   3291 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
   3292 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3293 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3294 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
   3295 		break;
   3296 	case CHIP_REDWOOD:
   3297 		rdev->config.evergreen.num_ses = 1;
   3298 		rdev->config.evergreen.max_pipes = 4;
   3299 		rdev->config.evergreen.max_tile_pipes = 4;
   3300 		rdev->config.evergreen.max_simds = 5;
   3301 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
   3302 		rdev->config.evergreen.max_gprs = 256;
   3303 		rdev->config.evergreen.max_threads = 248;
   3304 		rdev->config.evergreen.max_gs_threads = 32;
   3305 		rdev->config.evergreen.max_stack_entries = 256;
   3306 		rdev->config.evergreen.sx_num_of_sets = 4;
   3307 		rdev->config.evergreen.sx_max_export_size = 256;
   3308 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3309 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3310 		rdev->config.evergreen.max_hw_contexts = 8;
   3311 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3312 
   3313 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
   3314 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3315 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3316 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
   3317 		break;
   3318 	case CHIP_CEDAR:
   3319 	default:
   3320 		rdev->config.evergreen.num_ses = 1;
   3321 		rdev->config.evergreen.max_pipes = 2;
   3322 		rdev->config.evergreen.max_tile_pipes = 2;
   3323 		rdev->config.evergreen.max_simds = 2;
   3324 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
   3325 		rdev->config.evergreen.max_gprs = 256;
   3326 		rdev->config.evergreen.max_threads = 192;
   3327 		rdev->config.evergreen.max_gs_threads = 16;
   3328 		rdev->config.evergreen.max_stack_entries = 256;
   3329 		rdev->config.evergreen.sx_num_of_sets = 4;
   3330 		rdev->config.evergreen.sx_max_export_size = 128;
   3331 		rdev->config.evergreen.sx_max_export_pos_size = 32;
   3332 		rdev->config.evergreen.sx_max_export_smx_size = 96;
   3333 		rdev->config.evergreen.max_hw_contexts = 4;
   3334 		rdev->config.evergreen.sq_num_cf_insts = 1;
   3335 
   3336 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
   3337 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3338 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3339 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
   3340 		break;
   3341 	case CHIP_PALM:
   3342 		rdev->config.evergreen.num_ses = 1;
   3343 		rdev->config.evergreen.max_pipes = 2;
   3344 		rdev->config.evergreen.max_tile_pipes = 2;
   3345 		rdev->config.evergreen.max_simds = 2;
   3346 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
   3347 		rdev->config.evergreen.max_gprs = 256;
   3348 		rdev->config.evergreen.max_threads = 192;
   3349 		rdev->config.evergreen.max_gs_threads = 16;
   3350 		rdev->config.evergreen.max_stack_entries = 256;
   3351 		rdev->config.evergreen.sx_num_of_sets = 4;
   3352 		rdev->config.evergreen.sx_max_export_size = 128;
   3353 		rdev->config.evergreen.sx_max_export_pos_size = 32;
   3354 		rdev->config.evergreen.sx_max_export_smx_size = 96;
   3355 		rdev->config.evergreen.max_hw_contexts = 4;
   3356 		rdev->config.evergreen.sq_num_cf_insts = 1;
   3357 
   3358 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
   3359 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3360 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3361 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
   3362 		break;
   3363 	case CHIP_SUMO:
   3364 		rdev->config.evergreen.num_ses = 1;
   3365 		rdev->config.evergreen.max_pipes = 4;
   3366 		rdev->config.evergreen.max_tile_pipes = 4;
   3367 		if (rdev->pdev->device == 0x9648)
   3368 			rdev->config.evergreen.max_simds = 3;
   3369 		else if ((rdev->pdev->device == 0x9647) ||
   3370 			 (rdev->pdev->device == 0x964a))
   3371 			rdev->config.evergreen.max_simds = 4;
   3372 		else
   3373 			rdev->config.evergreen.max_simds = 5;
   3374 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
   3375 		rdev->config.evergreen.max_gprs = 256;
   3376 		rdev->config.evergreen.max_threads = 248;
   3377 		rdev->config.evergreen.max_gs_threads = 32;
   3378 		rdev->config.evergreen.max_stack_entries = 256;
   3379 		rdev->config.evergreen.sx_num_of_sets = 4;
   3380 		rdev->config.evergreen.sx_max_export_size = 256;
   3381 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3382 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3383 		rdev->config.evergreen.max_hw_contexts = 8;
   3384 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3385 
   3386 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
   3387 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3388 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3389 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
   3390 		break;
   3391 	case CHIP_SUMO2:
   3392 		rdev->config.evergreen.num_ses = 1;
   3393 		rdev->config.evergreen.max_pipes = 4;
   3394 		rdev->config.evergreen.max_tile_pipes = 4;
   3395 		rdev->config.evergreen.max_simds = 2;
   3396 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
   3397 		rdev->config.evergreen.max_gprs = 256;
   3398 		rdev->config.evergreen.max_threads = 248;
   3399 		rdev->config.evergreen.max_gs_threads = 32;
   3400 		rdev->config.evergreen.max_stack_entries = 512;
   3401 		rdev->config.evergreen.sx_num_of_sets = 4;
   3402 		rdev->config.evergreen.sx_max_export_size = 256;
   3403 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3404 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3405 		rdev->config.evergreen.max_hw_contexts = 4;
   3406 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3407 
   3408 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
   3409 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3410 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3411 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
   3412 		break;
   3413 	case CHIP_BARTS:
   3414 		rdev->config.evergreen.num_ses = 2;
   3415 		rdev->config.evergreen.max_pipes = 4;
   3416 		rdev->config.evergreen.max_tile_pipes = 8;
   3417 		rdev->config.evergreen.max_simds = 7;
   3418 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
   3419 		rdev->config.evergreen.max_gprs = 256;
   3420 		rdev->config.evergreen.max_threads = 248;
   3421 		rdev->config.evergreen.max_gs_threads = 32;
   3422 		rdev->config.evergreen.max_stack_entries = 512;
   3423 		rdev->config.evergreen.sx_num_of_sets = 4;
   3424 		rdev->config.evergreen.sx_max_export_size = 256;
   3425 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3426 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3427 		rdev->config.evergreen.max_hw_contexts = 8;
   3428 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3429 
   3430 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
   3431 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3432 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3433 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
   3434 		break;
   3435 	case CHIP_TURKS:
   3436 		rdev->config.evergreen.num_ses = 1;
   3437 		rdev->config.evergreen.max_pipes = 4;
   3438 		rdev->config.evergreen.max_tile_pipes = 4;
   3439 		rdev->config.evergreen.max_simds = 6;
   3440 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
   3441 		rdev->config.evergreen.max_gprs = 256;
   3442 		rdev->config.evergreen.max_threads = 248;
   3443 		rdev->config.evergreen.max_gs_threads = 32;
   3444 		rdev->config.evergreen.max_stack_entries = 256;
   3445 		rdev->config.evergreen.sx_num_of_sets = 4;
   3446 		rdev->config.evergreen.sx_max_export_size = 256;
   3447 		rdev->config.evergreen.sx_max_export_pos_size = 64;
   3448 		rdev->config.evergreen.sx_max_export_smx_size = 192;
   3449 		rdev->config.evergreen.max_hw_contexts = 8;
   3450 		rdev->config.evergreen.sq_num_cf_insts = 2;
   3451 
   3452 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
   3453 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3454 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3455 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
   3456 		break;
   3457 	case CHIP_CAICOS:
   3458 		rdev->config.evergreen.num_ses = 1;
   3459 		rdev->config.evergreen.max_pipes = 2;
   3460 		rdev->config.evergreen.max_tile_pipes = 2;
   3461 		rdev->config.evergreen.max_simds = 2;
   3462 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
   3463 		rdev->config.evergreen.max_gprs = 256;
   3464 		rdev->config.evergreen.max_threads = 192;
   3465 		rdev->config.evergreen.max_gs_threads = 16;
   3466 		rdev->config.evergreen.max_stack_entries = 256;
   3467 		rdev->config.evergreen.sx_num_of_sets = 4;
   3468 		rdev->config.evergreen.sx_max_export_size = 128;
   3469 		rdev->config.evergreen.sx_max_export_pos_size = 32;
   3470 		rdev->config.evergreen.sx_max_export_smx_size = 96;
   3471 		rdev->config.evergreen.max_hw_contexts = 4;
   3472 		rdev->config.evergreen.sq_num_cf_insts = 1;
   3473 
   3474 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
   3475 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
   3476 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
   3477 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
   3478 		break;
   3479 	}
   3480 
   3481 	/* Initialize HDP */
   3482 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
   3483 		WREG32((0x2c14 + j), 0x00000000);
   3484 		WREG32((0x2c18 + j), 0x00000000);
   3485 		WREG32((0x2c1c + j), 0x00000000);
   3486 		WREG32((0x2c20 + j), 0x00000000);
   3487 		WREG32((0x2c24 + j), 0x00000000);
   3488 	}
   3489 
   3490 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
   3491 	WREG32(SRBM_INT_CNTL, 0x1);
   3492 	WREG32(SRBM_INT_ACK, 0x1);
   3493 
   3494 	evergreen_fix_pci_max_read_req_size(rdev);
   3495 
   3496 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
   3497 	if ((rdev->family == CHIP_PALM) ||
   3498 	    (rdev->family == CHIP_SUMO) ||
   3499 	    (rdev->family == CHIP_SUMO2))
   3500 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
   3501 	else
   3502 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
   3503 
   3504 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
   3505 	 * not have bank info, so create a custom tiling dword.
   3506 	 * bits 3:0   num_pipes
   3507 	 * bits 7:4   num_banks
   3508 	 * bits 11:8  group_size
   3509 	 * bits 15:12 row_size
   3510 	 */
   3511 	rdev->config.evergreen.tile_config = 0;
   3512 	switch (rdev->config.evergreen.max_tile_pipes) {
   3513 	case 1:
   3514 	default:
   3515 		rdev->config.evergreen.tile_config |= (0 << 0);
   3516 		break;
   3517 	case 2:
   3518 		rdev->config.evergreen.tile_config |= (1 << 0);
   3519 		break;
   3520 	case 4:
   3521 		rdev->config.evergreen.tile_config |= (2 << 0);
   3522 		break;
   3523 	case 8:
   3524 		rdev->config.evergreen.tile_config |= (3 << 0);
   3525 		break;
   3526 	}
   3527 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
   3528 	if (rdev->flags & RADEON_IS_IGP)
   3529 		rdev->config.evergreen.tile_config |= 1 << 4;
   3530 	else {
   3531 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
   3532 		case 0: /* four banks */
   3533 			rdev->config.evergreen.tile_config |= 0 << 4;
   3534 			break;
   3535 		case 1: /* eight banks */
   3536 			rdev->config.evergreen.tile_config |= 1 << 4;
   3537 			break;
   3538 		case 2: /* sixteen banks */
   3539 		default:
   3540 			rdev->config.evergreen.tile_config |= 2 << 4;
   3541 			break;
   3542 		}
   3543 	}
   3544 	rdev->config.evergreen.tile_config |= 0 << 8;
   3545 	rdev->config.evergreen.tile_config |=
   3546 		((gb_addr_config & 0x30000000) >> 28) << 12;
   3547 
   3548 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
   3549 		u32 efuse_straps_4;
   3550 		u32 efuse_straps_3;
   3551 
   3552 		efuse_straps_4 = RREG32_RCU(0x204);
   3553 		efuse_straps_3 = RREG32_RCU(0x203);
   3554 		tmp = (((efuse_straps_4 & 0xf) << 4) |
   3555 		      ((efuse_straps_3 & 0xf0000000) >> 28));
   3556 	} else {
   3557 		tmp = 0;
   3558 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
   3559 			u32 rb_disable_bitmap;
   3560 
   3561 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
   3562 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
   3563 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
   3564 			tmp <<= 4;
   3565 			tmp |= rb_disable_bitmap;
   3566 		}
   3567 	}
   3568 	/* enabled rb are just the one not disabled :) */
   3569 	disabled_rb_mask = tmp;
   3570 	tmp = 0;
   3571 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
   3572 		tmp |= (1 << i);
   3573 	/* if all the backends are disabled, fix it up here */
   3574 	if ((disabled_rb_mask & tmp) == tmp) {
   3575 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
   3576 			disabled_rb_mask &= ~(1 << i);
   3577 	}
   3578 
   3579 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
   3580 		u32 simd_disable_bitmap;
   3581 
   3582 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
   3583 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
   3584 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
   3585 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
   3586 		tmp <<= 16;
   3587 		tmp |= simd_disable_bitmap;
   3588 	}
   3589 	rdev->config.evergreen.active_simds = hweight32(~tmp);
   3590 
   3591 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
   3592 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
   3593 
   3594 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
   3595 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
   3596 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
   3597 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
   3598 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
   3599 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
   3600 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
   3601 
   3602 	if ((rdev->config.evergreen.max_backends == 1) &&
   3603 	    (rdev->flags & RADEON_IS_IGP)) {
   3604 		if ((disabled_rb_mask & 3) == 1) {
   3605 			/* RB0 disabled, RB1 enabled */
   3606 			tmp = 0x11111111;
   3607 		} else {
   3608 			/* RB1 disabled, RB0 enabled */
   3609 			tmp = 0x00000000;
   3610 		}
   3611 	} else {
   3612 		tmp = gb_addr_config & NUM_PIPES_MASK;
   3613 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
   3614 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
   3615 	}
   3616 	WREG32(GB_BACKEND_MAP, tmp);
   3617 
   3618 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
   3619 	WREG32(CGTS_TCC_DISABLE, 0);
   3620 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
   3621 	WREG32(CGTS_USER_TCC_DISABLE, 0);
   3622 
   3623 	/* set HW defaults for 3D engine */
   3624 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
   3625 				     ROQ_IB2_START(0x2b)));
   3626 
   3627 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
   3628 
   3629 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
   3630 			     SYNC_GRADIENT |
   3631 			     SYNC_WALKER |
   3632 			     SYNC_ALIGNER));
   3633 
   3634 	sx_debug_1 = RREG32(SX_DEBUG_1);
   3635 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
   3636 	WREG32(SX_DEBUG_1, sx_debug_1);
   3637 
   3638 
   3639 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
   3640 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
   3641 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
   3642 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
   3643 
   3644 	if (rdev->family <= CHIP_SUMO2)
   3645 		WREG32(SMX_SAR_CTL0, 0x00010000);
   3646 
   3647 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
   3648 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
   3649 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
   3650 
   3651 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
   3652 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
   3653 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
   3654 
   3655 	WREG32(VGT_NUM_INSTANCES, 1);
   3656 	WREG32(SPI_CONFIG_CNTL, 0);
   3657 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
   3658 	WREG32(CP_PERFMON_CNTL, 0);
   3659 
   3660 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
   3661 				  FETCH_FIFO_HIWATER(0x4) |
   3662 				  DONE_FIFO_HIWATER(0xe0) |
   3663 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
   3664 
   3665 	sq_config = RREG32(SQ_CONFIG);
   3666 	sq_config &= ~(PS_PRIO(3) |
   3667 		       VS_PRIO(3) |
   3668 		       GS_PRIO(3) |
   3669 		       ES_PRIO(3));
   3670 	sq_config |= (VC_ENABLE |
   3671 		      EXPORT_SRC_C |
   3672 		      PS_PRIO(0) |
   3673 		      VS_PRIO(1) |
   3674 		      GS_PRIO(2) |
   3675 		      ES_PRIO(3));
   3676 
   3677 	switch (rdev->family) {
   3678 	case CHIP_CEDAR:
   3679 	case CHIP_PALM:
   3680 	case CHIP_SUMO:
   3681 	case CHIP_SUMO2:
   3682 	case CHIP_CAICOS:
   3683 		/* no vertex cache */
   3684 		sq_config &= ~VC_ENABLE;
   3685 		break;
   3686 	default:
   3687 		break;
   3688 	}
   3689 
   3690 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
   3691 
   3692 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
   3693 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
   3694 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
   3695 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
   3696 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
   3697 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
   3698 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
   3699 
   3700 	switch (rdev->family) {
   3701 	case CHIP_CEDAR:
   3702 	case CHIP_PALM:
   3703 	case CHIP_SUMO:
   3704 	case CHIP_SUMO2:
   3705 		ps_thread_count = 96;
   3706 		break;
   3707 	default:
   3708 		ps_thread_count = 128;
   3709 		break;
   3710 	}
   3711 
   3712 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
   3713 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
   3714 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
   3715 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
   3716 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
   3717 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
   3718 
   3719 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3720 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3721 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3722 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3723 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3724 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
   3725 
   3726 	WREG32(SQ_CONFIG, sq_config);
   3727 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
   3728 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
   3729 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
   3730 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
   3731 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
   3732 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
   3733 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
   3734 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
   3735 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
   3736 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
   3737 
   3738 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
   3739 					  FORCE_EOV_MAX_REZ_CNT(255)));
   3740 
   3741 	switch (rdev->family) {
   3742 	case CHIP_CEDAR:
   3743 	case CHIP_PALM:
   3744 	case CHIP_SUMO:
   3745 	case CHIP_SUMO2:
   3746 	case CHIP_CAICOS:
   3747 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
   3748 		break;
   3749 	default:
   3750 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
   3751 		break;
   3752 	}
   3753 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
   3754 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
   3755 
   3756 	WREG32(VGT_GS_VERTEX_REUSE, 16);
   3757 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
   3758 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
   3759 
   3760 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
   3761 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
   3762 
   3763 	WREG32(CB_PERF_CTR0_SEL_0, 0);
   3764 	WREG32(CB_PERF_CTR0_SEL_1, 0);
   3765 	WREG32(CB_PERF_CTR1_SEL_0, 0);
   3766 	WREG32(CB_PERF_CTR1_SEL_1, 0);
   3767 	WREG32(CB_PERF_CTR2_SEL_0, 0);
   3768 	WREG32(CB_PERF_CTR2_SEL_1, 0);
   3769 	WREG32(CB_PERF_CTR3_SEL_0, 0);
   3770 	WREG32(CB_PERF_CTR3_SEL_1, 0);
   3771 
   3772 	/* clear render buffer base addresses */
   3773 	WREG32(CB_COLOR0_BASE, 0);
   3774 	WREG32(CB_COLOR1_BASE, 0);
   3775 	WREG32(CB_COLOR2_BASE, 0);
   3776 	WREG32(CB_COLOR3_BASE, 0);
   3777 	WREG32(CB_COLOR4_BASE, 0);
   3778 	WREG32(CB_COLOR5_BASE, 0);
   3779 	WREG32(CB_COLOR6_BASE, 0);
   3780 	WREG32(CB_COLOR7_BASE, 0);
   3781 	WREG32(CB_COLOR8_BASE, 0);
   3782 	WREG32(CB_COLOR9_BASE, 0);
   3783 	WREG32(CB_COLOR10_BASE, 0);
   3784 	WREG32(CB_COLOR11_BASE, 0);
   3785 
   3786 	/* set the shader const cache sizes to 0 */
   3787 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
   3788 		WREG32(i, 0);
   3789 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
   3790 		WREG32(i, 0);
   3791 
   3792 	tmp = RREG32(HDP_MISC_CNTL);
   3793 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
   3794 	WREG32(HDP_MISC_CNTL, tmp);
   3795 
   3796 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
   3797 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
   3798 
   3799 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
   3800 
   3801 	udelay(50);
   3802 
   3803 }
   3804 
   3805 int evergreen_mc_init(struct radeon_device *rdev)
   3806 {
   3807 	u32 tmp;
   3808 	int chansize, numchan;
   3809 
   3810 	/* Get VRAM informations */
   3811 	rdev->mc.vram_is_ddr = true;
   3812 	if ((rdev->family == CHIP_PALM) ||
   3813 	    (rdev->family == CHIP_SUMO) ||
   3814 	    (rdev->family == CHIP_SUMO2))
   3815 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
   3816 	else
   3817 		tmp = RREG32(MC_ARB_RAMCFG);
   3818 	if (tmp & CHANSIZE_OVERRIDE) {
   3819 		chansize = 16;
   3820 	} else if (tmp & CHANSIZE_MASK) {
   3821 		chansize = 64;
   3822 	} else {
   3823 		chansize = 32;
   3824 	}
   3825 	tmp = RREG32(MC_SHARED_CHMAP);
   3826 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
   3827 	case 0:
   3828 	default:
   3829 		numchan = 1;
   3830 		break;
   3831 	case 1:
   3832 		numchan = 2;
   3833 		break;
   3834 	case 2:
   3835 		numchan = 4;
   3836 		break;
   3837 	case 3:
   3838 		numchan = 8;
   3839 		break;
   3840 	}
   3841 	rdev->mc.vram_width = numchan * chansize;
   3842 	/* Could aper size report 0 ? */
   3843 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
   3844 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
   3845 	/* Setup GPU memory space */
   3846 	if ((rdev->family == CHIP_PALM) ||
   3847 	    (rdev->family == CHIP_SUMO) ||
   3848 	    (rdev->family == CHIP_SUMO2)) {
   3849 		/* size in bytes on fusion */
   3850 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
   3851 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
   3852 	} else {
   3853 		/* size in MB on evergreen/cayman/tn */
   3854 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
   3855 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
   3856 	}
   3857 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
   3858 	r700_vram_gtt_location(rdev, &rdev->mc);
   3859 	radeon_update_bandwidth_info(rdev);
   3860 
   3861 	return 0;
   3862 }
   3863 
   3864 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
   3865 {
   3866 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
   3867 		RREG32(GRBM_STATUS));
   3868 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
   3869 		RREG32(GRBM_STATUS_SE0));
   3870 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
   3871 		RREG32(GRBM_STATUS_SE1));
   3872 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
   3873 		RREG32(SRBM_STATUS));
   3874 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
   3875 		RREG32(SRBM_STATUS2));
   3876 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
   3877 		RREG32(CP_STALLED_STAT1));
   3878 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
   3879 		RREG32(CP_STALLED_STAT2));
   3880 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
   3881 		RREG32(CP_BUSY_STAT));
   3882 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
   3883 		RREG32(CP_STAT));
   3884 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
   3885 		RREG32(DMA_STATUS_REG));
   3886 	if (rdev->family >= CHIP_CAYMAN) {
   3887 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
   3888 			 RREG32(DMA_STATUS_REG + 0x800));
   3889 	}
   3890 }
   3891 
   3892 bool evergreen_is_display_hung(struct radeon_device *rdev)
   3893 {
   3894 	u32 crtc_hung = 0;
   3895 	u32 crtc_status[6];
   3896 	u32 i, j, tmp;
   3897 
   3898 	for (i = 0; i < rdev->num_crtc; i++) {
   3899 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
   3900 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
   3901 			crtc_hung |= (1 << i);
   3902 		}
   3903 	}
   3904 
   3905 	for (j = 0; j < 10; j++) {
   3906 		for (i = 0; i < rdev->num_crtc; i++) {
   3907 			if (crtc_hung & (1 << i)) {
   3908 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
   3909 				if (tmp != crtc_status[i])
   3910 					crtc_hung &= ~(1 << i);
   3911 			}
   3912 		}
   3913 		if (crtc_hung == 0)
   3914 			return false;
   3915 		udelay(100);
   3916 	}
   3917 
   3918 	return true;
   3919 }
   3920 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM, DMA, SRBM and VM L2 status registers and accumulates a
 * mask of RADEON_RESET_* flags, one for each engine that reports itself
 * busy or that has a request pending.  The display controllers are checked
 * separately via evergreen_is_display_hung().  A busy MC is deliberately
 * cleared from the mask at the end, since it is most likely just busy
 * rather than hung.
 *
 * Returns the accumulated reset mask (0 means nothing appears hung).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	/* any fixed-function graphics block busy -> reset the GFX pipe */
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	/* command processor busy or fetch requests pending */
	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* any of the memory controller sub-blocks busy */
	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	/* display hang is detected via stuck per-CRTC HV counters */
	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
   3989 
/**
 * evergreen_gpu_soft_reset - soft-reset the engines named in @reset_mask
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting the blocks to reset
 *
 * Halts the CP (and the DMA ring when DMA is being reset), stops the MC
 * while saving its state, translates the generic reset flags into
 * GRBM_SOFT_RESET / SRBM_SOFT_RESET bits, pulses those bits (set, hold,
 * clear), then restores the MC.  Status registers are dumped before and
 * after so the kernel log shows what was hung.  No-op when @reset_mask
 * is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop memory traffic before pulsing the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* map the generic reset flags onto GRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* map the remaining flags onto SRBM soft reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only attempted on non-IGP (discrete) parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, read back, hold, then clear */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* same set/hold/clear pulse for the SRBM reset bits */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
   4103 
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 * @rdev: radeon_device pointer
 *
 * Heavier-weight fallback used when a soft reset is not enough: halts the
 * CP, DMA ring and RLC, switches mclk/sclk to bypass mode, disables bus
 * mastering, saves and stops the MC, asserts the PCI config reset, and
 * finally polls CONFIG_MEMSIZE until the ASIC responds again (or the
 * usec_timeout budget is exhausted).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset; register reads return all 1s
	 * until then */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
   4145 
   4146 int evergreen_asic_reset(struct radeon_device *rdev)
   4147 {
   4148 	u32 reset_mask;
   4149 
   4150 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
   4151 
   4152 	if (reset_mask)
   4153 		r600_set_bios_scratch_engine_hung(rdev, true);
   4154 
   4155 	/* try soft reset */
   4156 	evergreen_gpu_soft_reset(rdev, reset_mask);
   4157 
   4158 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
   4159 
   4160 	/* try pci config reset */
   4161 	if (reset_mask && radeon_hard_reset)
   4162 		evergreen_gpu_pci_config_reset(rdev);
   4163 
   4164 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
   4165 
   4166 	if (!reset_mask)
   4167 		r600_set_bios_scratch_engine_hung(rdev, false);
   4168 
   4169 	return 0;
   4170 }
   4171 
   4172 /**
   4173  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
   4174  *
   4175  * @rdev: radeon_device pointer
   4176  * @ring: radeon_ring structure holding ring information
   4177  *
   4178  * Check if the GFX engine is locked up.
   4179  * Returns true if the engine appears to be locked up, false if not.
   4180  */
   4181 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
   4182 {
   4183 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
   4184 
   4185 	if (!(reset_mask & (RADEON_RESET_GFX |
   4186 			    RADEON_RESET_COMPUTE |
   4187 			    RADEON_RESET_CP))) {
   4188 		radeon_ring_lockup_update(rdev, ring);
   4189 		return false;
   4190 	}
   4191 	return radeon_ring_test_lockup(rdev, ring);
   4192 }
   4193 
   4194 /*
   4195  * RLC
   4196  */
   4197 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
   4198 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
   4199 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the three RLC-related buffer objects allocated by
 * sumo_rlc_init(): the save/restore block, the clear state block and
 * the CP table.  Each pointer is NULLed afterwards so the function is
 * safe to call more than once (and is used as the error-path cleanup
 * in sumo_rlc_init()).
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
   4240 
   4241 #define CP_ME_TABLE_SIZE    96
   4242 
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins (in VRAM) and fills up to three buffer objects used by
 * the RLC, depending on what the ASIC-specific setup provided:
 *  - a register save/restore block (if rdev->rlc.reg_list is set),
 *  - a clear state block (if rdev->rlc.cs_data is set),
 *  - a CP power-gating table (if rdev->rlc.cp_table_size is non-zero).
 * The on-buffer layout differs per family (CIK / SI / earlier IGPs).
 * On any failure every already-allocated object is torn down via
 * sumo_rlc_fini() and the error code is returned; returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK parts need extra room in the save/restore buffer */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block: allocate on first use, reuse on resume */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)__UNVOLATILE(&rdev->rlc.sr_ptr));
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* pack two dword-offsets (byte addr >> 2) per entry */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block: compute the required size per family */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI keeps a 256-byte header in front of the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* pre-SI: 3 header dwords per extent plus the register data */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)__UNVOLATILE(&rdev->rlc.cs_ptr));
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address + size of the CSB that follows it */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* pre-SI: header block of descriptors followed by the
			 * raw register values they point at */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* dw0: low 32 bits of the data address */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* dw1: byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* dw2: flag + byte length of the extent */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table, filled in by cik_init_cp_pg_table() */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)__UNVOLATILE(&rdev->rlc.cp_table_ptr));
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
   4459 
   4460 static void evergreen_rlc_start(struct radeon_device *rdev)
   4461 {
   4462 	u32 mask = RLC_ENABLE;
   4463 
   4464 	if (rdev->flags & RADEON_IS_IGP) {
   4465 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
   4466 	}
   4467 
   4468 	WREG32(RLC_CNTL, mask);
   4469 }
   4470 
/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs its base/pointer registers (IGP parts also
 * get the save/restore and clear-state buffer addresses set up by
 * sumo_rlc_init()), uploads the big-endian firmware image word by word
 * through RLC_UCODE_ADDR/DATA, then restarts the RLC.
 * Returns -EINVAL when no RLC firmware has been loaded, 0 on success.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only enable load balancing when all simds are active */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* point the RLC at the buffers set up by sumo_rlc_init() */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* firmware image is stored big-endian; upload size depends on family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
   4537 
   4538 /* Interrupts */
   4539 
   4540 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
   4541 {
   4542 	if (crtc >= rdev->num_crtc)
   4543 		return 0;
   4544 	else
   4545 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
   4546 }
   4547 
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Masks every interrupt source in hardware: CP rings (per-ring on
 * Cayman+), the DMA engine(s), GRBM/SRBM, all CRTC vblank/vline and
 * pageflip sources, DAC autodetect and the six HPD pads.  Used to put
 * the chip into a known quiet state before (re)programming interrupts.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* mask vblank/vline interrupts on every populated crtc */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* mask pageflip interrupts */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* mask HPD interrupts but keep the configured pad polarity */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
   4606 
/**
 * evergreen_irq_set - program the chip's interrupt enables from driver state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the enable masks for every interrupt source the driver tracks
 * (CP rings, DMA engines, vblank per crtc, HPD pads, HDMI/AFMT audio
 * and the thermal sensor) from rdev->irq, then writes them all to
 * hardware in one pass and does a posting read.  If the IH ring is not
 * enabled, everything is disabled instead.
 * Returns -EINVAL when no IRQ handler is installed, otherwise 0.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read current values, with the enable bits this function controls
	 * masked out so they can be rebuilt from driver state below */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine only exists on cayman+ */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank interrupts are wanted for either an explicit vblank wait
	 * or a pending page flip on that crtc */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks built - now write everything to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts stay unconditionally enabled per crtc */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
   4836 
   4837 static void evergreen_irq_ack(struct radeon_device *rdev)
   4838 {
   4839 	u32 tmp;
   4840 
   4841 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
   4842 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
   4843 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
   4844 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
   4845 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
   4846 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
   4847 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
   4848 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
   4849 	if (rdev->num_crtc >= 4) {
   4850 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
   4851 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
   4852 	}
   4853 	if (rdev->num_crtc >= 6) {
   4854 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
   4855 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
   4856 	}
   4857 
   4858 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
   4859 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
   4860 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
   4861 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
   4862 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
   4863 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
   4864 
   4865 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
   4866 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4867 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
   4868 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4869 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
   4870 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
   4871 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
   4872 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
   4873 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
   4874 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
   4875 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
   4876 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
   4877 
   4878 	if (rdev->num_crtc >= 4) {
   4879 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
   4880 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4881 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
   4882 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4883 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
   4884 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
   4885 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
   4886 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
   4887 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
   4888 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
   4889 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
   4890 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
   4891 	}
   4892 
   4893 	if (rdev->num_crtc >= 6) {
   4894 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
   4895 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4896 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
   4897 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
   4898 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
   4899 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
   4900 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
   4901 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
   4902 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
   4903 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
   4904 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
   4905 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
   4906 	}
   4907 
   4908 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
   4909 		tmp = RREG32(DC_HPD1_INT_CONTROL);
   4910 		tmp |= DC_HPDx_INT_ACK;
   4911 		WREG32(DC_HPD1_INT_CONTROL, tmp);
   4912 	}
   4913 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
   4914 		tmp = RREG32(DC_HPD2_INT_CONTROL);
   4915 		tmp |= DC_HPDx_INT_ACK;
   4916 		WREG32(DC_HPD2_INT_CONTROL, tmp);
   4917 	}
   4918 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
   4919 		tmp = RREG32(DC_HPD3_INT_CONTROL);
   4920 		tmp |= DC_HPDx_INT_ACK;
   4921 		WREG32(DC_HPD3_INT_CONTROL, tmp);
   4922 	}
   4923 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
   4924 		tmp = RREG32(DC_HPD4_INT_CONTROL);
   4925 		tmp |= DC_HPDx_INT_ACK;
   4926 		WREG32(DC_HPD4_INT_CONTROL, tmp);
   4927 	}
   4928 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
   4929 		tmp = RREG32(DC_HPD5_INT_CONTROL);
   4930 		tmp |= DC_HPDx_INT_ACK;
   4931 		WREG32(DC_HPD5_INT_CONTROL, tmp);
   4932 	}
   4933 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
   4934 		tmp = RREG32(DC_HPD6_INT_CONTROL);
   4935 		tmp |= DC_HPDx_INT_ACK;
   4936 		WREG32(DC_HPD6_INT_CONTROL, tmp);
   4937 	}
   4938 
   4939 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
   4940 		tmp = RREG32(DC_HPD1_INT_CONTROL);
   4941 		tmp |= DC_HPDx_RX_INT_ACK;
   4942 		WREG32(DC_HPD1_INT_CONTROL, tmp);
   4943 	}
   4944 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
   4945 		tmp = RREG32(DC_HPD2_INT_CONTROL);
   4946 		tmp |= DC_HPDx_RX_INT_ACK;
   4947 		WREG32(DC_HPD2_INT_CONTROL, tmp);
   4948 	}
   4949 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
   4950 		tmp = RREG32(DC_HPD3_INT_CONTROL);
   4951 		tmp |= DC_HPDx_RX_INT_ACK;
   4952 		WREG32(DC_HPD3_INT_CONTROL, tmp);
   4953 	}
   4954 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
   4955 		tmp = RREG32(DC_HPD4_INT_CONTROL);
   4956 		tmp |= DC_HPDx_RX_INT_ACK;
   4957 		WREG32(DC_HPD4_INT_CONTROL, tmp);
   4958 	}
   4959 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
   4960 		tmp = RREG32(DC_HPD5_INT_CONTROL);
   4961 		tmp |= DC_HPDx_RX_INT_ACK;
   4962 		WREG32(DC_HPD5_INT_CONTROL, tmp);
   4963 	}
   4964 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
   4965 		tmp = RREG32(DC_HPD6_INT_CONTROL);
   4966 		tmp |= DC_HPDx_RX_INT_ACK;
   4967 		WREG32(DC_HPD6_INT_CONTROL, tmp);
   4968 	}
   4969 
   4970 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
   4971 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
   4972 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4973 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
   4974 	}
   4975 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
   4976 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
   4977 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4978 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
   4979 	}
   4980 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
   4981 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
   4982 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4983 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
   4984 	}
   4985 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
   4986 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
   4987 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4988 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
   4989 	}
   4990 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
   4991 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
   4992 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4993 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
   4994 	}
   4995 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
   4996 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
   4997 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
   4998 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
   4999 	}
   5000 }
   5001 
/*
 * evergreen_irq_disable - fully quiesce interrupt generation
 *
 * Masks the interrupt controller, waits for any in-flight interrupt to
 * land, acknowledges whatever is pending, and finally disables the
 * per-source interrupt state.  The order matters: acking before the
 * mdelay() could miss an interrupt that was already being raised.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
   5010 
/*
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * Disables and acknowledges all interrupts, then stops the RLC
 * (interrupt/graphics microcontroller) so nothing fires while the
 * device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
   5016 
   5017 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
   5018 {
   5019 	u32 wptr, tmp;
   5020 
   5021 	if (rdev->wb.enabled)
   5022 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
   5023 	else
   5024 		wptr = RREG32(IH_RB_WPTR);
   5025 
   5026 	if (wptr & RB_OVERFLOW) {
   5027 		wptr &= ~RB_OVERFLOW;
   5028 		/* When a ring buffer overflow happen start parsing interrupt
   5029 		 * from the last not overwritten vector (wptr + 16). Hopefully
   5030 		 * this should allow us to catchup.
   5031 		 */
   5032 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
   5033 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
   5034 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
   5035 		tmp = RREG32(IH_RB_CNTL);
   5036 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
   5037 		WREG32(IH_RB_CNTL, tmp);
   5038 	}
   5039 	return (wptr & rdev->ih.ptr_mask);
   5040 }
   5041 
   5042 int evergreen_irq_process(struct radeon_device *rdev)
   5043 {
   5044 	u32 wptr;
   5045 	u32 rptr;
   5046 	u32 src_id, src_data;
   5047 	u32 ring_index;
   5048 	bool queue_hotplug = false;
   5049 	bool queue_hdmi = false;
   5050 	bool queue_dp = false;
   5051 	bool queue_thermal = false;
   5052 	u32 status, addr;
   5053 
   5054 	if (!rdev->ih.enabled || rdev->shutdown)
   5055 		return IRQ_NONE;
   5056 
   5057 	wptr = evergreen_get_ih_wptr(rdev);
   5058 
   5059 restart_ih:
   5060 	/* is somebody else already processing irqs? */
   5061 	if (atomic_xchg(&rdev->ih.lock, 1))
   5062 		return IRQ_NONE;
   5063 
   5064 	rptr = rdev->ih.rptr;
   5065 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
   5066 
   5067 	/* Order reading of wptr vs. reading of IH ring data */
   5068 	rmb();
   5069 
   5070 	/* display interrupts */
   5071 	evergreen_irq_ack(rdev);
   5072 
   5073 	while (rptr != wptr) {
   5074 		/* wptr/rptr are in bytes! */
   5075 		ring_index = rptr / 4;
   5076 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
   5077 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
   5078 
   5079 		switch (src_id) {
   5080 		case 1: /* D1 vblank/vline */
   5081 			switch (src_data) {
   5082 			case 0: /* D1 vblank */
   5083 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
   5084 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
   5085 
   5086 				if (rdev->irq.crtc_vblank_int[0]) {
   5087 					drm_handle_vblank(rdev->ddev, 0);
   5088 #ifdef __NetBSD__
   5089 					spin_lock(&rdev->irq.vblank_lock);
   5090 					rdev->pm.vblank_sync = true;
   5091 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5092 					spin_unlock(&rdev->irq.vblank_lock);
   5093 #else
   5094 					rdev->pm.vblank_sync = true;
   5095 					wake_up(&rdev->irq.vblank_queue);
   5096 #endif
   5097 				}
   5098 				if (atomic_read(&rdev->irq.pflip[0]))
   5099 					radeon_crtc_handle_vblank(rdev, 0);
   5100 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
   5101 				DRM_DEBUG("IH: D1 vblank\n");
   5102 
   5103 				break;
   5104 			case 1: /* D1 vline */
   5105 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
   5106 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
   5107 
   5108 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
   5109 				DRM_DEBUG("IH: D1 vline\n");
   5110 
   5111 				break;
   5112 			default:
   5113 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5114 				break;
   5115 			}
   5116 			break;
   5117 		case 2: /* D2 vblank/vline */
   5118 			switch (src_data) {
   5119 			case 0: /* D2 vblank */
   5120 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
   5121 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
   5122 
   5123 				if (rdev->irq.crtc_vblank_int[1]) {
   5124 					drm_handle_vblank(rdev->ddev, 1);
   5125 #ifdef __NetBSD__
   5126 					spin_lock(&rdev->irq.vblank_lock);
   5127 					rdev->pm.vblank_sync = true;
   5128 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5129 					spin_unlock(&rdev->irq.vblank_lock);
   5130 #else
   5131 					rdev->pm.vblank_sync = true;
   5132 					wake_up(&rdev->irq.vblank_queue);
   5133 #endif
   5134 				}
   5135 				if (atomic_read(&rdev->irq.pflip[1]))
   5136 					radeon_crtc_handle_vblank(rdev, 1);
   5137 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
   5138 				DRM_DEBUG("IH: D2 vblank\n");
   5139 
   5140 				break;
   5141 			case 1: /* D2 vline */
   5142 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
   5143 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
   5144 
   5145 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
   5146 				DRM_DEBUG("IH: D2 vline\n");
   5147 
   5148 				break;
   5149 			default:
   5150 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5151 				break;
   5152 			}
   5153 			break;
   5154 		case 3: /* D3 vblank/vline */
   5155 			switch (src_data) {
   5156 			case 0: /* D3 vblank */
   5157 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
   5158 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
   5159 
   5160 				if (rdev->irq.crtc_vblank_int[2]) {
   5161 					drm_handle_vblank(rdev->ddev, 2);
   5162 #ifdef __NetBSD__
   5163 					spin_lock(&rdev->irq.vblank_lock);
   5164 					rdev->pm.vblank_sync = true;
   5165 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5166 					spin_unlock(&rdev->irq.vblank_lock);
   5167 #else
   5168 					rdev->pm.vblank_sync = true;
   5169 					wake_up(&rdev->irq.vblank_queue);
   5170 #endif
   5171 				}
   5172 				if (atomic_read(&rdev->irq.pflip[2]))
   5173 					radeon_crtc_handle_vblank(rdev, 2);
   5174 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
   5175 				DRM_DEBUG("IH: D3 vblank\n");
   5176 
   5177 				break;
   5178 			case 1: /* D3 vline */
   5179 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
   5180 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
   5181 
   5182 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
   5183 				DRM_DEBUG("IH: D3 vline\n");
   5184 
   5185 				break;
   5186 			default:
   5187 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5188 				break;
   5189 			}
   5190 			break;
   5191 		case 4: /* D4 vblank/vline */
   5192 			switch (src_data) {
   5193 			case 0: /* D4 vblank */
   5194 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
   5195 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
   5196 
   5197 				if (rdev->irq.crtc_vblank_int[3]) {
   5198 					drm_handle_vblank(rdev->ddev, 3);
   5199 #ifdef __NetBSD__
   5200 					spin_lock(&rdev->irq.vblank_lock);
   5201 					rdev->pm.vblank_sync = true;
   5202 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5203 					spin_unlock(&rdev->irq.vblank_lock);
   5204 #else
   5205 					rdev->pm.vblank_sync = true;
   5206 					wake_up(&rdev->irq.vblank_queue);
   5207 #endif
   5208 				}
   5209 				if (atomic_read(&rdev->irq.pflip[3]))
   5210 					radeon_crtc_handle_vblank(rdev, 3);
   5211 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
   5212 				DRM_DEBUG("IH: D4 vblank\n");
   5213 
   5214 				break;
   5215 			case 1: /* D4 vline */
   5216 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
   5217 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
   5218 
   5219 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
   5220 				DRM_DEBUG("IH: D4 vline\n");
   5221 
   5222 				break;
   5223 			default:
   5224 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5225 				break;
   5226 			}
   5227 			break;
   5228 		case 5: /* D5 vblank/vline */
   5229 			switch (src_data) {
   5230 			case 0: /* D5 vblank */
   5231 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
   5232 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
   5233 
   5234 				if (rdev->irq.crtc_vblank_int[4]) {
   5235 					drm_handle_vblank(rdev->ddev, 4);
   5236 					rdev->pm.vblank_sync = true;
   5237 #ifdef __NetBSD__
   5238 					spin_lock(&rdev->irq.vblank_lock);
   5239 					rdev->pm.vblank_sync = true;
   5240 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5241 					spin_unlock(&rdev->irq.vblank_lock);
   5242 #else
   5243 					wake_up(&rdev->irq.vblank_queue);
   5244 #endif
   5245 				}
   5246 				if (atomic_read(&rdev->irq.pflip[4]))
   5247 					radeon_crtc_handle_vblank(rdev, 4);
   5248 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
   5249 				DRM_DEBUG("IH: D5 vblank\n");
   5250 
   5251 				break;
   5252 			case 1: /* D5 vline */
   5253 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
   5254 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
   5255 
   5256 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
   5257 				DRM_DEBUG("IH: D5 vline\n");
   5258 
   5259 				break;
   5260 			default:
   5261 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5262 				break;
   5263 			}
   5264 			break;
   5265 		case 6: /* D6 vblank/vline */
   5266 			switch (src_data) {
   5267 			case 0: /* D6 vblank */
   5268 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
   5269 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
   5270 
   5271 				if (rdev->irq.crtc_vblank_int[5]) {
   5272 					drm_handle_vblank(rdev->ddev, 5);
   5273 #ifdef __NetBSD__
   5274 					spin_lock(&rdev->irq.vblank_lock);
   5275 					rdev->pm.vblank_sync = true;
   5276 					DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
   5277 					spin_unlock(&rdev->irq.vblank_lock);
   5278 #else
   5279 					rdev->pm.vblank_sync = true;
   5280 					wake_up(&rdev->irq.vblank_queue);
   5281 #endif
   5282 				}
   5283 				if (atomic_read(&rdev->irq.pflip[5]))
   5284 					radeon_crtc_handle_vblank(rdev, 5);
   5285 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
   5286 				DRM_DEBUG("IH: D6 vblank\n");
   5287 
   5288 				break;
   5289 			case 1: /* D6 vline */
   5290 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
   5291 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
   5292 
   5293 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
   5294 				DRM_DEBUG("IH: D6 vline\n");
   5295 
   5296 				break;
   5297 			default:
   5298 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5299 				break;
   5300 			}
   5301 			break;
   5302 		case 8: /* D1 page flip */
   5303 		case 10: /* D2 page flip */
   5304 		case 12: /* D3 page flip */
   5305 		case 14: /* D4 page flip */
   5306 		case 16: /* D5 page flip */
   5307 		case 18: /* D6 page flip */
   5308 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
   5309 			if (radeon_use_pflipirq > 0)
   5310 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
   5311 			break;
   5312 		case 42: /* HPD hotplug */
   5313 			switch (src_data) {
   5314 			case 0:
   5315 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
   5316 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5317 
   5318 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
   5319 				queue_hotplug = true;
   5320 				DRM_DEBUG("IH: HPD1\n");
   5321 				break;
   5322 			case 1:
   5323 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
   5324 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5325 
   5326 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
   5327 				queue_hotplug = true;
   5328 				DRM_DEBUG("IH: HPD2\n");
   5329 				break;
   5330 			case 2:
   5331 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
   5332 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5333 
   5334 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
   5335 				queue_hotplug = true;
   5336 				DRM_DEBUG("IH: HPD3\n");
   5337 				break;
   5338 			case 3:
   5339 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
   5340 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5341 
   5342 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
   5343 				queue_hotplug = true;
   5344 				DRM_DEBUG("IH: HPD4\n");
   5345 				break;
   5346 			case 4:
   5347 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
   5348 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5349 
   5350 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
   5351 				queue_hotplug = true;
   5352 				DRM_DEBUG("IH: HPD5\n");
   5353 				break;
   5354 			case 5:
   5355 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
   5356 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5357 
   5358 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
   5359 				queue_hotplug = true;
   5360 				DRM_DEBUG("IH: HPD6\n");
   5361 				break;
   5362 			case 6:
   5363 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
   5364 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5365 
   5366 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
   5367 				queue_dp = true;
   5368 				DRM_DEBUG("IH: HPD_RX 1\n");
   5369 				break;
   5370 			case 7:
   5371 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
   5372 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5373 
   5374 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
   5375 				queue_dp = true;
   5376 				DRM_DEBUG("IH: HPD_RX 2\n");
   5377 				break;
   5378 			case 8:
   5379 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
   5380 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5381 
   5382 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
   5383 				queue_dp = true;
   5384 				DRM_DEBUG("IH: HPD_RX 3\n");
   5385 				break;
   5386 			case 9:
   5387 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
   5388 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5389 
   5390 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
   5391 				queue_dp = true;
   5392 				DRM_DEBUG("IH: HPD_RX 4\n");
   5393 				break;
   5394 			case 10:
   5395 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
   5396 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5397 
   5398 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
   5399 				queue_dp = true;
   5400 				DRM_DEBUG("IH: HPD_RX 5\n");
   5401 				break;
   5402 			case 11:
   5403 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
   5404 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5405 
   5406 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
   5407 				queue_dp = true;
   5408 				DRM_DEBUG("IH: HPD_RX 6\n");
   5409 				break;
   5410 			default:
   5411 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5412 				break;
   5413 			}
   5414 			break;
   5415 		case 44: /* hdmi */
   5416 			switch (src_data) {
   5417 			case 0:
   5418 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
   5419 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5420 
   5421 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
   5422 				queue_hdmi = true;
   5423 				DRM_DEBUG("IH: HDMI0\n");
   5424 				break;
   5425 			case 1:
   5426 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
   5427 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5428 
   5429 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
   5430 				queue_hdmi = true;
   5431 				DRM_DEBUG("IH: HDMI1\n");
   5432 				break;
   5433 			case 2:
   5434 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
   5435 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5436 
   5437 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
   5438 				queue_hdmi = true;
   5439 				DRM_DEBUG("IH: HDMI2\n");
   5440 				break;
   5441 			case 3:
   5442 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
   5443 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5444 
   5445 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
   5446 				queue_hdmi = true;
   5447 				DRM_DEBUG("IH: HDMI3\n");
   5448 				break;
   5449 			case 4:
   5450 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
   5451 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5452 
   5453 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
   5454 				queue_hdmi = true;
   5455 				DRM_DEBUG("IH: HDMI4\n");
   5456 				break;
   5457 			case 5:
   5458 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
   5459 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
   5460 
   5461 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
   5462 				queue_hdmi = true;
   5463 				DRM_DEBUG("IH: HDMI5\n");
   5464 				break;
   5465 			default:
   5466 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
   5467 				break;
   5468 			}
   5469 		case 96:
   5470 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
   5471 			WREG32(SRBM_INT_ACK, 0x1);
   5472 			break;
   5473 		case 124: /* UVD */
   5474 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
   5475 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
   5476 			break;
   5477 		case 146:
   5478 		case 147:
   5479 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
   5480 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
   5481 			/* reset addr and status */
   5482 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
   5483 			if (addr == 0x0 && status == 0x0)
   5484 				break;
   5485 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
   5486 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
   5487 				addr);
   5488 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
   5489 				status);
   5490 			cayman_vm_decode_fault(rdev, status, addr);
   5491 			break;
   5492 		case 176: /* CP_INT in ring buffer */
   5493 		case 177: /* CP_INT in IB1 */
   5494 		case 178: /* CP_INT in IB2 */
   5495 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
   5496 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
   5497 			break;
   5498 		case 181: /* CP EOP event */
   5499 			DRM_DEBUG("IH: CP EOP\n");
   5500 			if (rdev->family >= CHIP_CAYMAN) {
   5501 				switch (src_data) {
   5502 				case 0:
   5503 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
   5504 					break;
   5505 				case 1:
   5506 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
   5507 					break;
   5508 				case 2:
   5509 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
   5510 					break;
   5511 				}
   5512 			} else
   5513 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
   5514 			break;
   5515 		case 224: /* DMA trap event */
   5516 			DRM_DEBUG("IH: DMA trap\n");
   5517 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
   5518 			break;
   5519 		case 230: /* thermal low to high */
   5520 			DRM_DEBUG("IH: thermal low to high\n");
   5521 			rdev->pm.dpm.thermal.high_to_low = false;
   5522 			queue_thermal = true;
   5523 			break;
   5524 		case 231: /* thermal high to low */
   5525 			DRM_DEBUG("IH: thermal high to low\n");
   5526 			rdev->pm.dpm.thermal.high_to_low = true;
   5527 			queue_thermal = true;
   5528 			break;
   5529 		case 233: /* GUI IDLE */
   5530 			DRM_DEBUG("IH: GUI idle\n");
   5531 			break;
   5532 		case 244: /* DMA trap event */
   5533 			if (rdev->family >= CHIP_CAYMAN) {
   5534 				DRM_DEBUG("IH: DMA1 trap\n");
   5535 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
   5536 			}
   5537 			break;
   5538 		default:
   5539 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
   5540 			break;
   5541 		}
   5542 
   5543 		/* wptr/rptr are in bytes! */
   5544 		rptr += 16;
   5545 		rptr &= rdev->ih.ptr_mask;
   5546 		WREG32(IH_RB_RPTR, rptr);
   5547 	}
   5548 	if (queue_dp)
   5549 		schedule_work(&rdev->dp_work);
   5550 	if (queue_hotplug)
   5551 		schedule_delayed_work(&rdev->hotplug_work, 0);
   5552 	if (queue_hdmi)
   5553 		schedule_work(&rdev->audio_work);
   5554 	if (queue_thermal && rdev->pm.dpm_enabled)
   5555 		schedule_work(&rdev->pm.dpm.thermal.work);
   5556 	rdev->ih.rptr = rptr;
   5557 	atomic_set(&rdev->ih.lock, 0);
   5558 
   5559 	/* make sure wptr hasn't changed while processing */
   5560 	wptr = evergreen_get_ih_wptr(rdev);
   5561 	if (wptr != rptr)
   5562 		goto restart_ih;
   5563 
   5564 	return IRQ_HANDLED;
   5565 }
   5566 
/*
 * evergreen_startup - bring the GPU up to a fully working state
 *
 * Common bring-up path used by both init and resume.  The sequence is
 * order-sensitive: PCIe/ASPM setup, VRAM scratch, MC programming and
 * (DCE5) MC microcode, GART/AGP enable, core GPU init, RLC buffers
 * (IGP only), writeback and fence rings, UVD resume, IRQ install, then
 * ring init + CP/DMA/UVD resume, IB pool and audio.
 *
 * Returns 0 on success or a negative error code; callers treat any
 * failure as fatal for acceleration.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DPM loads the MC microcode itself; only load it here otherwise */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is not fatal: disable the UVD ring and continue */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
   5704 
/*
 * evergreen_resume - resume the ASIC after suspend
 *
 * Resets the ASIC, re-posts the card via the ATOM BIOS, restores the
 * golden register settings, resumes power management (DPM only), and
 * then runs the common startup path.  On failure, acceleration is
 * marked as not working.
 *
 * Returns 0 on success or the error from evergreen_startup().
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
   5738 
/*
 * evergreen_suspend - quiesce the ASIC for suspend.
 *
 * Tears the engines down in roughly the reverse of the startup order:
 * power management and audio first, then UVD, the CP and DMA rings,
 * interrupts, writeback, and finally the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	/* Stop the UVD ring before letting the core suspend the block. */
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	/* Halt the command processor and the DMA engine. */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	/* GART goes last, after everything that might still use it. */
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
   5753 
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic-specific functions.  This should
 * also allow us to remove a bunch of callback functions, like
 * vram_info.
 */
/*
 * evergreen_init - one-time ASIC initialization at driver load.
 *
 * Validates that an ATOM BIOS is present, resets the ASIC and posts
 * the card if needed, then brings up clocks, fences, AGP, the memory
 * controller, the memory manager, microcode, power management, the
 * rings and the GART before attempting a full evergreen_startup().
 * A startup failure only disables acceleration (accel_working is
 * cleared) and still returns 0; the one hard failure after that point
 * is a DCE5 dGPU with no MC microcode, which returns -EINVAL.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		/* A missing BIOS is fatal only on AVIVO-class ASICs. */
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; an AGP init failure is non-fatal, we just
	 * fall back to non-AGP operation.
	 */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* Fetch microcode only if some piece of it is missing; DCE5
	 * parts additionally need MC firmware and use the NI loader.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* Pre-size the GFX and DMA rings; the objects are allocated later. */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring if UVD init succeeded. */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* Unwind the acceleration blocks; note that this path
		 * still falls through and returns 0 below.
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
   5888 
/*
 * evergreen_fini - tear down everything set up by evergreen_init().
 *
 * Shuts down power management, audio, the CP/DMA/IRQ/UVD blocks and
 * the GART, then releases the memory manager, fence driver, AGP and
 * atombios state, and finally frees the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* sumo RLC state is only set up on IGP parts (see the
	 * matching error path in evergreen_init()).
	 */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;	/* guard against stale use of the freed BIOS copy */
}
   5913 
/*
 * evergreen_pcie_gen2_enable - try to bring the PCIe link up to gen2 speed.
 *
 * A no-op when disabled via the radeon_pcie_gen2 module parameter, on
 * IGP or non-PCIe parts, on X2 boards (which need a special sequence),
 * when the upstream bus cannot run at 5.0/8.0 GT/s, or when gen2 is
 * already active.  The entire body is compiled out on NetBSD (see the
 * XXX below; the PCIe port register accessors are not wired up there).
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
#ifndef __NetBSD__		/* XXX radeon pcie */
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* The upstream bus must support at least 5.0 GT/s for gen2
	 * to make sense.
	 */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* Only attempt the switch if the other side has ever sent or
	 * advertised gen2; otherwise just (maybe) block upconfigure.
	 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		/* Read-modify-write steps below re-read the register
		 * each time; the exact sequence is intentional.
		 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* Pulse the failed-speed-change counter clear bit. */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
#endif
}
   5978 
/*
 * evergreen_program_aspm - configure PCIe Active State Power Management.
 *
 * Programs the PIF PHY pairing mode, the L0s/L1 inactivity timers in
 * PCIE_LC_CNTL and, since PLL power-off in L1 is permitted here, the
 * PHY power-down and ramp-up timings.  Does nothing when ASPM is
 * disabled via the radeon_aspm module parameter or when the device is
 * not PCIe.  Registers are only written back when their value changed.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	/* disable_l1 and disable_plloff_in_l1 are always false today;
	 * they are kept as variables to document the knobs.
	 */
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* These families keep L0s disabled; all others enable it. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing mode differs between fusion and discrete setups. */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; BARTS+ uses different values. */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* Allow the PHY PLLs to power down while in L1. */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally programs PLL ramp-up times. */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also sets the LS2 exit time on both PIFs. */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
   6128