/*	$NetBSD: radeon_r600_dpm.c,v 1.2 2021/12/18 23:45:43 riastradh Exp $	*/

/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: radeon_r600_dpm.c,v 1.2 2021/12/18 23:45:43 riastradh Exp $");

#include "radeon.h"
#include "radeon_asic.h"
#include "r600d.h"
#include "r600_dpm.h"
#include "atom.h"

const u32 r600_utc[R600_PM_NUMBER_OF_TC] =
{
	R600_UTC_DFLT_00,
	R600_UTC_DFLT_01,
	R600_UTC_DFLT_02,
	R600_UTC_DFLT_03,
	R600_UTC_DFLT_04,
	R600_UTC_DFLT_05,
	R600_UTC_DFLT_06,
	R600_UTC_DFLT_07,
	R600_UTC_DFLT_08,
	R600_UTC_DFLT_09,
	R600_UTC_DFLT_10,
	R600_UTC_DFLT_11,
	R600_UTC_DFLT_12,
	R600_UTC_DFLT_13,
	R600_UTC_DFLT_14,
};

const u32 r600_dtc[R600_PM_NUMBER_OF_TC] =
{
	R600_DTC_DFLT_00,
	R600_DTC_DFLT_01,
	R600_DTC_DFLT_02,
	R600_DTC_DFLT_03,
	R600_DTC_DFLT_04,
	R600_DTC_DFLT_05,
	R600_DTC_DFLT_06,
	R600_DTC_DFLT_07,
	R600_DTC_DFLT_08,
	R600_DTC_DFLT_09,
	R600_DTC_DFLT_10,
	R600_DTC_DFLT_11,
	R600_DTC_DFLT_12,
	R600_DTC_DFLT_13,
	R600_DTC_DFLT_14,
};

void r600_dpm_print_class_info(u32 class, u32 class2)
{
	const char *s;

	switch (class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) {
	case ATOM_PPLIB_CLASSIFICATION_UI_NONE:
	default:
		s = "none";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_BATTERY:
		s = "battery";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_BALANCED:
		s = "balanced";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE:
		s = "performance";
		break;
	}
	printk("\tui class: %s\n", s);

	printk("\tinternal class:");
	if (((class & ~ATOM_PPLIB_CLASSIFICATION_UI_MASK) == 0) &&
	    (class2 == 0))
		pr_cont(" none");
	else {
		if (class & ATOM_PPLIB_CLASSIFICATION_BOOT)
			pr_cont(" boot");
		if (class & ATOM_PPLIB_CLASSIFICATION_THERMAL)
			pr_cont(" thermal");
		if (class & ATOM_PPLIB_CLASSIFICATION_LIMITEDPOWERSOURCE)
			pr_cont(" limited_pwr");
		if (class & ATOM_PPLIB_CLASSIFICATION_REST)
			pr_cont(" rest");
		if (class & ATOM_PPLIB_CLASSIFICATION_FORCED)
			pr_cont(" forced");
		if (class & ATOM_PPLIB_CLASSIFICATION_3DPERFORMANCE)
			pr_cont(" 3d_perf");
		if (class & ATOM_PPLIB_CLASSIFICATION_OVERDRIVETEMPLATE)
			pr_cont(" ovrdrv");
		if (class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
			pr_cont(" uvd");
		if (class & ATOM_PPLIB_CLASSIFICATION_3DLOW)
			pr_cont(" 3d_low");
		if (class & ATOM_PPLIB_CLASSIFICATION_ACPI)
			pr_cont(" acpi");
		if (class & ATOM_PPLIB_CLASSIFICATION_HD2STATE)
			pr_cont(" uvd_hd2");
		if (class & ATOM_PPLIB_CLASSIFICATION_HDSTATE)
			pr_cont(" uvd_hd");
		if (class & ATOM_PPLIB_CLASSIFICATION_SDSTATE)
			pr_cont(" uvd_sd");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_LIMITEDPOWERSOURCE_2)
			pr_cont(" limited_pwr2");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_ULV)
			pr_cont(" ulv");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_MVC)
			pr_cont(" uvd_mvc");
	}
	pr_cont("\n");
}

void r600_dpm_print_cap_info(u32 caps)
{
	printk("\tcaps:");
	if (caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY)
		pr_cont(" single_disp");
	if (caps & ATOM_PPLIB_SUPPORTS_VIDEO_PLAYBACK)
		pr_cont(" video");
	if (caps & ATOM_PPLIB_DISALLOW_ON_DC)
		pr_cont(" no_dc");
	pr_cont("\n");
}

void r600_dpm_print_ps_status(struct radeon_device *rdev,
			      struct radeon_ps *rps)
{
	printk("\tstatus:");
	if (rps == rdev->pm.dpm.current_ps)
		pr_cont(" c");
	if (rps == rdev->pm.dpm.requested_ps)
		pr_cont(" r");
	if (rps == rdev->pm.dpm.boot_ps)
		pr_cont(" b");
	pr_cont("\n");
}

/* Vertical blanking time of the first enabled CRTC, in microseconds;
 * 0xffffffff if no display is active. */
u32 r600_dpm_get_vblank_time(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 vblank_in_pixels;
	u32 vblank_time_us = 0xffffffff; /* if the displays are off, vblank time is max */

	if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) {
		list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
			radeon_crtc = to_radeon_crtc(crtc);
			if (crtc->enabled && radeon_crtc->enabled && radeon_crtc->hw_mode.clock) {
				vblank_in_pixels =
					radeon_crtc->hw_mode.crtc_htotal *
					(radeon_crtc->hw_mode.crtc_vblank_end -
					 radeon_crtc->hw_mode.crtc_vdisplay +
					 (radeon_crtc->v_border * 2));

				vblank_time_us = vblank_in_pixels * 1000 / radeon_crtc->hw_mode.clock;
				break;
			}
		}
	}

	return vblank_time_us;
}

u32 r600_dpm_get_vrefresh(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 vrefresh = 0;

	if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) {
		list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
			radeon_crtc = to_radeon_crtc(crtc);
			if (crtc->enabled && radeon_crtc->enabled && radeon_crtc->hw_mode.clock) {
				vrefresh = drm_mode_vrefresh(&radeon_crtc->hw_mode);
				break;
			}
		}
	}
	return vrefresh;
}

/* Split i_c = (i * r_c) / 100 into p and u with p = i_c / 4^u, where u is
 * the smallest value for which (i_c >> p_b) fits in 2*u bits. */
void r600_calculate_u_and_p(u32 i, u32 r_c, u32 p_b,
			    u32 *p, u32 *u)
{
	u32 b_c = 0;
	u32 i_c;
	u32 tmp;

	i_c = (i * r_c) / 100;
	tmp = i_c >> p_b;

	while (tmp) {
		b_c++;
		tmp >>= 1;
	}

	*u = (b_c + 1) / 2;
	*p = i_c / (1 << (2 * (*u)));
}

/* Split t into *th = t - ah and *tl = t + al, where ah + al is derived from
 * h and the ratio fh/fl; fails if fl or fh is zero or fl > fh. */
int r600_calculate_at(u32 t, u32 h, u32 fh, u32 fl, u32 *tl, u32 *th)
{
	u32 k, a, ah, al;
	u32 t1;

	if ((fl == 0) || (fh == 0) || (fl > fh))
		return -EINVAL;

	k = (100 * fh) / fl;
	t1 = (t * (k - 100));
	a = (1000 * (100 * h + t1)) / (10000 + (t1 / 100));
	a = (a + 5) / 10;
	ah = ((a * t) + 5000) / 10000;
	al = a - ah;

	*th = t - ah;
	*tl = t + al;

	return 0;
}

void r600_gfx_clockgating_enable(struct radeon_device *rdev, bool enable)
{
	int i;

	if (enable) {
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	} else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);

		WREG32(CG_RLC_REQ_AND_RSP, 0x2);

		for (i = 0; i < rdev->usec_timeout; i++) {
			if (((RREG32(CG_RLC_REQ_AND_RSP) & CG_RLC_RSP_TYPE_MASK) >> CG_RLC_RSP_TYPE_SHIFT) == 1)
				break;
			udelay(1);
		}

		WREG32(CG_RLC_REQ_AND_RSP, 0x0);

		WREG32(GRBM_PWR_CNTL, 0x1);
		RREG32(GRBM_PWR_CNTL);
	}
}

void r600_dynamicpm_enable(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}

void r600_enable_thermal_protection(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	else
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
}

void r600_enable_acpi_pm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
}

void r600_enable_dynamic_pcie_gen2(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
}

bool r600_dynamicpm_enabled(struct radeon_device *rdev)
{
	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
		return true;
	else
		return false;
}

void r600_enable_sclk_control(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
}

void r600_enable_mclk_control(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
	else
		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}

void r600_enable_spll_bypass(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32_P(CG_SPLL_FUNC_CNTL, SPLL_BYPASS_EN, ~SPLL_BYPASS_EN);
	else
		WREG32_P(CG_SPLL_FUNC_CNTL, 0, ~SPLL_BYPASS_EN);
}

void r600_wait_for_spll_change(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CG_SPLL_FUNC_CNTL) & SPLL_CHG_STATUS)
			break;
		udelay(1);
	}
}

void r600_set_bsp(struct radeon_device *rdev, u32 u, u32 p)
{
	WREG32(CG_BSP, BSP(p) | BSU(u));
}

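/*
 * r600_set_at() below programs the CG_RT and CG_LT threshold registers;
 * judging from the parameter names, each value guards one performance-level
 * transition (low-to-medium, medium-to-high, high-to-medium, medium-to-low).
 */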
void r600_set_at(struct radeon_device *rdev,
		 u32 l_to_m, u32 m_to_h,
		 u32 h_to_m, u32 m_to_l)
{
	WREG32(CG_RT, FLS(l_to_m) | FMS(m_to_h));
	WREG32(CG_LT, FHS(h_to_m) | FMS(m_to_l));
}

void r600_set_tc(struct radeon_device *rdev,
		 u32 index, u32 u_t, u32 d_t)
{
	WREG32(CG_FFCT_0 + (index * 4), UTC_0(u_t) | DTC_0(d_t));
}

void r600_select_td(struct radeon_device *rdev,
		    enum r600_td td)
{
	if (td == R600_TD_AUTO)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
	if (td == R600_TD_UP)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
	if (td == R600_TD_DOWN)
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
}

void r600_set_vrc(struct radeon_device *rdev, u32 vrv)
{
	WREG32(CG_FTV, vrv);
}

void r600_set_tpu(struct radeon_device *rdev, u32 u)
{
	WREG32_P(CG_TPC, TPU(u), ~TPU_MASK);
}

void r600_set_tpc(struct radeon_device *rdev, u32 c)
{
	WREG32_P(CG_TPC, TPCC(c), ~TPCC_MASK);
}

void r600_set_sstu(struct radeon_device *rdev, u32 u)
{
	WREG32_P(CG_SSP, CG_SSTU(u), ~CG_SSTU_MASK);
}

void r600_set_sst(struct radeon_device *rdev, u32 t)
{
	WREG32_P(CG_SSP, CG_SST(t), ~CG_SST_MASK);
}

void r600_set_git(struct radeon_device *rdev, u32 t)
{
	WREG32_P(CG_GIT, CG_GICST(t), ~CG_GICST_MASK);
}

void r600_set_fctu(struct radeon_device *rdev, u32 u)
{
	WREG32_P(CG_FC_T, FC_TU(u), ~FC_TU_MASK);
}

void r600_set_fct(struct radeon_device *rdev, u32 t)
{
	WREG32_P(CG_FC_T, FC_T(t), ~FC_T_MASK);
}

void r600_set_ctxcgtt3d_rphc(struct radeon_device *rdev, u32 p)
{
	WREG32_P(CG_CTX_CGTT3D_R, PHC(p), ~PHC_MASK);
}

void r600_set_ctxcgtt3d_rsdc(struct radeon_device *rdev, u32 s)
{
	WREG32_P(CG_CTX_CGTT3D_R, SDC(s), ~SDC_MASK);
}

void r600_set_vddc3d_oorsu(struct radeon_device *rdev, u32 u)
{
	WREG32_P(CG_VDDC3D_OOR, SU(u), ~SU_MASK);
}

void r600_set_vddc3d_oorphc(struct radeon_device *rdev, u32 p)
{
	WREG32_P(CG_VDDC3D_OOR, PHC(p), ~PHC_MASK);
}

void r600_set_vddc3d_oorsdc(struct radeon_device *rdev, u32 s)
{
	WREG32_P(CG_VDDC3D_OOR, SDC(s), ~SDC_MASK);
}

void r600_set_mpll_lock_time(struct radeon_device *rdev, u32 lock_time)
{
	WREG32_P(MPLL_TIME, MPLL_LOCK_TIME(lock_time), ~MPLL_LOCK_TIME_MASK);
}

void r600_set_mpll_reset_time(struct radeon_device *rdev, u32 reset_time)
{
	WREG32_P(MPLL_TIME, MPLL_RESET_TIME(reset_time), ~MPLL_RESET_TIME_MASK);
}

void r600_engine_clock_entry_enable(struct radeon_device *rdev,
				    u32 index, bool enable)
{
	if (enable)
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 STEP_0_SPLL_ENTRY_VALID, ~STEP_0_SPLL_ENTRY_VALID);
	else
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 0, ~STEP_0_SPLL_ENTRY_VALID);
}

void r600_engine_clock_entry_enable_pulse_skipping(struct radeon_device *rdev,
						   u32 index, bool enable)
{
	if (enable)
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 STEP_0_SPLL_STEP_ENABLE, ~STEP_0_SPLL_STEP_ENABLE);
	else
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 0, ~STEP_0_SPLL_STEP_ENABLE);
}

void r600_engine_clock_entry_enable_post_divider(struct radeon_device *rdev,
						 u32 index, bool enable)
{
	if (enable)
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 STEP_0_POST_DIV_EN, ~STEP_0_POST_DIV_EN);
	else
		WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART2 + (index * 4 * 2),
			 0, ~STEP_0_POST_DIV_EN);
}

void r600_engine_clock_entry_set_post_divider(struct radeon_device *rdev,
					      u32 index, u32 divider)
{
	WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART1 + (index * 4 * 2),
		 STEP_0_SPLL_POST_DIV(divider), ~STEP_0_SPLL_POST_DIV_MASK);
}

void r600_engine_clock_entry_set_reference_divider(struct radeon_device *rdev,
						    u32 index, u32 divider)
{
	WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART1 + (index * 4 * 2),
		 STEP_0_SPLL_REF_DIV(divider), ~STEP_0_SPLL_REF_DIV_MASK);
}

void r600_engine_clock_entry_set_feedback_divider(struct radeon_device *rdev,
						   u32 index, u32 divider)
{
	WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART1 + (index * 4 * 2),
		 STEP_0_SPLL_FB_DIV(divider), ~STEP_0_SPLL_FB_DIV_MASK);
}

void r600_engine_clock_entry_set_step_time(struct radeon_device *rdev,
					   u32 index, u32 step_time)
{
	WREG32_P(SCLK_FREQ_SETTING_STEP_0_PART1 + (index * 4 * 2),
		 STEP_0_SPLL_STEP_TIME(step_time), ~STEP_0_SPLL_STEP_TIME_MASK);
}

void r600_vid_rt_set_ssu(struct radeon_device *rdev, u32 u)
{
	WREG32_P(VID_RT, SSTU(u), ~SSTU_MASK);
}

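/*
 * The VID_RT helpers (r600_vid_rt_set_ssu() above and the two functions
 * below) each update one field of the VID_RT register: SSTU, VID_CRTU and
 * VID_CRT respectively.
 */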
void r600_vid_rt_set_vru(struct radeon_device *rdev, u32 u)
{
	WREG32_P(VID_RT, VID_CRTU(u), ~VID_CRTU_MASK);
}

void r600_vid_rt_set_vrt(struct radeon_device *rdev, u32 rt)
{
	WREG32_P(VID_RT, VID_CRT(rt), ~VID_CRT_MASK);
}

void r600_voltage_control_enable_pins(struct radeon_device *rdev,
				      u64 mask)
{
	WREG32(LOWER_GPIO_ENABLE, mask & 0xffffffff);
	WREG32(UPPER_GPIO_ENABLE, upper_32_bits(mask));
}


void r600_voltage_control_program_voltages(struct radeon_device *rdev,
					   enum r600_power_level index, u64 pins)
{
	u32 tmp, mask;
	/* hardware profile slots are addressed in the reverse order of
	 * enum r600_power_level */
	u32 ix = 3 - (3 & index);

	WREG32(CTXSW_VID_LOWER_GPIO_CNTL + (ix * 4), pins & 0xffffffff);

	mask = 7 << (3 * ix);
	tmp = RREG32(VID_UPPER_GPIO_CNTL);
	tmp = (tmp & ~mask) | ((pins >> (32 - (3 * ix))) & mask);
	WREG32(VID_UPPER_GPIO_CNTL, tmp);
}

void r600_voltage_control_deactivate_static_control(struct radeon_device *rdev,
						    u64 mask)
{
	u32 gpio;

	gpio = RREG32(GPIOPAD_MASK);
	gpio &= ~mask;
	WREG32(GPIOPAD_MASK, gpio);

	gpio = RREG32(GPIOPAD_EN);
	gpio &= ~mask;
	WREG32(GPIOPAD_EN, gpio);

	gpio = RREG32(GPIOPAD_A);
	gpio &= ~mask;
	WREG32(GPIOPAD_A, gpio);
}

void r600_power_level_enable(struct radeon_device *rdev,
			     enum r600_power_level index, bool enable)
{
	u32 ix = 3 - (3 & index);

	if (enable)
		WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4), CTXSW_FREQ_STATE_ENABLE,
			 ~CTXSW_FREQ_STATE_ENABLE);
	else
		WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4), 0,
			 ~CTXSW_FREQ_STATE_ENABLE);
}

void r600_power_level_set_voltage_index(struct radeon_device *rdev,
					enum r600_power_level index, u32 voltage_index)
{
	u32 ix = 3 - (3 & index);

	WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4),
		 CTXSW_FREQ_VIDS_CFG_INDEX(voltage_index), ~CTXSW_FREQ_VIDS_CFG_INDEX_MASK);
}

void r600_power_level_set_mem_clock_index(struct radeon_device *rdev,
					  enum r600_power_level index, u32 mem_clock_index)
{
	u32 ix = 3 - (3 & index);

	WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4),
		 CTXSW_FREQ_MCLK_CFG_INDEX(mem_clock_index), ~CTXSW_FREQ_MCLK_CFG_INDEX_MASK);
}

void r600_power_level_set_eng_clock_index(struct radeon_device *rdev,
					  enum r600_power_level index, u32 eng_clock_index)
{
	u32 ix = 3 - (3 & index);

	WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4),
		 CTXSW_FREQ_SCLK_CFG_INDEX(eng_clock_index), ~CTXSW_FREQ_SCLK_CFG_INDEX_MASK);
}

void r600_power_level_set_watermark_id(struct radeon_device *rdev,
				       enum r600_power_level index,
				       enum r600_display_watermark watermark_id)
{
	u32 ix = 3 - (3 & index);
	u32 tmp = 0;

	if (watermark_id == R600_DISPLAY_WATERMARK_HIGH)
		tmp = CTXSW_FREQ_DISPLAY_WATERMARK;
	WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4), tmp, ~CTXSW_FREQ_DISPLAY_WATERMARK);
}

void r600_power_level_set_pcie_gen2(struct radeon_device *rdev,
				    enum r600_power_level index, bool compatible)
{
	u32 ix = 3 - (3 & index);
	u32 tmp = 0;

	if (compatible)
		tmp = CTXSW_FREQ_GEN2PCIE_VOLT;
	WREG32_P(CTXSW_PROFILE_INDEX + (ix * 4), tmp, ~CTXSW_FREQ_GEN2PCIE_VOLT);
}

enum r600_power_level r600_power_level_get_current_index(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK;
	tmp >>= CURRENT_PROFILE_INDEX_SHIFT;
	return tmp;
}

enum r600_power_level r600_power_level_get_target_index(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & TARGET_PROFILE_INDEX_MASK;
	tmp >>= TARGET_PROFILE_INDEX_SHIFT;
	return tmp;
}

void r600_power_level_set_enter_index(struct radeon_device *rdev,
				      enum r600_power_level index)
{
	WREG32_P(TARGET_AND_CURRENT_PROFILE_INDEX, DYN_PWR_ENTER_INDEX(index),
		 ~DYN_PWR_ENTER_INDEX_MASK);
}

void r600_wait_for_power_level_unequal(struct radeon_device *rdev,
				       enum r600_power_level index)
{
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (r600_power_level_get_target_index(rdev) != index)
			break;
		udelay(1);
	}

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (r600_power_level_get_current_index(rdev) != index)
			break;
		udelay(1);
	}
}

void r600_wait_for_power_level(struct radeon_device *rdev,
			       enum r600_power_level index)
{
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (r600_power_level_get_target_index(rdev) == index)
			break;
		udelay(1);
	}

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (r600_power_level_get_current_index(rdev) == index)
			break;
		udelay(1);
	}
}

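/*
 * Bring up dynamic power management: sclk/mclk control is parked while
 * GLOBAL_PWRMGT_EN is set, a vblank is awaited on both CRTCs, the SPLL is
 * toggled in and out of bypass twice (waiting for SPLL_CHG_STATUS each
 * time), and clock control is then re-enabled.
 */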
void r600_start_dpm(struct radeon_device *rdev)
{
	r600_enable_sclk_control(rdev, false);
	r600_enable_mclk_control(rdev, false);

	r600_dynamicpm_enable(rdev, true);

	radeon_wait_for_vblank(rdev, 0);
	radeon_wait_for_vblank(rdev, 1);

	r600_enable_spll_bypass(rdev, true);
	r600_wait_for_spll_change(rdev);
	r600_enable_spll_bypass(rdev, false);
	r600_wait_for_spll_change(rdev);

	r600_enable_spll_bypass(rdev, true);
	r600_wait_for_spll_change(rdev);
	r600_enable_spll_bypass(rdev, false);
	r600_wait_for_spll_change(rdev);

	r600_enable_sclk_control(rdev, true);
	r600_enable_mclk_control(rdev, true);
}

void r600_stop_dpm(struct radeon_device *rdev)
{
	r600_dynamicpm_enable(rdev, false);
}

int r600_dpm_pre_set_power_state(struct radeon_device *rdev)
{
	return 0;
}

void r600_dpm_post_set_power_state(struct radeon_device *rdev)
{

}

bool r600_is_uvd_state(u32 class, u32 class2)
{
	if (class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_HD2STATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_HDSTATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_SDSTATE)
		return true;
	if (class2 & ATOM_PPLIB_CLASSIFICATION2_MVC)
		return true;
	return false;
}

static int r600_set_thermal_temperature_range(struct radeon_device *rdev,
					      int min_temp, int max_temp)
{
	int low_temp = 0 * 1000;
	int high_temp = 255 * 1000;

	if (low_temp < min_temp)
		low_temp = min_temp;
	if (high_temp > max_temp)
		high_temp = max_temp;
	if (high_temp < low_temp) {
		DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
		return -EINVAL;
	}

	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
	WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);

	rdev->pm.dpm.thermal.min_temp = low_temp;
	rdev->pm.dpm.thermal.max_temp = high_temp;

	return 0;
}

bool r600_is_internal_thermal_sensor(enum radeon_int_thermal_type sensor)
{
	switch (sensor) {
	case THERMAL_TYPE_RV6XX:
	case THERMAL_TYPE_RV770:
	case THERMAL_TYPE_EVERGREEN:
	case THERMAL_TYPE_SUMO:
	case THERMAL_TYPE_NI:
	case THERMAL_TYPE_SI:
	case THERMAL_TYPE_CI:
	case THERMAL_TYPE_KV:
		return true;
	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		return false; /* need special handling */
	case THERMAL_TYPE_NONE:
	case THERMAL_TYPE_EXTERNAL:
	case THERMAL_TYPE_EXTERNAL_GPIO:
	default:
		return false;
	}
}

int r600_dpm_late_enable(struct radeon_device *rdev)
{
	int ret;

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		ret = r600_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
	}

	return 0;
}

union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
	struct _ATOM_PPLIB_POWERPLAYTABLE4 pplib4;
	struct _ATOM_PPLIB_POWERPLAYTABLE5 pplib5;
};

union fan_info {
	struct _ATOM_PPLIB_FANTABLE fan;
	struct _ATOM_PPLIB_FANTABLE2 fan2;
	struct _ATOM_PPLIB_FANTABLE3 fan3;
};

static int r600_parse_clk_voltage_dep_table(struct radeon_clock_voltage_dependency_table *radeon_table,
					    ATOM_PPLIB_Clock_Voltage_Dependency_Table *atom_table)
{
	u32 size = atom_table->ucNumEntries *
		sizeof(struct radeon_clock_voltage_dependency_entry);
	int i;
	ATOM_PPLIB_Clock_Voltage_Dependency_Record *entry;

	radeon_table->entries = kzalloc(size, GFP_KERNEL);
	if (!radeon_table->entries)
		return -ENOMEM;

	entry = &atom_table->entries[0];
	for (i = 0; i < atom_table->ucNumEntries; i++) {
		radeon_table->entries[i].clk = le16_to_cpu(entry->usClockLow) |
			(entry->ucClockHigh << 16);
		radeon_table->entries[i].v = le16_to_cpu(entry->usVoltage);
		entry = (ATOM_PPLIB_Clock_Voltage_Dependency_Record *)
			((u8 *)entry + sizeof(ATOM_PPLIB_Clock_Voltage_Dependency_Record));
	}
	radeon_table->count = atom_table->ucNumEntries;

	return 0;
}

int r600_get_platform_caps(struct radeon_device *rdev)
{
	struct radeon_mode_info *mode_info = &rdev->mode_info;
	union power_info *power_info;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	u16 data_offset;
	u8 frev, crev;

	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
				    &frev, &crev, &data_offset))
		return -EINVAL;
	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);

	rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
	rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
	rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);

	return 0;
}

/* sizeof(ATOM_PPLIB_EXTENDEDHEADER) */
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V2 12
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V3 14
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V4 16
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V5 18
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V6 20
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V7 22

int r600_parse_extended_power_table(struct radeon_device *rdev)
{
	struct radeon_mode_info *mode_info = &rdev->mode_info;
	union power_info *power_info;
	union fan_info *fan_info;
	ATOM_PPLIB_Clock_Voltage_Dependency_Table *dep_table;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	u16 data_offset;
	u8 frev, crev;
	int ret, i;

	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
				    &frev, &crev, &data_offset))
		return -EINVAL;
	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);

	/* fan table */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE3)) {
		if (power_info->pplib3.usFanTableOffset) {
			fan_info = (union fan_info *)(mode_info->atom_context->bios + data_offset +
						      le16_to_cpu(power_info->pplib3.usFanTableOffset));
			rdev->pm.dpm.fan.t_hyst = fan_info->fan.ucTHyst;
			rdev->pm.dpm.fan.t_min = le16_to_cpu(fan_info->fan.usTMin);
			rdev->pm.dpm.fan.t_med = le16_to_cpu(fan_info->fan.usTMed);
			rdev->pm.dpm.fan.t_high = le16_to_cpu(fan_info->fan.usTHigh);
			rdev->pm.dpm.fan.pwm_min = le16_to_cpu(fan_info->fan.usPWMMin);
			rdev->pm.dpm.fan.pwm_med = le16_to_cpu(fan_info->fan.usPWMMed);
			rdev->pm.dpm.fan.pwm_high = le16_to_cpu(fan_info->fan.usPWMHigh);
			if (fan_info->fan.ucFanTableFormat >= 2)
				rdev->pm.dpm.fan.t_max = le16_to_cpu(fan_info->fan2.usTMax);
			else
				rdev->pm.dpm.fan.t_max = 10900;
			rdev->pm.dpm.fan.cycle_delay = 100000;
			if (fan_info->fan.ucFanTableFormat >= 3) {
				rdev->pm.dpm.fan.control_mode = fan_info->fan3.ucFanControlMode;
				rdev->pm.dpm.fan.default_max_fan_pwm =
					le16_to_cpu(fan_info->fan3.usFanPWMMax);
				rdev->pm.dpm.fan.default_fan_output_sensitivity = 4836;
				rdev->pm.dpm.fan.fan_output_sensitivity =
					le16_to_cpu(fan_info->fan3.usFanOutputSensitivity);
			}
			rdev->pm.dpm.fan.ucode_fan_control = true;
		}
	}

	/* clock dependency tables, shedding tables */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE4)) {
		if (power_info->pplib4.usVddcDependencyOnSCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcDependencyOnSCLKOffset));
			ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
							       dep_table);
			if (ret)
				return ret;
		}
		if (power_info->pplib4.usVddciDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddciDependencyOnMCLKOffset));
			ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
							       dep_table);
			if (ret) {
				kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
				return ret;
			}
		}
		if (power_info->pplib4.usVddcDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcDependencyOnMCLKOffset));
			ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
							       dep_table);
			if (ret) {
				kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
				kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries);
				return ret;
			}
		}
		if (power_info->pplib4.usMvddDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usMvddDependencyOnMCLKOffset));
			ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk,
							       dep_table);
			if (ret) {
				kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
				kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries);
				kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries);
				return ret;
			}
		}
		if (power_info->pplib4.usMaxClockVoltageOnDCOffset) {
			ATOM_PPLIB_Clock_Voltage_Limit_Table *clk_v =
				(ATOM_PPLIB_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usMaxClockVoltageOnDCOffset));
			if (clk_v->ucNumEntries) {
				rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk =
					le16_to_cpu(clk_v->entries[0].usSclkLow) |
					(clk_v->entries[0].ucSclkHigh << 16);
				rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk =
					le16_to_cpu(clk_v->entries[0].usMclkLow) |
					(clk_v->entries[0].ucMclkHigh << 16);
				rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc =
					le16_to_cpu(clk_v->entries[0].usVddc);
				rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddci =
					le16_to_cpu(clk_v->entries[0].usVddci);
			}
		}
		if (power_info->pplib4.usVddcPhaseShedLimitsTableOffset) {
			ATOM_PPLIB_PhaseSheddingLimits_Table *psl =
				(ATOM_PPLIB_PhaseSheddingLimits_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcPhaseShedLimitsTableOffset));
			ATOM_PPLIB_PhaseSheddingLimits_Record *entry;

			rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries =
				kcalloc(psl->ucNumEntries,
					sizeof(struct radeon_phase_shedding_limits_entry),
					GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}

			entry = &psl->entries[0];
			for (i = 0; i < psl->ucNumEntries; i++) {
				rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].sclk =
					le16_to_cpu(entry->usSclkLow) | (entry->ucSclkHigh << 16);
				rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].mclk =
					le16_to_cpu(entry->usMclkLow) | (entry->ucMclkHigh << 16);
				rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].voltage =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_PhaseSheddingLimits_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_PhaseSheddingLimits_Record));
			}
			rdev->pm.dpm.dyn_state.phase_shedding_limits_table.count =
				psl->ucNumEntries;
		}
	}

	/* cac data */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE5)) {
		rdev->pm.dpm.tdp_limit = le32_to_cpu(power_info->pplib5.ulTDPLimit);
		rdev->pm.dpm.near_tdp_limit = le32_to_cpu(power_info->pplib5.ulNearTDPLimit);
		rdev->pm.dpm.near_tdp_limit_adjusted = rdev->pm.dpm.near_tdp_limit;
		rdev->pm.dpm.tdp_od_limit = le16_to_cpu(power_info->pplib5.usTDPODLimit);
		if (rdev->pm.dpm.tdp_od_limit)
			rdev->pm.dpm.power_control = true;
		else
			rdev->pm.dpm.power_control = false;
		rdev->pm.dpm.tdp_adjustment = 0;
		rdev->pm.dpm.sq_ramping_threshold = le32_to_cpu(power_info->pplib5.ulSQRampingThreshold);
		rdev->pm.dpm.cac_leakage = le32_to_cpu(power_info->pplib5.ulCACLeakage);
		rdev->pm.dpm.load_line_slope = le16_to_cpu(power_info->pplib5.usLoadLineSlope);
		if (power_info->pplib5.usCACLeakageTableOffset) {
			ATOM_PPLIB_CAC_Leakage_Table *cac_table =
				(ATOM_PPLIB_CAC_Leakage_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib5.usCACLeakageTableOffset));
			ATOM_PPLIB_CAC_Leakage_Record *entry;
			u32 size = cac_table->ucNumEntries * sizeof(struct radeon_cac_leakage_table);
			rdev->pm.dpm.dyn_state.cac_leakage_table.entries = kzalloc(size, GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.cac_leakage_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			entry = &cac_table->entries[0];
			for (i = 0; i < cac_table->ucNumEntries; i++) {
				if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_EVV) {
					rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1 =
						le16_to_cpu(entry->usVddc1);
					rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2 =
						le16_to_cpu(entry->usVddc2);
					rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3 =
						le16_to_cpu(entry->usVddc3);
				} else {
					rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc =
						le16_to_cpu(entry->usVddc);
					rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage =
						le32_to_cpu(entry->ulLeakageValue);
				}
				entry = (ATOM_PPLIB_CAC_Leakage_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_CAC_Leakage_Record));
			}
			rdev->pm.dpm.dyn_state.cac_leakage_table.count = cac_table->ucNumEntries;
		}
	}

	/* ext tables */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE3)) {
		ATOM_PPLIB_EXTENDEDHEADER *ext_hdr = (ATOM_PPLIB_EXTENDEDHEADER *)
			(mode_info->atom_context->bios + data_offset +
			 le16_to_cpu(power_info->pplib3.usExtendendedHeaderOffset));
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V2) &&
		    ext_hdr->usVCETableOffset) {
			VCEClockInfoArray *array = (VCEClockInfoArray *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1);
			ATOM_PPLIB_VCE_Clock_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1 +
				 1 + array->ucNumEntries * sizeof(VCEClockInfo));
			ATOM_PPLIB_VCE_State_Table *states =
				(ATOM_PPLIB_VCE_State_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1 +
				 1 + (array->ucNumEntries * sizeof (VCEClockInfo)) +
				 1 + (limits->numEntries * sizeof(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record)));
			ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record *entry;
			ATOM_PPLIB_VCE_State_Record *state_entry;
			VCEClockInfo *vce_clk;
			u32 size = limits->numEntries *
				sizeof(struct radeon_vce_clock_voltage_dependency_entry);
			rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			state_entry = &states->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				vce_clk = (VCEClockInfo *)
					((u8 *)&array->entries[0] +
					 (entry->ucVCEClockInfoIndex * sizeof(VCEClockInfo)));
				rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].evclk =
					le16_to_cpu(vce_clk->usEVClkLow) | (vce_clk->ucEVClkHigh << 16);
				rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].ecclk =
					le16_to_cpu(vce_clk->usECClkLow) | (vce_clk->ucECClkHigh << 16);
				rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record));
			}
			for (i = 0; i < states->numEntries; i++) {
				if (i >= RADEON_MAX_VCE_LEVELS)
					break;
				vce_clk = (VCEClockInfo *)
					((u8 *)&array->entries[0] +
					 (state_entry->ucVCEClockInfoIndex * sizeof(VCEClockInfo)));
				rdev->pm.dpm.vce_states[i].evclk =
					le16_to_cpu(vce_clk->usEVClkLow) | (vce_clk->ucEVClkHigh << 16);
				rdev->pm.dpm.vce_states[i].ecclk =
					le16_to_cpu(vce_clk->usECClkLow) | (vce_clk->ucECClkHigh << 16);
				rdev->pm.dpm.vce_states[i].clk_idx =
					state_entry->ucClockInfoIndex & 0x3f;
				rdev->pm.dpm.vce_states[i].pstate =
					(state_entry->ucClockInfoIndex & 0xc0) >> 6;
				state_entry = (ATOM_PPLIB_VCE_State_Record *)
					((u8 *)state_entry + sizeof(ATOM_PPLIB_VCE_State_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V3) &&
		    ext_hdr->usUVDTableOffset) {
			UVDClockInfoArray *array = (UVDClockInfoArray *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usUVDTableOffset) + 1);
			ATOM_PPLIB_UVD_Clock_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_UVD_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usUVDTableOffset) + 1 +
				 1 + (array->ucNumEntries * sizeof (UVDClockInfo)));
			ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct radeon_uvd_clock_voltage_dependency_entry);
			rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				UVDClockInfo *uvd_clk = (UVDClockInfo *)
					((u8 *)&array->entries[0] +
					 (entry->ucUVDClockInfoIndex * sizeof(UVDClockInfo)));
				rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].vclk =
					le16_to_cpu(uvd_clk->usVClkLow) | (uvd_clk->ucVClkHigh << 16);
				rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].dclk =
					le16_to_cpu(uvd_clk->usDClkLow) | (uvd_clk->ucDClkHigh << 16);
				rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V4) &&
		    ext_hdr->usSAMUTableOffset) {
			ATOM_PPLIB_SAMClk_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_SAMClk_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usSAMUTableOffset) + 1);
			ATOM_PPLIB_SAMClk_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct radeon_clock_voltage_dependency_entry);
			rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].clk =
					le16_to_cpu(entry->usSAMClockLow) | (entry->ucSAMClockHigh << 16);
				rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_SAMClk_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_SAMClk_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V5) &&
		    ext_hdr->usPPMTableOffset) {
			ATOM_PPLIB_PPM_Table *ppm = (ATOM_PPLIB_PPM_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usPPMTableOffset));
			rdev->pm.dpm.dyn_state.ppm_table =
				kzalloc(sizeof(struct radeon_ppm_table), GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.ppm_table) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			rdev->pm.dpm.dyn_state.ppm_table->ppm_design = ppm->ucPpmDesign;
			rdev->pm.dpm.dyn_state.ppm_table->cpu_core_number =
				le16_to_cpu(ppm->usCpuCoreNumber);
			rdev->pm.dpm.dyn_state.ppm_table->platform_tdp =
				le32_to_cpu(ppm->ulPlatformTDP);
			rdev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdp =
				le32_to_cpu(ppm->ulSmallACPlatformTDP);
			rdev->pm.dpm.dyn_state.ppm_table->platform_tdc =
				le32_to_cpu(ppm->ulPlatformTDC);
			rdev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdc =
				le32_to_cpu(ppm->ulSmallACPlatformTDC);
			rdev->pm.dpm.dyn_state.ppm_table->apu_tdp =
				le32_to_cpu(ppm->ulApuTDP);
			rdev->pm.dpm.dyn_state.ppm_table->dgpu_tdp =
				le32_to_cpu(ppm->ulDGpuTDP);
			rdev->pm.dpm.dyn_state.ppm_table->dgpu_ulv_power =
				le32_to_cpu(ppm->ulDGpuUlvPower);
			rdev->pm.dpm.dyn_state.ppm_table->tj_max =
				le32_to_cpu(ppm->ulTjmax);
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V6) &&
		    ext_hdr->usACPTableOffset) {
			ATOM_PPLIB_ACPClk_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_ACPClk_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usACPTableOffset) + 1);
			ATOM_PPLIB_ACPClk_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct radeon_clock_voltage_dependency_entry);
			rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].clk =
					le16_to_cpu(entry->usACPClockLow) | (entry->ucACPClockHigh << 16);
				rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_ACPClk_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_ACPClk_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V7) &&
		    ext_hdr->usPowerTuneTableOffset) {
			u8 rev = *(u8 *)(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
			ATOM_PowerTune_Table *pt;
			rdev->pm.dpm.dyn_state.cac_tdp_table =
				kzalloc(sizeof(struct radeon_cac_tdp_table), GFP_KERNEL);
			if (!rdev->pm.dpm.dyn_state.cac_tdp_table) {
				r600_free_extended_power_table(rdev);
				return -ENOMEM;
			}
			if (rev > 0) {
				ATOM_PPLIB_POWERTUNE_Table_V1 *ppt = (ATOM_PPLIB_POWERTUNE_Table_V1 *)
					(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
				rdev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit =
					le16_to_cpu(ppt->usMaximumPowerDeliveryLimit);
				pt = &ppt->power_tune_table;
			} else {
				ATOM_PPLIB_POWERTUNE_Table *ppt = (ATOM_PPLIB_POWERTUNE_Table *)
					(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
				rdev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit = 255;
				pt = &ppt->power_tune_table;
			}
			rdev->pm.dpm.dyn_state.cac_tdp_table->tdp = le16_to_cpu(pt->usTDP);
			rdev->pm.dpm.dyn_state.cac_tdp_table->configurable_tdp =
				le16_to_cpu(pt->usConfigurableTDP);
			rdev->pm.dpm.dyn_state.cac_tdp_table->tdc = le16_to_cpu(pt->usTDC);
			rdev->pm.dpm.dyn_state.cac_tdp_table->battery_power_limit =
				le16_to_cpu(pt->usBatteryPowerLimit);
			rdev->pm.dpm.dyn_state.cac_tdp_table->small_power_limit =
				le16_to_cpu(pt->usSmallPowerLimit);
			rdev->pm.dpm.dyn_state.cac_tdp_table->low_cac_leakage =
				le16_to_cpu(pt->usLowCACLeakage);
			rdev->pm.dpm.dyn_state.cac_tdp_table->high_cac_leakage =
				le16_to_cpu(pt->usHighCACLeakage);
		}
	}

	return 0;
}

void r600_free_extended_power_table(struct radeon_device *rdev)
{
	struct radeon_dpm_dynamic_state *dyn_state = &rdev->pm.dpm.dyn_state;

	kfree(dyn_state->vddc_dependency_on_sclk.entries);
	kfree(dyn_state->vddci_dependency_on_mclk.entries);
	kfree(dyn_state->vddc_dependency_on_mclk.entries);
	kfree(dyn_state->mvdd_dependency_on_mclk.entries);
	kfree(dyn_state->cac_leakage_table.entries);
	kfree(dyn_state->phase_shedding_limits_table.entries);
	kfree(dyn_state->ppm_table);
	kfree(dyn_state->cac_tdp_table);
	kfree(dyn_state->vce_clock_voltage_dependency_table.entries);
	kfree(dyn_state->uvd_clock_voltage_dependency_table.entries);
	kfree(dyn_state->samu_clock_voltage_dependency_table.entries);
	kfree(dyn_state->acp_clock_voltage_dependency_table.entries);
}
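
/*
 * Illustrative sketch only, not part of the original file and not compiled
 * (guarded by #if 0): the extended power-table parser that ends above
 * kzalloc()s every table it fills in, and r600_free_extended_power_table()
 * is the single teardown point for all of them.  An ASIC dpm backend would
 * typically call the parser from its ->init path and release everything
 * from its ->fini path.  "example_dpm_fini" is a hypothetical name used
 * only for this sketch; the rdev->pm.dpm.ps / num_ps / priv fields are
 * assumed to follow the usual radeon dpm layout.
 */
#if 0
static void example_dpm_fini(struct radeon_device *rdev)
{
	int i;

	/* Per-state private data allocated by the backend's own parser. */
	for (i = 0; i < rdev->pm.dpm.num_ps; i++)
		kfree(rdev->pm.dpm.ps[i].ps_priv);
	kfree(rdev->pm.dpm.ps);
	kfree(rdev->pm.dpm.priv);

	/* Release all tables allocated by the extended power-table parser. */
	r600_free_extended_power_table(rdev);
}
#endif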

enum radeon_pcie_gen r600_get_pcie_gen_support(struct radeon_device *rdev,
					       u32 sys_mask,
					       enum radeon_pcie_gen asic_gen,
					       enum radeon_pcie_gen default_gen)
{
	switch (asic_gen) {
	case RADEON_PCIE_GEN1:
		return RADEON_PCIE_GEN1;
	case RADEON_PCIE_GEN2:
		return RADEON_PCIE_GEN2;
	case RADEON_PCIE_GEN3:
		return RADEON_PCIE_GEN3;
	default:
		if ((sys_mask & RADEON_PCIE_SPEED_80) && (default_gen == RADEON_PCIE_GEN3))
			return RADEON_PCIE_GEN3;
		else if ((sys_mask & RADEON_PCIE_SPEED_50) && (default_gen == RADEON_PCIE_GEN2))
			return RADEON_PCIE_GEN2;
		else
			return RADEON_PCIE_GEN1;
	}
	return RADEON_PCIE_GEN1;
}

u16 r600_get_pcie_lane_support(struct radeon_device *rdev,
			       u16 asic_lanes,
			       u16 default_lanes)
{
	switch (asic_lanes) {
	case 0:
	default:
		return default_lanes;
	case 1:
		return 1;
	case 2:
		return 2;
	case 4:
		return 4;
	case 8:
		return 8;
	case 12:
		return 12;
	case 16:
		return 16;
	}
}

u8 r600_encode_pci_lane_width(u32 lanes)
{
	/* Map a physical lane count to the link-width encoding:
	 * 1->1, 2->2, 4->3, 8->4, 12->5, 16->6; other counts map to 0. */
	u8 encoded_lanes[] = { 0, 1, 2, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0, 0, 6 };

	if (lanes > 16)
		return 0;

	return encoded_lanes[lanes];
}
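
/*
 * Illustrative sketch only, not part of the original file and not compiled
 * (guarded by #if 0): how the PCIe helpers above combine.
 * r600_get_pcie_lane_support() returns the requested lane count when it is
 * one of the supported widths (1/2/4/8/12/16) and the caller's default
 * otherwise; r600_encode_pci_lane_width() then converts that count into the
 * encoded link-width field value.  "example_pick_link_width" is a
 * hypothetical name used only for this sketch.
 */
#if 0
static u8 example_pick_link_width(struct radeon_device *rdev, u16 requested_lanes)
{
	/* Fall back to x16 if the requested width is not a supported value. */
	u16 lanes = r600_get_pcie_lane_support(rdev, requested_lanes, 16);

	/* Encode the lane count (1->1, 2->2, 4->3, 8->4, 12->5, 16->6). */
	return r600_encode_pci_lane_width(lanes);
}
#endif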