1 /* $NetBSD: radeon_rv770_dpm.c,v 1.1 2018/08/27 14:38:20 riastradh Exp $ */
2
3 /*
4 * Copyright 2011 Advanced Micro Devices, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Alex Deucher
25 */
26
27 #include <sys/cdefs.h>
28 __KERNEL_RCSID(0, "$NetBSD: radeon_rv770_dpm.c,v 1.1 2018/08/27 14:38:20 riastradh Exp $");
29
30 #include "drmP.h"
31 #include "radeon.h"
32 #include "radeon_asic.h"
33 #include "rv770d.h"
34 #include "r600_dpm.h"
35 #include "rv770_dpm.h"
36 #include "cypress_dpm.h"
37 #include "atom.h"
38 #include <linux/seq_file.h>
39
40 #define MC_CG_ARB_FREQ_F0 0x0a
41 #define MC_CG_ARB_FREQ_F1 0x0b
42 #define MC_CG_ARB_FREQ_F2 0x0c
43 #define MC_CG_ARB_FREQ_F3 0x0d
44
45 #define MC_CG_SEQ_DRAMCONF_S0 0x05
46 #define MC_CG_SEQ_DRAMCONF_S1 0x06
47
48 #define PCIE_BUS_CLK 10000
49 #define TCLK (PCIE_BUS_CLK / 10)
50
51 #define SMC_RAM_END 0xC000
52
53 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
54 {
55 struct rv7xx_ps *ps = rps->ps_priv;
56
57 return ps;
58 }
59
60 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
61 {
62 struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
63
64 return pi;
65 }
66
67 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
68 {
69 struct evergreen_power_info *pi = rdev->pm.dpm.priv;
70
71 return pi;
72 }
73
74 static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
75 bool enable)
76 {
77 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
78 u32 tmp;
79
80 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
81 if (enable) {
82 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
83 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
84 tmp |= LC_GEN2_EN_STRAP;
85 } else {
86 if (!pi->boot_in_gen2) {
87 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
88 tmp &= ~LC_GEN2_EN_STRAP;
89 }
90 }
91 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
92 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
93 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
94
95 }
96
97 static void rv770_enable_l0s(struct radeon_device *rdev)
98 {
99 u32 tmp;
100
101 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
102 tmp |= LC_L0S_INACTIVITY(3);
103 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
104 }
105
106 static void rv770_enable_l1(struct radeon_device *rdev)
107 {
108 u32 tmp;
109
110 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
111 tmp &= ~LC_L1_INACTIVITY_MASK;
112 tmp |= LC_L1_INACTIVITY(4);
113 tmp &= ~LC_PMI_TO_L1_DIS;
114 tmp &= ~LC_ASPM_TO_L1_DIS;
115 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
116 }
117
118 static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
119 {
120 u32 tmp;
121
122 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
123 tmp |= LC_L1_INACTIVITY(8);
124 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
125
126 /* NOTE, this is a PCIE indirect reg, not PCIE PORT */
127 tmp = RREG32_PCIE(PCIE_P_CNTL);
128 tmp |= P_PLL_PWRDN_IN_L1L23;
129 tmp &= ~P_PLL_BUF_PDNB;
130 tmp &= ~P_PLL_PDNB;
131 tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
132 WREG32_PCIE(PCIE_P_CNTL, tmp);
133 }
134
135 static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
136 bool enable)
137 {
138 if (enable)
139 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
140 else {
141 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
142 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
143 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
144 RREG32(GB_TILING_CONFIG);
145 }
146 }
147
148 static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
149 bool enable)
150 {
151 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
152
153 if (enable) {
154 u32 mgcg_cgtt_local0;
155
156 if (rdev->family == CHIP_RV770)
157 mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
158 else
159 mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;
160
161 WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
162 WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));
163
164 if (pi->mgcgtssm)
165 WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
166 } else {
167 WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
168 WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
169 }
170 }
171
172 void rv770_restore_cgcg(struct radeon_device *rdev)
173 {
174 bool dpm_en = false, cg_en = false;
175
176 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
177 dpm_en = true;
178 if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
179 cg_en = true;
180
181 if (dpm_en && !cg_en)
182 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
183 }
184
185 static void rv770_start_dpm(struct radeon_device *rdev)
186 {
187 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
188
189 WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
190
191 WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
192 }
193
194 void rv770_stop_dpm(struct radeon_device *rdev)
195 {
196 PPSMC_Result result;
197
198 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);
199
200 if (result != PPSMC_Result_OK)
201 DRM_DEBUG("Could not force DPM to low.\n");
202
203 WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
204
205 WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
206
207 WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
208 }
209
210 bool rv770_dpm_enabled(struct radeon_device *rdev)
211 {
212 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
213 return true;
214 else
215 return false;
216 }
217
218 void rv770_enable_thermal_protection(struct radeon_device *rdev,
219 bool enable)
220 {
221 if (enable)
222 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
223 else
224 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
225 }
226
227 void rv770_enable_acpi_pm(struct radeon_device *rdev)
228 {
229 WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
230 }
231
232 u8 rv770_get_seq_value(struct radeon_device *rdev,
233 struct rv7xx_pl *pl)
234 {
235 return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
236 MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
237 }
238
239 #if 0
240 int rv770_read_smc_soft_register(struct radeon_device *rdev,
241 u16 reg_offset, u32 *value)
242 {
243 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
244
245 return rv770_read_smc_sram_dword(rdev,
246 pi->soft_regs_start + reg_offset,
247 value, pi->sram_end);
248 }
249 #endif
250
251 int rv770_write_smc_soft_register(struct radeon_device *rdev,
252 u16 reg_offset, u32 value)
253 {
254 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
255
256 return rv770_write_smc_sram_dword(rdev,
257 pi->soft_regs_start + reg_offset,
258 value, pi->sram_end);
259 }
260
261 int rv770_populate_smc_t(struct radeon_device *rdev,
262 struct radeon_ps *radeon_state,
263 RV770_SMC_SWSTATE *smc_state)
264 {
265 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
266 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
267 int i;
268 int a_n;
269 int a_d;
270 u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
271 u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
272 u32 a_t;
273
274 l[0] = 0;
275 r[2] = 100;
276
277 a_n = (int)state->medium.sclk * pi->lmp +
278 (int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
279 a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
280 (int)state->medium.sclk * pi->lmp;
281
282 l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
283 r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);
284
285 a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
286 (R600_AH_DFLT - pi->rmp);
287 a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
288 (int)state->high.sclk * pi->lhp;
289
290 l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
291 r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);
292
293 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
294 a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
295 smc_state->levels[i].aT = cpu_to_be32(a_t);
296 }
297
298 a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
299 CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);
300
301 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
302 cpu_to_be32(a_t);
303
304 return 0;
305 }
306
307 int rv770_populate_smc_sp(struct radeon_device *rdev,
308 struct radeon_ps *radeon_state,
309 RV770_SMC_SWSTATE *smc_state)
310 {
311 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
312 int i;
313
314 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
315 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
316
317 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
318 cpu_to_be32(pi->psp);
319
320 return 0;
321 }
322
323 static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
324 u32 reference_clock,
325 bool gddr5,
326 struct atom_clock_dividers *dividers,
327 u32 *clkf,
328 u32 *clkfrac)
329 {
330 u32 post_divider, reference_divider, feedback_divider8;
331 u32 fyclk;
332
333 if (gddr5)
334 fyclk = (memory_clock * 8) / 2;
335 else
336 fyclk = (memory_clock * 4) / 2;
337
338 post_divider = dividers->post_div;
339 reference_divider = dividers->ref_div;
340
341 feedback_divider8 =
342 (8 * fyclk * reference_divider * post_divider) / reference_clock;
343
344 *clkf = feedback_divider8 / 8;
345 *clkfrac = feedback_divider8 % 8;
346 }
347
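/*
 * Encode a power-of-two YCLK post divider as its log2 value
 * (1 -> 0, 2 -> 1, 4 -> 2, 8 -> 3, 16 -> 4); any other value is rejected.
 */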
348 static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
349 {
350 int ret = 0;
351
352 switch (postdiv) {
353 case 1:
354 *encoded_postdiv = 0;
355 break;
356 case 2:
357 *encoded_postdiv = 1;
358 break;
359 case 4:
360 *encoded_postdiv = 2;
361 break;
362 case 8:
363 *encoded_postdiv = 3;
364 break;
365 case 16:
366 *encoded_postdiv = 4;
367 break;
368 default:
369 ret = -EINVAL;
370 break;
371 }
372
373 return ret;
374 }
375
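/*
 * Map the MPLL feedback divider (clkf) to an IBIAS setting by comparing
 * it against fixed thresholds.
 */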
376 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
377 {
378 if (clkf <= 0x10)
379 return 0x4B;
380 if (clkf <= 0x19)
381 return 0x5B;
382 if (clkf <= 0x21)
383 return 0x2B;
384 if (clkf <= 0x27)
385 return 0x6C;
386 if (clkf <= 0x31)
387 return 0x9D;
388 return 0xC6;
389 }
390
391 static int rv770_populate_mclk_value(struct radeon_device *rdev,
392 u32 engine_clock, u32 memory_clock,
393 RV7XX_SMC_MCLK_VALUE *mclk)
394 {
395 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
396 u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
397 u32 mpll_ad_func_cntl =
398 pi->clk_regs.rv770.mpll_ad_func_cntl;
399 u32 mpll_ad_func_cntl_2 =
400 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
401 u32 mpll_dq_func_cntl =
402 pi->clk_regs.rv770.mpll_dq_func_cntl;
403 u32 mpll_dq_func_cntl_2 =
404 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
405 u32 mclk_pwrmgt_cntl =
406 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
407 u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
408 struct atom_clock_dividers dividers;
409 u32 reference_clock = rdev->clock.mpll.reference_freq;
410 u32 clkf, clkfrac;
411 u32 postdiv_yclk;
412 u32 ibias;
413 int ret;
414
415 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
416 memory_clock, false, &dividers);
417 if (ret)
418 return ret;
419
420 if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
421 return -EINVAL;
422
423 rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
424 pi->mem_gddr5,
425 &dividers, &clkf, &clkfrac);
426
427 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
428 if (ret)
429 return ret;
430
431 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
432
433 mpll_ad_func_cntl &= ~(CLKR_MASK |
434 YCLK_POST_DIV_MASK |
435 CLKF_MASK |
436 CLKFRAC_MASK |
437 IBIAS_MASK);
438 mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
439 mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
440 mpll_ad_func_cntl |= CLKF(clkf);
441 mpll_ad_func_cntl |= CLKFRAC(clkfrac);
442 mpll_ad_func_cntl |= IBIAS(ibias);
443
444 if (dividers.vco_mode)
445 mpll_ad_func_cntl_2 |= VCO_MODE;
446 else
447 mpll_ad_func_cntl_2 &= ~VCO_MODE;
448
449 if (pi->mem_gddr5) {
450 rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
451 reference_clock,
452 pi->mem_gddr5,
453 &dividers, &clkf, &clkfrac);
454
455 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
456
457 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
458 if (ret)
459 return ret;
460
461 mpll_dq_func_cntl &= ~(CLKR_MASK |
462 YCLK_POST_DIV_MASK |
463 CLKF_MASK |
464 CLKFRAC_MASK |
465 IBIAS_MASK);
466 mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
467 mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
468 mpll_dq_func_cntl |= CLKF(clkf);
469 mpll_dq_func_cntl |= CLKFRAC(clkfrac);
470 mpll_dq_func_cntl |= IBIAS(ibias);
471
472 if (dividers.vco_mode)
473 mpll_dq_func_cntl_2 |= VCO_MODE;
474 else
475 mpll_dq_func_cntl_2 &= ~VCO_MODE;
476 }
477
478 mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
479 mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
480 mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
481 mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
482 mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
483 mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
484 mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
485
486 return 0;
487 }
488
489 static int rv770_populate_sclk_value(struct radeon_device *rdev,
490 u32 engine_clock,
491 RV770_SMC_SCLK_VALUE *sclk)
492 {
493 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
494 struct atom_clock_dividers dividers;
495 u32 spll_func_cntl =
496 pi->clk_regs.rv770.cg_spll_func_cntl;
497 u32 spll_func_cntl_2 =
498 pi->clk_regs.rv770.cg_spll_func_cntl_2;
499 u32 spll_func_cntl_3 =
500 pi->clk_regs.rv770.cg_spll_func_cntl_3;
501 u32 cg_spll_spread_spectrum =
502 pi->clk_regs.rv770.cg_spll_spread_spectrum;
503 u32 cg_spll_spread_spectrum_2 =
504 pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
505 u64 tmp;
506 u32 reference_clock = rdev->clock.spll.reference_freq;
507 u32 reference_divider, post_divider;
508 u32 fbdiv;
509 int ret;
510
511 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
512 engine_clock, false, &dividers);
513 if (ret)
514 return ret;
515
516 reference_divider = 1 + dividers.ref_div;
517
518 if (dividers.enable_post_div)
519 post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
520 else
521 post_divider = 1;
522
523 tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
524 do_div(tmp, reference_clock);
525 fbdiv = (u32) tmp;
526
527 if (dividers.enable_post_div)
528 spll_func_cntl |= SPLL_DIVEN;
529 else
530 spll_func_cntl &= ~SPLL_DIVEN;
531 spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
532 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
533 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
534 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);
535
536 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
537 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
538
539 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
540 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
541 spll_func_cntl_3 |= SPLL_DITHEN;
542
543 if (pi->sclk_ss) {
544 struct radeon_atom_ss ss;
545 u32 vco_freq = engine_clock * post_divider;
546
547 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
548 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
549 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
550 u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);
551
552 cg_spll_spread_spectrum &= ~CLKS_MASK;
553 cg_spll_spread_spectrum |= CLKS(clk_s);
554 cg_spll_spread_spectrum |= SSEN;
555
556 cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
557 cg_spll_spread_spectrum_2 |= CLKV(clk_v);
558 }
559 }
560
561 sclk->sclk_value = cpu_to_be32(engine_clock);
562 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
563 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
564 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
565 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
566 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);
567
568 return 0;
569 }
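/*
 * Pick the first VDDC table entry that can supply at least the requested
 * voltage; fail if the request exceeds every entry in the table.
 */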
570
571 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
572 RV770_SMC_VOLTAGE_VALUE *voltage)
573 {
574 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
575 int i;
576
577 if (!pi->voltage_control) {
578 voltage->index = 0;
579 voltage->value = 0;
580 return 0;
581 }
582
583 for (i = 0; i < pi->valid_vddc_entries; i++) {
584 if (vddc <= pi->vddc_table[i].vddc) {
585 voltage->index = pi->vddc_table[i].vddc_index;
586 voltage->value = cpu_to_be16(vddc);
587 break;
588 }
589 }
590
591 if (i == pi->valid_vddc_entries)
592 return -EINVAL;
593
594 return 0;
595 }
596
597 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
598 RV770_SMC_VOLTAGE_VALUE *voltage)
599 {
600 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
601
602 if (!pi->mvdd_control) {
603 voltage->index = MVDD_HIGH_INDEX;
604 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
605 return 0;
606 }
607
608 if (mclk <= pi->mvdd_split_frequency) {
609 voltage->index = MVDD_LOW_INDEX;
610 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
611 } else {
612 voltage->index = MVDD_HIGH_INDEX;
613 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
614 }
615
616 return 0;
617 }
618
619 static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
620 struct rv7xx_pl *pl,
621 RV770_SMC_HW_PERFORMANCE_LEVEL *level,
622 u8 watermark_level)
623 {
624 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
625 int ret;
626
627 level->gen2PCIE = pi->pcie_gen2 ?
628 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
629 level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
630 level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
631 level->displayWatermark = watermark_level;
632
633 if (rdev->family == CHIP_RV740)
634 ret = rv740_populate_sclk_value(rdev, pl->sclk,
635 &level->sclk);
636 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
637 ret = rv730_populate_sclk_value(rdev, pl->sclk,
638 &level->sclk);
639 else
640 ret = rv770_populate_sclk_value(rdev, pl->sclk,
641 &level->sclk);
642 if (ret)
643 return ret;
644
645 if (rdev->family == CHIP_RV740) {
646 if (pi->mem_gddr5) {
647 if (pl->mclk <= pi->mclk_strobe_mode_threshold)
648 level->strobeMode =
649 rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
650 else
651 level->strobeMode = 0;
652
653 if (pl->mclk > pi->mclk_edc_enable_threshold)
654 level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
655 else
656 level->mcFlags = 0;
657 }
658 ret = rv740_populate_mclk_value(rdev, pl->sclk,
659 pl->mclk, &level->mclk);
660 } else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
661 ret = rv730_populate_mclk_value(rdev, pl->sclk,
662 pl->mclk, &level->mclk);
663 else
664 ret = rv770_populate_mclk_value(rdev, pl->sclk,
665 pl->mclk, &level->mclk);
666 if (ret)
667 return ret;
668
669 ret = rv770_populate_vddc_value(rdev, pl->vddc,
670 &level->vddc);
671 if (ret)
672 return ret;
673
674 ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
675
676 return ret;
677 }
678
679 static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
680 struct radeon_ps *radeon_state,
681 RV770_SMC_SWSTATE *smc_state)
682 {
683 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
684 int ret;
685
686 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
687 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
688
689 ret = rv770_convert_power_level_to_smc(rdev,
690 &state->low,
691 &smc_state->levels[0],
692 PPSMC_DISPLAY_WATERMARK_LOW);
693 if (ret)
694 return ret;
695
696 ret = rv770_convert_power_level_to_smc(rdev,
697 &state->medium,
698 &smc_state->levels[1],
699 PPSMC_DISPLAY_WATERMARK_LOW);
700 if (ret)
701 return ret;
702
703 ret = rv770_convert_power_level_to_smc(rdev,
704 &state->high,
705 &smc_state->levels[2],
706 PPSMC_DISPLAY_WATERMARK_HIGH);
707 if (ret)
708 return ret;
709
710 smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
711 smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
712 smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
713
714 smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
715 &state->low);
716 smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
717 &state->medium);
718 smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
719 &state->high);
720
721 rv770_populate_smc_sp(rdev, radeon_state, smc_state);
722
723 return rv770_populate_smc_t(rdev, radeon_state, smc_state);
724
725 }
726
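/*
 * Derive the MC arbiter refresh rate from the DRAM row count
 * (MC_ARB_RAMCFG:NOOFROWS) and the refresh interval field in
 * MC_SEQ_MISC0, scaled by the engine clock.
 */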
727 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
728 u32 engine_clock)
729 {
730 u32 dram_rows;
731 u32 dram_refresh_rate;
732 u32 mc_arb_rfsh_rate;
733 u32 tmp;
734
735 tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
736 dram_rows = 1 << (tmp + 10);
737 tmp = RREG32(MC_SEQ_MISC0) & 3;
738 dram_refresh_rate = 1 << (tmp + 3);
739 mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
740
741 return mc_arb_rfsh_rate;
742 }
743
744 static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
745 struct radeon_ps *radeon_state)
746 {
747 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
748 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
749 u32 sqm_ratio;
750 u32 arb_refresh_rate;
751 u32 high_clock;
752
753 if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
754 high_clock = state->high.sclk;
755 else
756 high_clock = (state->low.sclk * 0xFF / 0x40);
757
758 radeon_atom_set_engine_dram_timings(rdev, high_clock,
759 state->high.mclk);
760
761 sqm_ratio =
762 STATE0(64 * high_clock / pi->boot_sclk) |
763 STATE1(64 * high_clock / state->low.sclk) |
764 STATE2(64 * high_clock / state->medium.sclk) |
765 STATE3(64 * high_clock / state->high.sclk);
766 WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
767
768 arb_refresh_rate =
769 POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
770 POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
771 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
772 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
773 WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
774 }
775
776 void rv770_enable_backbias(struct radeon_device *rdev,
777 bool enable)
778 {
779 if (enable)
780 WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
781 else
782 WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
783 }
784
785 static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
786 bool enable)
787 {
788 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
789
790 if (enable) {
791 if (pi->sclk_ss)
792 WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
793
794 if (pi->mclk_ss) {
795 if (rdev->family == CHIP_RV740)
796 rv740_enable_mclk_spread_spectrum(rdev, true);
797 }
798 } else {
799 WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
800
801 WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
802
803 WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);
804
805 if (rdev->family == CHIP_RV740)
806 rv740_enable_mclk_spread_spectrum(rdev, false);
807 }
808 }
809
810 static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
811 {
812 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
813
814 if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
815 WREG32(MPLL_TIME,
816 (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
817 MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
818 }
819 }
820
821 void rv770_setup_bsp(struct radeon_device *rdev)
822 {
823 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
824 u32 xclk = radeon_get_xclk(rdev);
825
826 r600_calculate_u_and_p(pi->asi,
827 xclk,
828 16,
829 &pi->bsp,
830 &pi->bsu);
831
832 r600_calculate_u_and_p(pi->pasi,
833 xclk,
834 16,
835 &pi->pbsp,
836 &pi->pbsu);
837
838 pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
839 pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
840
841 WREG32(CG_BSP, pi->dsp);
842
843 }
844
845 void rv770_program_git(struct radeon_device *rdev)
846 {
847 WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
848 }
849
850 void rv770_program_tp(struct radeon_device *rdev)
851 {
852 int i;
853 enum r600_td td = R600_TD_DFLT;
854
855 for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
856 WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
857
858 if (td == R600_TD_AUTO)
859 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
860 else
861 WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
862 if (td == R600_TD_UP)
863 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
864 if (td == R600_TD_DOWN)
865 WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
866 }
867
868 void rv770_program_tpp(struct radeon_device *rdev)
869 {
870 WREG32(CG_TPC, R600_TPC_DFLT);
871 }
872
873 void rv770_program_sstp(struct radeon_device *rdev)
874 {
875 WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
876 }
877
878 void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
879 {
880 WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
881 }
882
883 static void rv770_enable_display_gap(struct radeon_device *rdev)
884 {
885 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
886
887 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
888 tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
889 DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
890 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
891 }
892
893 void rv770_program_vc(struct radeon_device *rdev)
894 {
895 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
896
897 WREG32(CG_FTV, pi->vrc);
898 }
899
900 void rv770_clear_vc(struct radeon_device *rdev)
901 {
902 WREG32(CG_FTV, 0);
903 }
904
905 int rv770_upload_firmware(struct radeon_device *rdev)
906 {
907 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
908 int ret;
909
910 rv770_reset_smc(rdev);
911 rv770_stop_smc_clock(rdev);
912
913 ret = rv770_load_smc_ucode(rdev, pi->sram_end);
914 if (ret)
915 return ret;
916
917 return 0;
918 }
919
920 static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
921 RV770_SMC_STATETABLE *table)
922 {
923 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
924
925 u32 mpll_ad_func_cntl =
926 pi->clk_regs.rv770.mpll_ad_func_cntl;
927 u32 mpll_ad_func_cntl_2 =
928 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
929 u32 mpll_dq_func_cntl =
930 pi->clk_regs.rv770.mpll_dq_func_cntl;
931 u32 mpll_dq_func_cntl_2 =
932 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
933 u32 spll_func_cntl =
934 pi->clk_regs.rv770.cg_spll_func_cntl;
935 u32 spll_func_cntl_2 =
936 pi->clk_regs.rv770.cg_spll_func_cntl_2;
937 u32 spll_func_cntl_3 =
938 pi->clk_regs.rv770.cg_spll_func_cntl_3;
939 u32 mclk_pwrmgt_cntl;
940 u32 dll_cntl;
941
942 table->ACPIState = table->initialState;
943
944 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
945
946 if (pi->acpi_vddc) {
947 rv770_populate_vddc_value(rdev, pi->acpi_vddc,
948 &table->ACPIState.levels[0].vddc);
949 if (pi->pcie_gen2) {
950 if (pi->acpi_pcie_gen2)
951 table->ACPIState.levels[0].gen2PCIE = 1;
952 else
953 table->ACPIState.levels[0].gen2PCIE = 0;
954 } else
955 table->ACPIState.levels[0].gen2PCIE = 0;
956 if (pi->acpi_pcie_gen2)
957 table->ACPIState.levels[0].gen2XSP = 1;
958 else
959 table->ACPIState.levels[0].gen2XSP = 0;
960 } else {
961 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
962 &table->ACPIState.levels[0].vddc);
963 table->ACPIState.levels[0].gen2PCIE = 0;
964 }
965
966
967 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
968
969 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
970
971 mclk_pwrmgt_cntl = (MRDCKA0_RESET |
972 MRDCKA1_RESET |
973 MRDCKB0_RESET |
974 MRDCKB1_RESET |
975 MRDCKC0_RESET |
976 MRDCKC1_RESET |
977 MRDCKD0_RESET |
978 MRDCKD1_RESET);
979
980 dll_cntl = 0xff000000;
981
982 spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
983
984 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
985 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
986
987 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
988 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
989 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
990 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
991
992 table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
993 table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
994
995 table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
996
997 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
998 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
999 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1000
1001 table->ACPIState.levels[0].sclk.sclk_value = 0;
1002
1003 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1004
1005 table->ACPIState.levels[1] = table->ACPIState.levels[0];
1006 table->ACPIState.levels[2] = table->ACPIState.levels[0];
1007
1008 return 0;
1009 }
1010
1011 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
1012 RV770_SMC_VOLTAGE_VALUE *voltage)
1013 {
1014 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1015
1016 if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
1017 (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low) ) {
1018 voltage->index = MVDD_LOW_INDEX;
1019 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1020 } else {
1021 voltage->index = MVDD_HIGH_INDEX;
1022 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1023 }
1024
1025 return 0;
1026 }
1027
1028 static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
1029 struct radeon_ps *radeon_state,
1030 RV770_SMC_STATETABLE *table)
1031 {
1032 struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
1033 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1034 u32 a_t;
1035
1036 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1037 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1038 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1039 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1040 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1041 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1042 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1043 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1044 table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1045 cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1046 table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1047 cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1048
1049 table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1050 cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1051 table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1052 cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1053
1054 table->initialState.levels[0].mclk.mclk770.mclk_value =
1055 cpu_to_be32(initial_state->low.mclk);
1056
1057 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1058 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1059 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1060 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1061 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1062 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1063 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1064 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1065 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1066 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1067
1068 table->initialState.levels[0].sclk.sclk_value =
1069 cpu_to_be32(initial_state->low.sclk);
1070
1071 table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1072
1073 table->initialState.levels[0].seqValue =
1074 rv770_get_seq_value(rdev, &initial_state->low);
1075
1076 rv770_populate_vddc_value(rdev,
1077 initial_state->low.vddc,
1078 &table->initialState.levels[0].vddc);
1079 rv770_populate_initial_mvdd_value(rdev,
1080 &table->initialState.levels[0].mvdd);
1081
1082 a_t = CG_R(0xffff) | CG_L(0);
1083 table->initialState.levels[0].aT = cpu_to_be32(a_t);
1084
1085 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1086
1087 if (pi->boot_in_gen2)
1088 table->initialState.levels[0].gen2PCIE = 1;
1089 else
1090 table->initialState.levels[0].gen2PCIE = 0;
1091 if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1092 table->initialState.levels[0].gen2XSP = 1;
1093 else
1094 table->initialState.levels[0].gen2XSP = 0;
1095
1096 if (rdev->family == CHIP_RV740) {
1097 if (pi->mem_gddr5) {
1098 if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
1099 table->initialState.levels[0].strobeMode =
1100 rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
1101 else
1102 table->initialState.levels[0].strobeMode = 0;
1103
1104 if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
1105 table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1106 else
1107 table->initialState.levels[0].mcFlags = 0;
1108 }
1109 }
1110
1111 table->initialState.levels[1] = table->initialState.levels[0];
1112 table->initialState.levels[2] = table->initialState.levels[0];
1113
1114 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1115
1116 return 0;
1117 }
1118
1119 static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
1120 RV770_SMC_STATETABLE *table)
1121 {
1122 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1123 int i;
1124
1125 for (i = 0; i < pi->valid_vddc_entries; i++) {
1126 table->highSMIO[pi->vddc_table[i].vddc_index] =
1127 pi->vddc_table[i].high_smio;
1128 table->lowSMIO[pi->vddc_table[i].vddc_index] =
1129 cpu_to_be32(pi->vddc_table[i].low_smio);
1130 }
1131
1132 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1133 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1134 cpu_to_be32(pi->vddc_mask_low);
1135
1136 for (i = 0;
1137 ((i < pi->valid_vddc_entries) &&
1138 (pi->max_vddc_in_table >
1139 pi->vddc_table[i].vddc));
1140 i++);
1141
1142 table->maxVDDCIndexInPPTable =
1143 pi->vddc_table[i].vddc_index;
1144
1145 return 0;
1146 }
1147
1148 static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
1149 RV770_SMC_STATETABLE *table)
1150 {
1151 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1152
1153 if (pi->mvdd_control) {
1154 table->lowSMIO[MVDD_HIGH_INDEX] |=
1155 cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
1156 table->lowSMIO[MVDD_LOW_INDEX] |=
1157 cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);
1158
1159 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
1160 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
1161 cpu_to_be32(pi->mvdd_mask_low);
1162 }
1163
1164 return 0;
1165 }
1166
1167 static int rv770_init_smc_table(struct radeon_device *rdev,
1168 struct radeon_ps *radeon_boot_state)
1169 {
1170 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1171 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1172 RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1173 int ret;
1174
1175 memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1176
1177 pi->boot_sclk = boot_state->low.sclk;
1178
1179 rv770_populate_smc_vddc_table(rdev, table);
1180 rv770_populate_smc_mvdd_table(rdev, table);
1181
1182 switch (rdev->pm.int_thermal_type) {
1183 case THERMAL_TYPE_RV770:
1184 case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
1185 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1186 break;
1187 case THERMAL_TYPE_NONE:
1188 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1189 break;
1190 case THERMAL_TYPE_EXTERNAL_GPIO:
1191 default:
1192 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1193 break;
1194 }
1195
1196 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
1197 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1198
1199 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
1200 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;
1201
1202 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
1203 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
1204 }
1205
1206 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1207 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1208
1209 if (pi->mem_gddr5)
1210 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1211
1212 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1213 ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
1214 else
1215 ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
1216 if (ret)
1217 return ret;
1218
1219 if (rdev->family == CHIP_RV740)
1220 ret = rv740_populate_smc_acpi_state(rdev, table);
1221 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1222 ret = rv730_populate_smc_acpi_state(rdev, table);
1223 else
1224 ret = rv770_populate_smc_acpi_state(rdev, table);
1225 if (ret)
1226 return ret;
1227
1228 table->driverState = table->initialState;
1229
1230 return rv770_copy_bytes_to_smc(rdev,
1231 pi->state_table_start,
1232 (const u8 *)table,
1233 sizeof(RV770_SMC_STATETABLE),
1234 pi->sram_end);
1235 }
1236
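/*
 * Build the VDDC table from the ATOM min/max/step voltage values,
 * recording the SMIO (GPIO) pattern for each step and advancing the
 * table index whenever that pattern changes.
 */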
1237 static int rv770_construct_vddc_table(struct radeon_device *rdev)
1238 {
1239 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1240 u16 vmin, vmax, step;
1241 u32 steps = 0;
1242 u8 vddc_index = 0;
1243 u32 i;
1244
1245 radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &vmin);
1246 radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &vmax);
1247 radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1248
1249 steps = (vmax - vmin) / step + 1;
1250
1251 if (steps > MAX_NO_VREG_STEPS)
1252 return -EINVAL;
1253
1254 for (i = 0; i < steps; i++) {
1255 u32 gpio_pins, gpio_mask;
1256
1257 pi->vddc_table[i].vddc = (u16)(vmin + i * step);
1258 radeon_atom_get_voltage_gpio_settings(rdev,
1259 pi->vddc_table[i].vddc,
1260 SET_VOLTAGE_TYPE_ASIC_VDDC,
1261 &gpio_pins, &gpio_mask);
1262 pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1263 pi->vddc_table[i].high_smio = 0;
1264 pi->vddc_mask_low = gpio_mask;
1265 if (i > 0) {
1266 if ((pi->vddc_table[i].low_smio !=
1267 pi->vddc_table[i - 1].low_smio ) ||
1268 (pi->vddc_table[i].high_smio !=
1269 pi->vddc_table[i - 1].high_smio))
1270 vddc_index++;
1271 }
1272 pi->vddc_table[i].vddc_index = vddc_index;
1273 }
1274
1275 pi->valid_vddc_entries = (u8)steps;
1276
1277 return 0;
1278 }
1279
1280 static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1281 {
1282 if (memory_info->mem_type == MEM_TYPE_GDDR3)
1283 return 30000;
1284
1285 return 0;
1286 }
1287
1288 static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1289 {
1290 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1291 u32 gpio_pins, gpio_mask;
1292
1293 radeon_atom_get_voltage_gpio_settings(rdev,
1294 MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1295 &gpio_pins, &gpio_mask);
1296 pi->mvdd_mask_low = gpio_mask;
1297 pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1298 gpio_pins & gpio_mask;
1299
1300 radeon_atom_get_voltage_gpio_settings(rdev,
1301 MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1302 &gpio_pins, &gpio_mask);
1303 pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1304 gpio_pins & gpio_mask;
1305
1306 return 0;
1307 }
1308
1309 u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1310 {
1311 return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1312 }
1313
1314 static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1315 {
1316 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1317 u8 memory_module_index;
1318 struct atom_memory_info memory_info;
1319
1320 memory_module_index = rv770_get_memory_module_index(rdev);
1321
1322 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1323 pi->mvdd_control = false;
1324 return 0;
1325 }
1326
1327 pi->mvdd_split_frequency =
1328 rv770_get_mclk_split_point(&memory_info);
1329
1330 if (pi->mvdd_split_frequency == 0) {
1331 pi->mvdd_control = false;
1332 return 0;
1333 }
1334
1335 return rv770_get_mvdd_pin_configuration(rdev);
1336 }
1337
1338 void rv770_enable_voltage_control(struct radeon_device *rdev,
1339 bool enable)
1340 {
1341 if (enable)
1342 WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
1343 else
1344 WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
1345 }
1346
1347 static void rv770_program_display_gap(struct radeon_device *rdev)
1348 {
1349 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1350
1351 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1352 if (rdev->pm.dpm.new_active_crtcs & 1) {
1353 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1354 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1355 } else if (rdev->pm.dpm.new_active_crtcs & 2) {
1356 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1357 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1358 } else {
1359 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1360 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1361 }
1362 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1363 }
1364
1365 static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
1366 bool enable)
1367 {
1368 rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);
1369
1370 if (enable)
1371 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
1372 else
1373 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
1374 }
1375
1376 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1377 struct radeon_ps *radeon_new_state)
1378 {
1379 if ((rdev->family == CHIP_RV730) ||
1380 (rdev->family == CHIP_RV710) ||
1381 (rdev->family == CHIP_RV740))
1382 rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1383 else
1384 rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1385 }
1386
1387 static int rv770_upload_sw_state(struct radeon_device *rdev,
1388 struct radeon_ps *radeon_new_state)
1389 {
1390 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1391 u16 address = pi->state_table_start +
1392 offsetof(RV770_SMC_STATETABLE, driverState);
1393 RV770_SMC_SWSTATE state = { 0 };
1394 int ret;
1395
1396 ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1397 if (ret)
1398 return ret;
1399
1400 return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1401 sizeof(RV770_SMC_SWSTATE),
1402 pi->sram_end);
1403 }
1404
1405 int rv770_halt_smc(struct radeon_device *rdev)
1406 {
1407 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1408 return -EINVAL;
1409
1410 if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1411 return -EINVAL;
1412
1413 return 0;
1414 }
1415
1416 int rv770_resume_smc(struct radeon_device *rdev)
1417 {
1418 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1419 return -EINVAL;
1420 return 0;
1421 }
1422
1423 int rv770_set_sw_state(struct radeon_device *rdev)
1424 {
1425 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1426 DRM_DEBUG("rv770_set_sw_state failed\n");
1427 return 0;
1428 }
1429
1430 int rv770_set_boot_state(struct radeon_device *rdev)
1431 {
1432 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1433 return -EINVAL;
1434 return 0;
1435 }
1436
1437 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
1438 struct radeon_ps *new_ps,
1439 struct radeon_ps *old_ps)
1440 {
1441 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1442 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1443
1444 if ((new_ps->vclk == old_ps->vclk) &&
1445 (new_ps->dclk == old_ps->dclk))
1446 return;
1447
1448 if (new_state->high.sclk >= current_state->high.sclk)
1449 return;
1450
1451 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1452 }
1453
1454 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
1455 struct radeon_ps *new_ps,
1456 struct radeon_ps *old_ps)
1457 {
1458 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1459 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1460
1461 if ((new_ps->vclk == old_ps->vclk) &&
1462 (new_ps->dclk == old_ps->dclk))
1463 return;
1464
1465 if (new_state->high.sclk < current_state->high.sclk)
1466 return;
1467
1468 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1469 }
1470
1471 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1472 {
1473 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1474 return -EINVAL;
1475
1476 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1477 return -EINVAL;
1478
1479 return 0;
1480 }
1481
1482 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
1483 enum radeon_dpm_forced_level level)
1484 {
1485 PPSMC_Msg msg;
1486
1487 if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1488 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
1489 return -EINVAL;
1490 msg = PPSMC_MSG_ForceHigh;
1491 } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1492 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1493 return -EINVAL;
1494 msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
1495 } else {
1496 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1497 return -EINVAL;
1498 msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
1499 }
1500
1501 if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
1502 return -EINVAL;
1503
1504 rdev->pm.dpm.forced_level = level;
1505
1506 return 0;
1507 }
1508
1509 void r7xx_start_smc(struct radeon_device *rdev)
1510 {
1511 rv770_start_smc(rdev);
1512 rv770_start_smc_clock(rdev);
1513 }
1514
1515
1516 void r7xx_stop_smc(struct radeon_device *rdev)
1517 {
1518 rv770_reset_smc(rdev);
1519 rv770_stop_smc_clock(rdev);
1520 }
1521
1522 static void rv770_read_clock_registers(struct radeon_device *rdev)
1523 {
1524 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1525
1526 pi->clk_regs.rv770.cg_spll_func_cntl =
1527 RREG32(CG_SPLL_FUNC_CNTL);
1528 pi->clk_regs.rv770.cg_spll_func_cntl_2 =
1529 RREG32(CG_SPLL_FUNC_CNTL_2);
1530 pi->clk_regs.rv770.cg_spll_func_cntl_3 =
1531 RREG32(CG_SPLL_FUNC_CNTL_3);
1532 pi->clk_regs.rv770.cg_spll_spread_spectrum =
1533 RREG32(CG_SPLL_SPREAD_SPECTRUM);
1534 pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
1535 RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1536 pi->clk_regs.rv770.mpll_ad_func_cntl =
1537 RREG32(MPLL_AD_FUNC_CNTL);
1538 pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
1539 RREG32(MPLL_AD_FUNC_CNTL_2);
1540 pi->clk_regs.rv770.mpll_dq_func_cntl =
1541 RREG32(MPLL_DQ_FUNC_CNTL);
1542 pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
1543 RREG32(MPLL_DQ_FUNC_CNTL_2);
1544 pi->clk_regs.rv770.mclk_pwrmgt_cntl =
1545 RREG32(MCLK_PWRMGT_CNTL);
1546 pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
1547 }
1548
1549 static void r7xx_read_clock_registers(struct radeon_device *rdev)
1550 {
1551 if (rdev->family == CHIP_RV740)
1552 rv740_read_clock_registers(rdev);
1553 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1554 rv730_read_clock_registers(rdev);
1555 else
1556 rv770_read_clock_registers(rdev);
1557 }
1558
1559 void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
1560 {
1561 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1562
1563 pi->s0_vid_lower_smio_cntl =
1564 RREG32(S0_VID_LOWER_SMIO_CNTL);
1565 }
1566
1567 void rv770_reset_smio_status(struct radeon_device *rdev)
1568 {
1569 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1570 u32 sw_smio_index, vid_smio_cntl;
1571
1572 sw_smio_index =
1573 (RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1574 switch (sw_smio_index) {
1575 case 3:
1576 vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1577 break;
1578 case 2:
1579 vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1580 break;
1581 case 1:
1582 vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1583 break;
1584 case 0:
1585 return;
1586 default:
1587 vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1588 break;
1589 }
1590
1591 WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1592 WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1593 }
1594
1595 void rv770_get_memory_type(struct radeon_device *rdev)
1596 {
1597 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1598 u32 tmp;
1599
1600 tmp = RREG32(MC_SEQ_MISC0);
1601
1602 if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1603 MC_SEQ_MISC0_GDDR5_VALUE)
1604 pi->mem_gddr5 = true;
1605 else
1606 pi->mem_gddr5 = false;
1607
1608 }
1609
1610 void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1611 {
1612 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1613 u32 tmp;
1614
1615 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1616
1617 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1618 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1619 pi->pcie_gen2 = true;
1620 else
1621 pi->pcie_gen2 = false;
1622
1623 if (pi->pcie_gen2) {
1624 if (tmp & LC_CURRENT_DATA_RATE)
1625 pi->boot_in_gen2 = true;
1626 else
1627 pi->boot_in_gen2 = false;
1628 } else
1629 pi->boot_in_gen2 = false;
1630 }
1631
1632 #if 0
1633 static int rv770_enter_ulp_state(struct radeon_device *rdev)
1634 {
1635 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1636
1637 if (pi->gfx_clock_gating) {
1638 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1639 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1640 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1641 RREG32(GB_TILING_CONFIG);
1642 }
1643
1644 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1645 ~HOST_SMC_MSG_MASK);
1646
1647 udelay(7000);
1648
1649 return 0;
1650 }
1651
1652 static int rv770_exit_ulp_state(struct radeon_device *rdev)
1653 {
1654 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1655 int i;
1656
1657 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1658 ~HOST_SMC_MSG_MASK);
1659
1660 udelay(7000);
1661
1662 for (i = 0; i < rdev->usec_timeout; i++) {
1663 if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1664 break;
1665 udelay(1000);
1666 }
1667
1668 if (pi->gfx_clock_gating)
1669 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1670
1671 return 0;
1672 }
1673 #endif
1674
1675 static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1676 {
1677 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1678 u8 memory_module_index;
1679 struct atom_memory_info memory_info;
1680
1681 pi->mclk_odt_threshold = 0;
1682
1683 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1684 memory_module_index = rv770_get_memory_module_index(rdev);
1685
1686 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1687 return;
1688
1689 if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1690 memory_info.mem_type == MEM_TYPE_DDR3)
1691 pi->mclk_odt_threshold = 30000;
1692 }
1693 }
1694
1695 void rv770_get_max_vddc(struct radeon_device *rdev)
1696 {
1697 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1698 u16 vddc;
1699
1700 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1701 pi->max_vddc = 0;
1702 else
1703 pi->max_vddc = vddc;
1704 }
1705
1706 void rv770_program_response_times(struct radeon_device *rdev)
1707 {
1708 u32 voltage_response_time, backbias_response_time;
1709 u32 acpi_delay_time, vbi_time_out;
1710 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
1711 u32 reference_clock;
1712
1713 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1714 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1715
1716 if (voltage_response_time == 0)
1717 voltage_response_time = 1000;
1718
1719 if (backbias_response_time == 0)
1720 backbias_response_time = 1000;
1721
1722 acpi_delay_time = 15000;
1723 vbi_time_out = 100000;
1724
1725 reference_clock = radeon_get_xclk(rdev);
1726
1727 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1728 bb_dly = (backbias_response_time * reference_clock) / 1600;
1729 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1730 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1731
1732 rv770_write_smc_soft_register(rdev,
1733 RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1734 rv770_write_smc_soft_register(rdev,
1735 RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1736 rv770_write_smc_soft_register(rdev,
1737 RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1738 rv770_write_smc_soft_register(rdev,
1739 RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1740 #if 0
1741 /* XXX look up hw revision */
1742 if (WEKIVA_A21)
1743 rv770_write_smc_soft_register(rdev,
1744 RV770_SMC_SOFT_REGISTER_baby_step_timer,
1745 0x10);
1746 #endif
1747 }
1748
1749 static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
1750 struct radeon_ps *radeon_new_state,
1751 struct radeon_ps *radeon_current_state)
1752 {
1753 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1754 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1755 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1756 bool current_use_dc = false;
1757 bool new_use_dc = false;
1758
1759 if (pi->mclk_odt_threshold == 0)
1760 return;
1761
1762 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1763 current_use_dc = true;
1764
1765 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1766 new_use_dc = true;
1767
1768 if (current_use_dc == new_use_dc)
1769 return;
1770
1771 if (!current_use_dc && new_use_dc)
1772 return;
1773
1774 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1775 rv730_program_dcodt(rdev, new_use_dc);
1776 }
1777
1778 static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
1779 struct radeon_ps *radeon_new_state,
1780 struct radeon_ps *radeon_current_state)
1781 {
1782 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1783 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1784 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1785 bool current_use_dc = false;
1786 bool new_use_dc = false;
1787
1788 if (pi->mclk_odt_threshold == 0)
1789 return;
1790
1791 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1792 current_use_dc = true;
1793
1794 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1795 new_use_dc = true;
1796
1797 if (current_use_dc == new_use_dc)
1798 return;
1799
1800 if (current_use_dc && !new_use_dc)
1801 return;
1802
1803 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1804 rv730_program_dcodt(rdev, new_use_dc);
1805 }
1806
1807 static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1808 {
1809 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1810
1811 if (pi->mclk_odt_threshold == 0)
1812 return;
1813
1814 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1815 rv730_get_odt_values(rdev);
1816 }
1817
1818 static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
1819 {
1820 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1821 bool want_thermal_protection;
1822 enum radeon_dpm_event_src dpm_event_src;
1823
1824 switch (sources) {
1825 case 0:
1826 default:
1827 want_thermal_protection = false;
1828 break;
1829 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
1830 want_thermal_protection = true;
1831 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
1832 break;
1833
1834 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
1835 want_thermal_protection = true;
1836 dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
1837 break;
1838
1839 case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
1840 (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
1841 want_thermal_protection = true;
1842 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
1843 break;
1844 }
1845
1846 if (want_thermal_protection) {
1847 WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
1848 if (pi->thermal_protection)
1849 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
1850 } else {
1851 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
1852 }
1853 }
1854
1855 void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
1856 enum radeon_dpm_auto_throttle_src source,
1857 bool enable)
1858 {
1859 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1860
1861 if (enable) {
1862 if (!(pi->active_auto_throttle_sources & (1 << source))) {
1863 pi->active_auto_throttle_sources |= 1 << source;
1864 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1865 }
1866 } else {
1867 if (pi->active_auto_throttle_sources & (1 << source)) {
1868 pi->active_auto_throttle_sources &= ~(1 << source);
1869 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1870 }
1871 }
1872 }
1873
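/*
 * Program the thermal interrupt thresholds and the DPM throttle
 * temperature.  min_temp/max_temp are in millidegrees C and are clamped
 * to the 0..255 C range the registers can express; the accepted range is
 * recorded in rdev->pm.dpm.thermal.
 */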
1874 static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1875 int min_temp, int max_temp)
1876 {
1877 int low_temp = 0 * 1000;
1878 int high_temp = 255 * 1000;
1879
1880 if (low_temp < min_temp)
1881 low_temp = min_temp;
1882 if (high_temp > max_temp)
1883 high_temp = max_temp;
1884 if (high_temp < low_temp) {
1885 DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1886 return -EINVAL;
1887 }
1888
1889 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1890 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1891 WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1892
1893 rdev->pm.dpm.thermal.min_temp = low_temp;
1894 rdev->pm.dpm.thermal.max_temp = high_temp;
1895
1896 return 0;
1897 }
1898
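/*
 * Bring up dynamic power management: build the voltage tables, enable the
 * optional features (backbias, spread spectrum, thermal protection,
 * dynamic PCIe gen2), program the static timing parameters, upload the
 * SMC firmware and the boot state table, then start the SMC and DPM and
 * register the internal thermal sensor as an auto-throttle source.
 */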
1899 int rv770_dpm_enable(struct radeon_device *rdev)
1900 {
1901 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1902 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1903 int ret;
1904
1905 if (pi->gfx_clock_gating)
1906 rv770_restore_cgcg(rdev);
1907
1908 if (rv770_dpm_enabled(rdev))
1909 return -EINVAL;
1910
1911 if (pi->voltage_control) {
1912 rv770_enable_voltage_control(rdev, true);
1913 ret = rv770_construct_vddc_table(rdev);
1914 if (ret) {
1915 DRM_ERROR("rv770_construct_vddc_table failed\n");
1916 return ret;
1917 }
1918 }
1919
1920 if (pi->dcodt)
1921 rv770_retrieve_odt_values(rdev);
1922
1923 if (pi->mvdd_control) {
1924 ret = rv770_get_mvdd_configuration(rdev);
1925 if (ret) {
1926 DRM_ERROR("rv770_get_mvdd_configuration failed\n");
1927 return ret;
1928 }
1929 }
1930
1931 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1932 rv770_enable_backbias(rdev, true);
1933
1934 rv770_enable_spread_spectrum(rdev, true);
1935
1936 if (pi->thermal_protection)
1937 rv770_enable_thermal_protection(rdev, true);
1938
1939 rv770_program_mpll_timing_parameters(rdev);
1940 rv770_setup_bsp(rdev);
1941 rv770_program_git(rdev);
1942 rv770_program_tp(rdev);
1943 rv770_program_tpp(rdev);
1944 rv770_program_sstp(rdev);
1945 rv770_program_engine_speed_parameters(rdev);
1946 rv770_enable_display_gap(rdev);
1947 rv770_program_vc(rdev);
1948
1949 if (pi->dynamic_pcie_gen2)
1950 rv770_enable_dynamic_pcie_gen2(rdev, true);
1951
1952 ret = rv770_upload_firmware(rdev);
1953 if (ret) {
1954 DRM_ERROR("rv770_upload_firmware failed\n");
1955 return ret;
1956 }
1957 ret = rv770_init_smc_table(rdev, boot_ps);
1958 if (ret) {
1959 DRM_ERROR("rv770_init_smc_table failed\n");
1960 return ret;
1961 }
1962
1963 rv770_program_response_times(rdev);
1964 r7xx_start_smc(rdev);
1965
1966 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1967 rv730_start_dpm(rdev);
1968 else
1969 rv770_start_dpm(rdev);
1970
1971 if (pi->gfx_clock_gating)
1972 rv770_gfx_clock_gating_enable(rdev, true);
1973
1974 if (pi->mg_clock_gating)
1975 rv770_mg_clock_gating_enable(rdev, true);
1976
1977 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1978
1979 return 0;
1980 }
1981
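/*
 * Late DPM setup, run once interrupt delivery is available: program the
 * thermal trip points, enable the DPM thermal interrupt and ask the SMC
 * to start delivering thermal interrupts.
 */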
1982 int rv770_dpm_late_enable(struct radeon_device *rdev)
1983 {
1984 int ret;
1985
1986 if (rdev->irq.installed &&
1987 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1988 PPSMC_Result result;
1989
1990 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1991 if (ret)
1992 return ret;
1993 rdev->irq.dpm_thermal = true;
1994 radeon_irq_set(rdev);
1995 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1996
1997 if (result != PPSMC_Result_OK)
1998 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1999 }
2000
2001 return 0;
2002 }
2003
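/*
 * Tear down DPM in roughly the reverse order of rv770_dpm_enable(): undo
 * the optional features, disable the thermal interrupt, stop DPM and the
 * SMC, and reset the voltage SMIO state.
 */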
2004 void rv770_dpm_disable(struct radeon_device *rdev)
2005 {
2006 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2007
2008 if (!rv770_dpm_enabled(rdev))
2009 return;
2010
2011 rv770_clear_vc(rdev);
2012
2013 if (pi->thermal_protection)
2014 rv770_enable_thermal_protection(rdev, false);
2015
2016 rv770_enable_spread_spectrum(rdev, false);
2017
2018 if (pi->dynamic_pcie_gen2)
2019 rv770_enable_dynamic_pcie_gen2(rdev, false);
2020
2021 if (rdev->irq.installed &&
2022 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
2023 rdev->irq.dpm_thermal = false;
2024 radeon_irq_set(rdev);
2025 }
2026
2027 if (pi->gfx_clock_gating)
2028 rv770_gfx_clock_gating_enable(rdev, false);
2029
2030 if (pi->mg_clock_gating)
2031 rv770_mg_clock_gating_enable(rdev, false);
2032
2033 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
2034 rv730_stop_dpm(rdev);
2035 else
2036 rv770_stop_dpm(rdev);
2037
2038 r7xx_stop_smc(rdev);
2039 rv770_reset_smio_status(rdev);
2040 }
2041
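/*
 * Switch to the requested power state: restrict the current performance
 * level, pre-adjust the UVD clocks, halt the SMC while the new software
 * state and memory timings (and ODT, if used) are uploaded, then resume
 * the SMC, commit the new state and finish the ODT/UVD adjustments.
 */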
2042 int rv770_dpm_set_power_state(struct radeon_device *rdev)
2043 {
2044 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2045 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
2046 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
2047 int ret;
2048
2049 ret = rv770_restrict_performance_levels_before_switch(rdev);
2050 if (ret) {
2051 DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
2052 return ret;
2053 }
2054 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
2055 ret = rv770_halt_smc(rdev);
2056 if (ret) {
2057 DRM_ERROR("rv770_halt_smc failed\n");
2058 return ret;
2059 }
2060 ret = rv770_upload_sw_state(rdev, new_ps);
2061 if (ret) {
2062 DRM_ERROR("rv770_upload_sw_state failed\n");
2063 return ret;
2064 }
2065 r7xx_program_memory_timing_parameters(rdev, new_ps);
2066 if (pi->dcodt)
2067 rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
2068 ret = rv770_resume_smc(rdev);
2069 if (ret) {
2070 DRM_ERROR("rv770_resume_smc failed\n");
2071 return ret;
2072 }
2073 ret = rv770_set_sw_state(rdev);
2074 if (ret) {
2075 DRM_ERROR("rv770_set_sw_state failed\n");
2076 return ret;
2077 }
2078 if (pi->dcodt)
2079 rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
2080 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2081
2082 return 0;
2083 }
2084
2085 #if 0
2086 void rv770_dpm_reset_asic(struct radeon_device *rdev)
2087 {
2088 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2089 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
2090
2091 rv770_restrict_performance_levels_before_switch(rdev);
2092 if (pi->dcodt)
2093 rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
2094 rv770_set_boot_state(rdev);
2095 if (pi->dcodt)
2096 rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
2097 }
2098 #endif
2099
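/*
 * One-time ASIC setup for DPM: cache the clock and voltage SMIO register
 * state, the memory type, the ODT threshold and the PCIe gen2 status,
 * enable ACPI power management, and enable the ASPM features (L0s, L1,
 * PLL sleep in L1) permitted by the platform caps and the radeon_aspm
 * module parameter.
 */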
2100 void rv770_dpm_setup_asic(struct radeon_device *rdev)
2101 {
2102 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2103
2104 r7xx_read_clock_registers(rdev);
2105 rv770_read_voltage_smio_registers(rdev);
2106 rv770_get_memory_type(rdev);
2107 if (pi->dcodt)
2108 rv770_get_mclk_odt_threshold(rdev);
2109 rv770_get_pcie_gen2_status(rdev);
2110
2111 rv770_enable_acpi_pm(rdev);
2112
2113 if (radeon_aspm != 0) {
2114 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
2115 rv770_enable_l0s(rdev);
2116 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
2117 rv770_enable_l1(rdev);
2118 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
2119 rv770_enable_pll_sleep_in_l1(rdev);
2120 }
2121 }
2122
2123 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
2124 {
2125 rv770_program_display_gap(rdev);
2126 }
2127
2128 union power_info {
2129 struct _ATOM_POWERPLAY_INFO info;
2130 struct _ATOM_POWERPLAY_INFO_V2 info_2;
2131 struct _ATOM_POWERPLAY_INFO_V3 info_3;
2132 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2133 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2134 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2135 };
2136
2137 union pplib_clock_info {
2138 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2139 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2140 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2141 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2142 };
2143
2144 union pplib_power_state {
2145 struct _ATOM_PPLIB_STATE v1;
2146 struct _ATOM_PPLIB_STATE_V2 v2;
2147 };
2148
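/*
 * Copy the classification, capability flags and (for newer table
 * revisions) the UVD clocks out of the ATOM non-clock info, falling back
 * to default UVD clocks for UVD states, and remember the boot and UVD
 * power states.
 */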
2149 static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
2150 struct radeon_ps *rps,
2151 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
2152 u8 table_rev)
2153 {
2154 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
2155 rps->class = le16_to_cpu(non_clock_info->usClassification);
2156 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
2157
2158 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
2159 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
2160 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
2161 } else {
2162 rps->vclk = 0;
2163 rps->dclk = 0;
2164 }
2165
2166 if (r600_is_uvd_state(rps->class, rps->class2)) {
2167 if ((rps->vclk == 0) || (rps->dclk == 0)) {
2168 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
2169 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
2170 }
2171 }
2172
2173 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
2174 rdev->pm.dpm.boot_ps = rps;
2175 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
2176 rdev->pm.dpm.uvd_ps = rps;
2177 }
2178
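/*
 * Fill in one performance level (low/medium/high) of a power state from
 * the ATOM clock info, using the Evergreen or R600 layout as appropriate.
 * Also patches the leakage VDDC marker, records the ACPI and ULV levels,
 * tracks the min/max VDDC seen, overrides the boot state with the default
 * clocks and voltages, and notes the maximum AC clocks and voltages for
 * the performance state.
 */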
2179 static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
2180 struct radeon_ps *rps, int index,
2181 union pplib_clock_info *clock_info)
2182 {
2183 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2184 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2185 struct rv7xx_ps *ps = rv770_get_ps(rps);
2186 u32 sclk, mclk;
2187 struct rv7xx_pl *pl;
2188
2189 switch (index) {
2190 case 0:
2191 pl = &ps->low;
2192 break;
2193 case 1:
2194 pl = &ps->medium;
2195 break;
2196 case 2:
2197 default:
2198 pl = &ps->high;
2199 break;
2200 }
2201
2202 if (rdev->family >= CHIP_CEDAR) {
2203 sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
2204 sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
2205 mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
2206 mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
2207
2208 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
2209 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
2210 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
2211 } else {
2212 sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
2213 sclk |= clock_info->r600.ucEngineClockHigh << 16;
2214 mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
2215 mclk |= clock_info->r600.ucMemoryClockHigh << 16;
2216
2217 pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
2218 pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
2219 }
2220
2221 pl->mclk = mclk;
2222 pl->sclk = sclk;
2223
2224 /* patch up vddc if necessary */
2225 if (pl->vddc == 0xff01) {
2226 if (pi->max_vddc)
2227 pl->vddc = pi->max_vddc;
2228 }
2229
2230 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
2231 pi->acpi_vddc = pl->vddc;
2232 if (rdev->family >= CHIP_CEDAR)
2233 eg_pi->acpi_vddci = pl->vddci;
2234 if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
2235 pi->acpi_pcie_gen2 = true;
2236 else
2237 pi->acpi_pcie_gen2 = false;
2238 }
2239
2240 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
2241 if (rdev->family >= CHIP_BARTS) {
2242 eg_pi->ulv.supported = true;
2243 eg_pi->ulv.pl = pl;
2244 }
2245 }
2246
2247 if (pi->min_vddc_in_table > pl->vddc)
2248 pi->min_vddc_in_table = pl->vddc;
2249
2250 if (pi->max_vddc_in_table < pl->vddc)
2251 pi->max_vddc_in_table = pl->vddc;
2252
2253 /* patch up boot state */
2254 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
2255 u16 vddc, vddci, mvdd;
2256 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
2257 pl->mclk = rdev->clock.default_mclk;
2258 pl->sclk = rdev->clock.default_sclk;
2259 pl->vddc = vddc;
2260 pl->vddci = vddci;
2261 }
2262
2263 if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
2264 ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
2265 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
2266 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
2267 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
2268 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
2269 }
2270 }
2271
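/*
 * Walk the ATOM PowerPlay table and build the rdev->pm.dpm.ps array:
 * one radeon_ps per state, each carrying an rv7xx_ps with up to three
 * performance levels parsed from the clock info entries.
 */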
2272 int rv7xx_parse_power_table(struct radeon_device *rdev)
2273 {
2274 struct radeon_mode_info *mode_info = &rdev->mode_info;
2275 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2276 union pplib_power_state *power_state;
2277 int i, j;
2278 union pplib_clock_info *clock_info;
2279 union power_info *power_info;
2280 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2281 u16 data_offset;
2282 u8 frev, crev;
2283 struct rv7xx_ps *ps;
2284
2285 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2286 &frev, &crev, &data_offset))
2287 return -EINVAL;
2288 power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2289
2290 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
2291 power_info->pplib.ucNumStates, GFP_KERNEL);
2292 if (!rdev->pm.dpm.ps)
2293 return -ENOMEM;
2294
2295 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2296 power_state = (union pplib_power_state *)
2297 (mode_info->atom_context->bios + data_offset +
2298 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2299 i * power_info->pplib.ucStateEntrySize);
2300 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2301 (mode_info->atom_context->bios + data_offset +
2302 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2303 (power_state->v1.ucNonClockStateIndex *
2304 power_info->pplib.ucNonClockSize));
2305 if (power_info->pplib.ucStateEntrySize - 1) {
2306 u8 *idx;
2307 ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2308 if (ps == NULL) {
2309 kfree(rdev->pm.dpm.ps);
2310 return -ENOMEM;
2311 }
2312 rdev->pm.dpm.ps[i].ps_priv = ps;
2313 rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2314 non_clock_info,
2315 power_info->pplib.ucNonClockSize);
2316 idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
2317 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2318 clock_info = (union pplib_clock_info *)
2319 (mode_info->atom_context->bios + data_offset +
2320 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2321 (idx[j] * power_info->pplib.ucClockInfoSize));
2322 rv7xx_parse_pplib_clock_info(rdev,
2323 &rdev->pm.dpm.ps[i], j,
2324 clock_info);
2325 }
2326 }
2327 }
2328 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2329 return 0;
2330 }
2331
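/*
 * Query the vbios for engine and memory spread spectrum support and note
 * whether dynamic spread spectrum can be used at all.
 */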
2332 void rv770_get_engine_memory_ss(struct radeon_device *rdev)
2333 {
2334 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2335 struct radeon_atom_ss ss;
2336
2337 pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2338 ASIC_INTERNAL_ENGINE_SS, 0);
2339 pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2340 ASIC_INTERNAL_MEMORY_SS, 0);
2341
2342 if (pi->sclk_ss || pi->mclk_ss)
2343 pi->dynamic_ss = true;
2344 else
2345 pi->dynamic_ss = false;
2346 }
2347
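/*
 * Allocate the rv7xx_power_info structure, parse the PowerPlay table and
 * initialize the driver-side DPM defaults (thresholds, clock gating,
 * thermal protection, ODT on mobility parts, SMC table addresses).
 */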
2348 int rv770_dpm_init(struct radeon_device *rdev)
2349 {
2350 struct rv7xx_power_info *pi;
2351 struct atom_clock_dividers dividers;
2352 int ret;
2353
2354 pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2355 if (pi == NULL)
2356 return -ENOMEM;
2357 rdev->pm.dpm.priv = pi;
2358
2359 rv770_get_max_vddc(rdev);
2360
2361 pi->acpi_vddc = 0;
2362 pi->min_vddc_in_table = 0;
2363 pi->max_vddc_in_table = 0;
2364
2365 ret = r600_get_platform_caps(rdev);
2366 if (ret)
2367 return ret;
2368
2369 ret = rv7xx_parse_power_table(rdev);
2370 if (ret)
2371 return ret;
2372
2373 if (rdev->pm.dpm.voltage_response_time == 0)
2374 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2375 if (rdev->pm.dpm.backbias_response_time == 0)
2376 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2377
2378 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2379 0, false, &dividers);
2380 if (ret)
2381 pi->ref_div = dividers.ref_div + 1;
2382 else
2383 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2384
2385 pi->mclk_strobe_mode_threshold = 30000;
2386 pi->mclk_edc_enable_threshold = 30000;
2387
2388 pi->rlp = RV770_RLP_DFLT;
2389 pi->rmp = RV770_RMP_DFLT;
2390 pi->lhp = RV770_LHP_DFLT;
2391 pi->lmp = RV770_LMP_DFLT;
2392
2393 pi->voltage_control =
2394 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2395
2396 pi->mvdd_control =
2397 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2398
2399 rv770_get_engine_memory_ss(rdev);
2400
2401 pi->asi = RV770_ASI_DFLT;
2402 pi->pasi = RV770_HASI_DFLT;
2403 pi->vrc = RV770_VRC_DFLT;
2404
2405 pi->power_gating = false;
2406
2407 pi->gfx_clock_gating = true;
2408
2409 pi->mg_clock_gating = true;
2410 pi->mgcgtssm = true;
2411
2412 pi->dynamic_pcie_gen2 = true;
2413
2414 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2415 pi->thermal_protection = true;
2416 else
2417 pi->thermal_protection = false;
2418
2419 pi->display_gap = true;
2420
2421 if (rdev->flags & RADEON_IS_MOBILITY)
2422 pi->dcodt = true;
2423 else
2424 pi->dcodt = false;
2425
2426 pi->ulps = true;
2427
2428 pi->mclk_stutter_mode_threshold = 0;
2429
2430 pi->sram_end = SMC_RAM_END;
2431 pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2432 pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2433
2434 return 0;
2435 }
2436
2437 void rv770_dpm_print_power_state(struct radeon_device *rdev,
2438 struct radeon_ps *rps)
2439 {
2440 struct rv7xx_ps *ps = rv770_get_ps(rps);
2441 struct rv7xx_pl *pl;
2442
2443 r600_dpm_print_class_info(rps->class, rps->class2);
2444 r600_dpm_print_cap_info(rps->caps);
2445 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2446 if (rdev->family >= CHIP_CEDAR) {
2447 pl = &ps->low;
2448 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2449 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2450 pl = &ps->medium;
2451 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2452 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2453 pl = &ps->high;
2454 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2455 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2456 } else {
2457 pl = &ps->low;
2458 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
2459 pl->sclk, pl->mclk, pl->vddc);
2460 pl = &ps->medium;
2461 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
2462 pl->sclk, pl->mclk, pl->vddc);
2463 pl = &ps->high;
2464 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
2465 pl->sclk, pl->mclk, pl->vddc);
2466 }
2467 r600_dpm_print_ps_status(rdev, rps);
2468 }
2469
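/*
 * debugfs helper: report the UVD clocks and the currently selected
 * performance level (read back from TARGET_AND_CURRENT_PROFILE_INDEX).
 */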
2470 #ifdef CONFIG_DEBUG_FS
2471 void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
2472 struct seq_file *m)
2473 {
2474 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2475 struct rv7xx_ps *ps = rv770_get_ps(rps);
2476 struct rv7xx_pl *pl;
2477 u32 current_index =
2478 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2479 CURRENT_PROFILE_INDEX_SHIFT;
2480
2481 if (current_index > 2) {
2482 seq_printf(m, "invalid dpm profile %d\n", current_index);
2483 } else {
2484 if (current_index == 0)
2485 pl = &ps->low;
2486 else if (current_index == 1)
2487 pl = &ps->medium;
2488 else /* current_index == 2 */
2489 pl = &ps->high;
2490 seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2491 if (rdev->family >= CHIP_CEDAR) {
2492 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
2493 current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2494 } else {
2495 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u\n",
2496 current_index, pl->sclk, pl->mclk, pl->vddc);
2497 }
2498 }
2499 }
2500 #endif
2501
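/*
 * The next two helpers read the current profile index from the hardware
 * and return the engine or memory clock of the corresponding performance
 * level of the current power state (0 if the index is out of range).
 */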
2502 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev)
2503 {
2504 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2505 struct rv7xx_ps *ps = rv770_get_ps(rps);
2506 struct rv7xx_pl *pl;
2507 u32 current_index =
2508 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2509 CURRENT_PROFILE_INDEX_SHIFT;
2510
2511 if (current_index > 2) {
2512 return 0;
2513 } else {
2514 if (current_index == 0)
2515 pl = &ps->low;
2516 else if (current_index == 1)
2517 pl = &ps->medium;
2518 else /* current_index == 2 */
2519 pl = &ps->high;
2520 return pl->sclk;
2521 }
2522 }
2523
2524 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev)
2525 {
2526 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2527 struct rv7xx_ps *ps = rv770_get_ps(rps);
2528 struct rv7xx_pl *pl;
2529 u32 current_index =
2530 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2531 CURRENT_PROFILE_INDEX_SHIFT;
2532
2533 if (current_index > 2) {
2534 return 0;
2535 } else {
2536 if (current_index == 0)
2537 pl = &ps->low;
2538 else if (current_index == 1)
2539 pl = &ps->medium;
2540 else /* current_index == 2 */
2541 pl = &ps->high;
2542 return pl->mclk;
2543 }
2544 }
2545
2546 void rv770_dpm_fini(struct radeon_device *rdev)
2547 {
2548 int i;
2549
2550 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2551 kfree(rdev->pm.dpm.ps[i].ps_priv);
2552 }
2553 kfree(rdev->pm.dpm.ps);
2554 kfree(rdev->pm.dpm.priv);
2555 }
2556
2557 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2558 {
2559 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2560
2561 if (low)
2562 return requested_state->low.sclk;
2563 else
2564 return requested_state->high.sclk;
2565 }
2566
2567 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2568 {
2569 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2570
2571 if (low)
2572 return requested_state->low.mclk;
2573 else
2574 return requested_state->high.mclk;
2575 }
2576
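/*
 * Decide whether the vblank period is too short to hide a memory clock
 * switch.  Desktop RV770 boards effectively have mclk switching disabled
 * here because it is not reliable on that hardware.
 */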
2577 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
2578 {
2579 u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2580 u32 switch_limit = 200; /* 300 */
2581
2582 /* RV770 */
2583 /* mclk switching doesn't seem to work reliably on desktop RV770s */
2584 if ((rdev->family == CHIP_RV770) &&
2585 !(rdev->flags & RADEON_IS_MOBILITY))
2586 switch_limit = 0xffffffff; /* disable mclk switching */
2587
2588 if (vblank_time < switch_limit)
2589 return true;
2590 else
2591 return false;
2592
2593 }
2594