/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include "drmP.h"
#include "radeon.h"
#include "evergreend.h"
#include "r600_dpm.h"
#include "cypress_dpm.h"
#include "atom.h"

#define SMC_RAM_END 0x8000

#define MC_CG_ARB_FREQ_F0		0x0a
#define MC_CG_ARB_FREQ_F1		0x0b
#define MC_CG_ARB_FREQ_F2		0x0c
#define MC_CG_ARB_FREQ_F3		0x0d

#define MC_CG_SEQ_DRAMCONF_S0		0x05
#define MC_CG_SEQ_DRAMCONF_S1		0x06
#define MC_CG_SEQ_YCLK_SUSPEND		0x04
#define MC_CG_SEQ_YCLK_RESUME		0x0a

struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps);
struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);

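/* Enable/disable dynamic PCIe gen2 switching in the BIF; the gen2 strap is
 * left alone if the board already booted in gen2 mode.
 */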
static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
						 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
				tmp |= LC_GEN2_EN_STRAP;

				tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
				udelay(10);
				tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			}
		}
	} else {
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
	}
}

static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					     bool enable)
{
	cypress_enable_bif_dynamic_pcie_gen2(rdev, enable);

	if (enable)
		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
}

#if 0
static int cypress_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);

		RREG32(GB_ADDR_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	return 0;
}
#endif

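/* Graphics clock gating: when light sleep is supported the CG_CGLS_TILE
 * registers are programmed first, then the dynamic gfx-clock-off and
 * light-sleep enables in SCLK_PWRMGT_CNTL are toggled.
 */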
static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev,
					    bool enable)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		if (eg_pi->light_sleep) {
			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_1, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_2, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_3, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_4, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_5, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_6, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_7, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_8, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_9, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_10, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_11, 0xFFFFFFFF);

			WREG32_P(SCLK_PWRMGT_CNTL, DYN_LIGHT_SLEEP_EN, ~DYN_LIGHT_SLEEP_EN);
		}
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	} else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);

		if (eg_pi->light_sleep) {
			WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_LIGHT_SLEEP_EN);

			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0);
			WREG32_CG(CG_CGLS_TILE_1, 0);
			WREG32_CG(CG_CGLS_TILE_2, 0);
			WREG32_CG(CG_CGLS_TILE_3, 0);
			WREG32_CG(CG_CGLS_TILE_4, 0);
			WREG32_CG(CG_CGLS_TILE_5, 0);
			WREG32_CG(CG_CGLS_TILE_6, 0);
			WREG32_CG(CG_CGLS_TILE_7, 0);
			WREG32_CG(CG_CGLS_TILE_8, 0);
			WREG32_CG(CG_CGLS_TILE_9, 0);
			WREG32_CG(CG_CGLS_TILE_10, 0);
			WREG32_CG(CG_CGLS_TILE_11, 0);
		}
	}
}

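/* Medium-grain clock gating: pick the CGTS SM control default for the ASIC,
 * program the CG_CGTT_LOCAL registers, and optionally enable memory light
 * sleep in the MC hub/CITF blocks.
 */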
static void cypress_mg_clock_gating_enable(struct radeon_device *rdev,
					   bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		u32 cgts_sm_ctrl_reg;

		if (rdev->family == CHIP_CEDAR)
			cgts_sm_ctrl_reg = CEDAR_MGCGCGTSSMCTRL_DFLT;
		else if (rdev->family == CHIP_REDWOOD)
			cgts_sm_ctrl_reg = REDWOOD_MGCGCGTSSMCTRL_DFLT;
		else
			cgts_sm_ctrl_reg = CYPRESS_MGCGCGTSSMCTRL_DFLT;

		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, CYPRESS_MGCGTTLOCAL0_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_1, CYPRESS_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, CYPRESS_MGCGTTLOCAL2_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_3, CYPRESS_MGCGTTLOCAL3_DFLT);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, cgts_sm_ctrl_reg);

		if (eg_pi->mcls) {
			WREG32_P(MC_CITF_MISC_RD_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_WR_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_HUB_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_SIP_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_XPB_CLK_GAT, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(VM_L2_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
		}
	} else {
		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_1, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_3, 0xFFFFFFFF);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, 0x81f44bc0);
	}
}

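/* Spread spectrum: dynamic SS on the engine PLL and SSEN on the memory PLL
 * are only turned on when the corresponding sclk/mclk SS info is present;
 * disabling clears both unconditionally.
 */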
void cypress_enable_spread_spectrum(struct radeon_device *rdev,
				    bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss)
			WREG32_P(MPLL_CNTL_MODE, SS_SSEN, ~SS_SSEN);
	} else {
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_SSEN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_DSMODE_EN);
	}
}

void cypress_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}

void cypress_enable_sclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
}

void cypress_enable_mclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
	else
		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}

int cypress_notify_smc_display_change(struct radeon_device *rdev,
				      bool has_display)
{
	PPSMC_Msg msg = has_display ?
		(PPSMC_Msg)PPSMC_MSG_HasDisplay : (PPSMC_Msg)PPSMC_MSG_NoDisplay;

	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}

void cypress_program_response_times(struct radeon_device *rdev)
{
	u32 reference_clock;
	u32 mclk_switch_limit;

	reference_clock = radeon_get_xclk(rdev);
	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_switch_lim,
				      mclk_switch_limit);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);

	rv770_program_response_times(rdev);

	if (ASIC_IS_LOMBOK(rdev))
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_is_asic_lombok, 1);
}

static int cypress_pcie_performance_request(struct radeon_device *rdev,
					    u8 perf_req, bool advertise)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 tmp;

	udelay(10);
	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) && (tmp & LC_CURRENT_DATA_RATE))
		return 0;

#if defined(CONFIG_ACPI)
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
		eg_pi->pcie_performance_request_registered = true;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
		   eg_pi->pcie_performance_request_registered) {
		eg_pi->pcie_performance_request_registered = false;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	}
#endif

	return 0;
}

void cypress_advertise_gen2_capability(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

#if defined(CONFIG_ACPI)
	radeon_acpi_pcie_notify_device_ready(rdev);
#endif

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		pi->pcie_gen2 = true;
	else
		pi->pcie_gen2 = false;

	if (!pi->pcie_gen2)
		cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
}

static enum radeon_pcie_gen cypress_get_maximum_link_speed(struct radeon_ps *radeon_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);

	if (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		return 1;
	return 0;
}

void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
							 struct radeon_ps *radeon_new_state,
							 struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target < pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
							  struct radeon_ps *radeon_new_state,
							  struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target > pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

static int cypress_populate_voltage_value(struct radeon_device *rdev,
					  struct atom_voltage_table *table,
					  u16 value, RV770_SMC_VOLTAGE_VALUE *voltage)
{
	unsigned int i;

	for (i = 0; i < table->count; i++) {
		if (value <= table->entries[i].value) {
			voltage->index = (u8)i;
			voltage->value = cpu_to_be16(table->entries[i].value);
			break;
		}
	}

	if (i == table->count)
		return -EINVAL;

	return 0;
}

u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 result = 0;
	bool strobe_mode = false;

	if (pi->mem_gddr5) {
		if (mclk <= pi->mclk_strobe_mode_threshold)
			strobe_mode = true;
		result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode);

		if (strobe_mode)
			result |= SMC_STROBE_ENABLE;
	}

	return result;
}

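/* Map the MPLL feedback divider (CLKF) to an IBIAS value based on the
 * resulting VCO frequency; separate lookup ladders are used for a 100 MHz
 * and a 27 MHz reference clock.
 */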
u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	u32 ref_clk = rdev->clock.mpll.reference_freq;
	u32 vco = clkf * ref_clk;

	/* 100 Mhz ref clk */
	if (ref_clk == 10000) {
		if (vco > 500000)
			return 0xC6;
		if (vco > 400000)
			return 0x9D;
		if (vco > 330000)
			return 0x6C;
		if (vco > 250000)
			return 0x2B;
		if (vco > 160000)
			return 0x5B;
		if (vco > 120000)
			return 0x0A;
		return 0x4B;
	}

	/* 27 Mhz ref clk */
	if (vco > 250000)
		return 0x8B;
	if (vco > 200000)
		return 0xCC;
	if (vco > 150000)
		return 0x9B;
	return 0x6B;
}

470 static int cypress_populate_mclk_value(struct radeon_device *rdev,
471 u32 engine_clock, u32 memory_clock,
472 RV7XX_SMC_MCLK_VALUE *mclk,
473 bool strobe_mode, bool dll_state_on)
475 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
477 u32 mpll_ad_func_cntl =
478 pi->clk_regs.rv770.mpll_ad_func_cntl;
479 u32 mpll_ad_func_cntl_2 =
480 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
481 u32 mpll_dq_func_cntl =
482 pi->clk_regs.rv770.mpll_dq_func_cntl;
483 u32 mpll_dq_func_cntl_2 =
484 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
485 u32 mclk_pwrmgt_cntl =
486 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
487 u32 dll_cntl =
488 pi->clk_regs.rv770.dll_cntl;
489 u32 mpll_ss1 = pi->clk_regs.rv770.mpll_ss1;
490 u32 mpll_ss2 = pi->clk_regs.rv770.mpll_ss2;
491 struct atom_clock_dividers dividers;
492 u32 ibias;
493 u32 dll_speed;
494 int ret;
495 u32 mc_seq_misc7;
497 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
498 memory_clock, strobe_mode, &dividers);
499 if (ret)
500 return ret;
502 if (!strobe_mode) {
503 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
505 if(mc_seq_misc7 & 0x8000000)
506 dividers.post_div = 1;
509 ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
511 mpll_ad_func_cntl &= ~(CLKR_MASK |
512 YCLK_POST_DIV_MASK |
513 CLKF_MASK |
514 CLKFRAC_MASK |
515 IBIAS_MASK);
516 mpll_ad_func_cntl |= CLKR(dividers.ref_div);
517 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
518 mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
519 mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
520 mpll_ad_func_cntl |= IBIAS(ibias);
522 if (dividers.vco_mode)
523 mpll_ad_func_cntl_2 |= VCO_MODE;
524 else
525 mpll_ad_func_cntl_2 &= ~VCO_MODE;
527 if (pi->mem_gddr5) {
528 mpll_dq_func_cntl &= ~(CLKR_MASK |
529 YCLK_POST_DIV_MASK |
530 CLKF_MASK |
531 CLKFRAC_MASK |
532 IBIAS_MASK);
533 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
534 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
535 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
536 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
537 mpll_dq_func_cntl |= IBIAS(ibias);
539 if (strobe_mode)
540 mpll_dq_func_cntl &= ~PDNB;
541 else
542 mpll_dq_func_cntl |= PDNB;
544 if (dividers.vco_mode)
545 mpll_dq_func_cntl_2 |= VCO_MODE;
546 else
547 mpll_dq_func_cntl_2 &= ~VCO_MODE;
550 if (pi->mclk_ss) {
551 struct radeon_atom_ss ss;
552 u32 vco_freq = memory_clock * dividers.post_div;
554 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
555 ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
556 u32 reference_clock = rdev->clock.mpll.reference_freq;
557 u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
558 u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
559 u32 clk_v = ss.percentage *
560 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
562 mpll_ss1 &= ~CLKV_MASK;
563 mpll_ss1 |= CLKV(clk_v);
565 mpll_ss2 &= ~CLKS_MASK;
566 mpll_ss2 |= CLKS(clk_s);
570 dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
571 memory_clock);
573 mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
574 mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
575 if (dll_state_on)
576 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
577 MRDCKA1_PDNB |
578 MRDCKB0_PDNB |
579 MRDCKB1_PDNB |
580 MRDCKC0_PDNB |
581 MRDCKC1_PDNB |
582 MRDCKD0_PDNB |
583 MRDCKD1_PDNB);
584 else
585 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
586 MRDCKA1_PDNB |
587 MRDCKB0_PDNB |
588 MRDCKB1_PDNB |
589 MRDCKC0_PDNB |
590 MRDCKC1_PDNB |
591 MRDCKD0_PDNB |
592 MRDCKD1_PDNB);
594 mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
595 mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
596 mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
597 mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
598 mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
599 mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
600 mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
601 mclk->mclk770.vMPLL_SS = cpu_to_be32(mpll_ss1);
602 mclk->mclk770.vMPLL_SS2 = cpu_to_be32(mpll_ss2);
604 return 0;
607 u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
608 u32 memory_clock, bool strobe_mode)
610 u8 mc_para_index;
612 if (rdev->family >= CHIP_BARTS) {
613 if (strobe_mode) {
614 if (memory_clock < 10000)
615 mc_para_index = 0x00;
616 else if (memory_clock > 47500)
617 mc_para_index = 0x0f;
618 else
619 mc_para_index = (u8)((memory_clock - 10000) / 2500);
620 } else {
621 if (memory_clock < 65000)
622 mc_para_index = 0x00;
623 else if (memory_clock > 135000)
624 mc_para_index = 0x0f;
625 else
626 mc_para_index = (u8)((memory_clock - 60000) / 5000);
628 } else {
629 if (strobe_mode) {
630 if (memory_clock < 10000)
631 mc_para_index = 0x00;
632 else if (memory_clock > 47500)
633 mc_para_index = 0x0f;
634 else
635 mc_para_index = (u8)((memory_clock - 10000) / 2500);
636 } else {
637 if (memory_clock < 40000)
638 mc_para_index = 0x00;
639 else if (memory_clock > 115000)
640 mc_para_index = 0x0f;
641 else
642 mc_para_index = (u8)((memory_clock - 40000) / 5000);
645 return mc_para_index;
648 static int cypress_populate_mvdd_value(struct radeon_device *rdev,
649 u32 mclk,
650 RV770_SMC_VOLTAGE_VALUE *voltage)
652 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
653 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
655 if (!pi->mvdd_control) {
656 voltage->index = eg_pi->mvdd_high_index;
657 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
658 return 0;
661 if (mclk <= pi->mvdd_split_frequency) {
662 voltage->index = eg_pi->mvdd_low_index;
663 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
664 } else {
665 voltage->index = eg_pi->mvdd_high_index;
666 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
669 return 0;
672 int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
673 struct rv7xx_pl *pl,
674 RV770_SMC_HW_PERFORMANCE_LEVEL *level,
675 u8 watermark_level)
677 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
678 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
679 int ret;
680 bool dll_state_on;
682 level->gen2PCIE = pi->pcie_gen2 ?
683 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
684 level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
685 level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
686 level->displayWatermark = watermark_level;
688 ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
689 if (ret)
690 return ret;
692 level->mcFlags = 0;
693 if (pi->mclk_stutter_mode_threshold &&
694 (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
695 !eg_pi->uvd_enabled) {
696 level->mcFlags |= SMC_MC_STUTTER_EN;
697 if (eg_pi->sclk_deep_sleep)
698 level->stateFlags |= PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
699 else
700 level->stateFlags &= ~PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
703 if (pi->mem_gddr5) {
704 if (pl->mclk > pi->mclk_edc_enable_threshold)
705 level->mcFlags |= SMC_MC_EDC_RD_FLAG;
707 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
708 level->mcFlags |= SMC_MC_EDC_WR_FLAG;
710 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
712 if (level->strobeMode & SMC_STROBE_ENABLE) {
713 if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
714 ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
715 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
716 else
717 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
718 } else
719 dll_state_on = eg_pi->dll_default_on;
721 ret = cypress_populate_mclk_value(rdev,
722 pl->sclk,
723 pl->mclk,
724 &level->mclk,
725 (level->strobeMode & SMC_STROBE_ENABLE) != 0,
726 dll_state_on);
727 } else {
728 ret = cypress_populate_mclk_value(rdev,
729 pl->sclk,
730 pl->mclk,
731 &level->mclk,
732 true,
733 true);
735 if (ret)
736 return ret;
738 ret = cypress_populate_voltage_value(rdev,
739 &eg_pi->vddc_voltage_table,
740 pl->vddc,
741 &level->vddc);
742 if (ret)
743 return ret;
745 if (eg_pi->vddci_control) {
746 ret = cypress_populate_voltage_value(rdev,
747 &eg_pi->vddci_voltage_table,
748 pl->vddci,
749 &level->vddci);
750 if (ret)
751 return ret;
754 ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
756 return ret;
759 static int cypress_convert_power_state_to_smc(struct radeon_device *rdev,
760 struct radeon_ps *radeon_state,
761 RV770_SMC_SWSTATE *smc_state)
763 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
764 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
765 int ret;
767 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
768 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
770 ret = cypress_convert_power_level_to_smc(rdev,
771 &state->low,
772 &smc_state->levels[0],
773 PPSMC_DISPLAY_WATERMARK_LOW);
774 if (ret)
775 return ret;
777 ret = cypress_convert_power_level_to_smc(rdev,
778 &state->medium,
779 &smc_state->levels[1],
780 PPSMC_DISPLAY_WATERMARK_LOW);
781 if (ret)
782 return ret;
784 ret = cypress_convert_power_level_to_smc(rdev,
785 &state->high,
786 &smc_state->levels[2],
787 PPSMC_DISPLAY_WATERMARK_HIGH);
788 if (ret)
789 return ret;
791 smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
792 smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
793 smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
795 if (eg_pi->dynamic_ac_timing) {
796 smc_state->levels[0].ACIndex = 2;
797 smc_state->levels[1].ACIndex = 3;
798 smc_state->levels[2].ACIndex = 4;
799 } else {
800 smc_state->levels[0].ACIndex = 0;
801 smc_state->levels[1].ACIndex = 0;
802 smc_state->levels[2].ACIndex = 0;
805 rv770_populate_smc_sp(rdev, radeon_state, smc_state);
807 return rv770_populate_smc_t(rdev, radeon_state, smc_state);
810 static void cypress_convert_mc_registers(struct evergreen_mc_reg_entry *entry,
811 SMC_Evergreen_MCRegisterSet *data,
812 u32 num_entries, u32 valid_flag)
814 u32 i, j;
816 for (i = 0, j = 0; j < num_entries; j++) {
817 if (valid_flag & (1 << j)) {
818 data->value[i] = cpu_to_be32(entry->mc_data[j]);
819 i++;
824 static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
825 struct rv7xx_pl *pl,
826 SMC_Evergreen_MCRegisterSet *mc_reg_table_data)
828 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
829 u32 i = 0;
831 for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
832 if (pl->mclk <=
833 eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
834 break;
837 if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
838 --i;
840 cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[i],
841 mc_reg_table_data,
842 eg_pi->mc_reg_table.last,
843 eg_pi->mc_reg_table.valid_flag);
846 static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
847 struct radeon_ps *radeon_state,
848 SMC_Evergreen_MCRegisters *mc_reg_table)
850 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
852 cypress_convert_mc_reg_table_entry_to_smc(rdev,
853 &state->low,
854 &mc_reg_table->data[2]);
855 cypress_convert_mc_reg_table_entry_to_smc(rdev,
856 &state->medium,
857 &mc_reg_table->data[3]);
858 cypress_convert_mc_reg_table_entry_to_smc(rdev,
859 &state->high,
860 &mc_reg_table->data[4]);
863 int cypress_upload_sw_state(struct radeon_device *rdev,
864 struct radeon_ps *radeon_new_state)
866 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
867 u16 address = pi->state_table_start +
868 offsetof(RV770_SMC_STATETABLE, driverState);
869 RV770_SMC_SWSTATE state = { 0 };
870 int ret;
872 ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state);
873 if (ret)
874 return ret;
876 return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state,
877 sizeof(RV770_SMC_SWSTATE),
878 pi->sram_end);
881 int cypress_upload_mc_reg_table(struct radeon_device *rdev,
882 struct radeon_ps *radeon_new_state)
884 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
885 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
886 SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
887 u16 address;
889 cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table);
891 address = eg_pi->mc_reg_table_start +
892 (u16)offsetof(SMC_Evergreen_MCRegisters, data[2]);
894 return rv770_copy_bytes_to_smc(rdev, address,
895 (u8 *)&mc_reg_table.data[2],
896 sizeof(SMC_Evergreen_MCRegisterSet) * 3,
897 pi->sram_end);
900 u32 cypress_calculate_burst_time(struct radeon_device *rdev,
901 u32 engine_clock, u32 memory_clock)
903 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
904 u32 multiplier = pi->mem_gddr5 ? 1 : 2;
905 u32 result = (4 * multiplier * engine_clock) / (memory_clock / 2);
906 u32 burst_time;
908 if (result <= 4)
909 burst_time = 0;
910 else if (result < 8)
911 burst_time = result - 4;
912 else {
913 burst_time = result / 2 ;
914 if (burst_time > 18)
915 burst_time = 18;
918 return burst_time;
921 void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
922 struct radeon_ps *radeon_new_state)
924 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
925 u32 mc_arb_burst_time = RREG32(MC_ARB_BURST_TIME);
927 mc_arb_burst_time &= ~(STATE1_MASK | STATE2_MASK | STATE3_MASK);
929 mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev,
930 new_state->low.sclk,
931 new_state->low.mclk));
932 mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev,
933 new_state->medium.sclk,
934 new_state->medium.mclk));
935 mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev,
936 new_state->high.sclk,
937 new_state->high.mclk));
939 rv730_program_memory_timing_parameters(rdev, radeon_new_state);
941 WREG32(MC_ARB_BURST_TIME, mc_arb_burst_time);
944 static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev,
945 SMC_Evergreen_MCRegisters *mc_reg_table)
947 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
948 u32 i, j;
950 for (i = 0, j = 0; j < eg_pi->mc_reg_table.last; j++) {
951 if (eg_pi->mc_reg_table.valid_flag & (1 << j)) {
952 mc_reg_table->address[i].s0 =
953 cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s0);
954 mc_reg_table->address[i].s1 =
955 cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s1);
956 i++;
960 mc_reg_table->last = (u8)i;
963 static void cypress_set_mc_reg_address_table(struct radeon_device *rdev)
965 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
966 u32 i = 0;
968 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RAS_TIMING_LP >> 2;
969 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RAS_TIMING >> 2;
970 i++;
972 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_CAS_TIMING_LP >> 2;
973 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_CAS_TIMING >> 2;
974 i++;
976 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING_LP >> 2;
977 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING >> 2;
978 i++;
980 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING2_LP >> 2;
981 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING2 >> 2;
982 i++;
984 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D0_LP >> 2;
985 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D0 >> 2;
986 i++;
988 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D1_LP >> 2;
989 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D1 >> 2;
990 i++;
992 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D0_LP >> 2;
993 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D0 >> 2;
994 i++;
996 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D1_LP >> 2;
997 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D1 >> 2;
998 i++;
1000 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
1001 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_EMRS >> 2;
1002 i++;
1004 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
1005 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS >> 2;
1006 i++;
1008 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
1009 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS1 >> 2;
1010 i++;
1012 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC1 >> 2;
1013 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC1 >> 2;
1014 i++;
1016 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RESERVE_M >> 2;
1017 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RESERVE_M >> 2;
1018 i++;
1020 eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC3 >> 2;
1021 eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC3 >> 2;
1022 i++;
1024 eg_pi->mc_reg_table.last = (u8)i;
1027 static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev,
1028 struct evergreen_mc_reg_entry *entry)
1030 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1031 u32 i;
1033 for (i = 0; i < eg_pi->mc_reg_table.last; i++)
1034 entry->mc_data[i] =
1035 RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1039 static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev,
1040 struct atom_memory_clock_range_table *range_table)
1042 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1043 u32 i, j;
1045 for (i = 0; i < range_table->num_entries; i++) {
1046 eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max =
1047 range_table->mclk[i];
1048 radeon_atom_set_ac_timing(rdev, range_table->mclk[i]);
1049 cypress_retrieve_ac_timing_for_one_entry(rdev,
1050 &eg_pi->mc_reg_table.mc_reg_table_entry[i]);
1053 eg_pi->mc_reg_table.num_entries = range_table->num_entries;
1054 eg_pi->mc_reg_table.valid_flag = 0;
1056 for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1057 for (j = 1; j < range_table->num_entries; j++) {
1058 if (eg_pi->mc_reg_table.mc_reg_table_entry[j-1].mc_data[i] !=
1059 eg_pi->mc_reg_table.mc_reg_table_entry[j].mc_data[i]) {
1060 eg_pi->mc_reg_table.valid_flag |= (1 << i);
1061 break;
1067 static int cypress_initialize_mc_reg_table(struct radeon_device *rdev)
1069 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1070 u8 module_index = rv770_get_memory_module_index(rdev);
1071 struct atom_memory_clock_range_table range_table = { 0 };
1072 int ret;
1074 ret = radeon_atom_get_mclk_range_table(rdev,
1075 pi->mem_gddr5,
1076 module_index, &range_table);
1077 if (ret)
1078 return ret;
1080 cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table);
1082 return 0;
1085 static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value)
1087 u32 i, j;
1088 u32 channels = 2;
1090 if ((rdev->family == CHIP_CYPRESS) ||
1091 (rdev->family == CHIP_HEMLOCK))
1092 channels = 4;
1093 else if (rdev->family == CHIP_CEDAR)
1094 channels = 1;
1096 for (i = 0; i < channels; i++) {
1097 if ((rdev->family == CHIP_CYPRESS) ||
1098 (rdev->family == CHIP_HEMLOCK)) {
1099 WREG32_P(MC_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1100 WREG32_P(MC_CG_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
1101 } else {
1102 WREG32_P(MC_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1103 WREG32_P(MC_CG_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
1105 for (j = 0; j < rdev->usec_timeout; j++) {
1106 if (((RREG32(MC_SEQ_CG) & CG_SEQ_RESP_MASK) >> CG_SEQ_RESP_SHIFT) == value)
1107 break;
1108 udelay(1);
1113 static void cypress_force_mc_use_s1(struct radeon_device *rdev,
1114 struct radeon_ps *radeon_boot_state)
1116 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1117 u32 strobe_mode;
1118 u32 mc_seq_cg;
1119 int i;
1121 if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1122 return;
1124 radeon_atom_set_ac_timing(rdev, boot_state->low.mclk);
1125 radeon_mc_wait_for_idle(rdev);
1127 if ((rdev->family == CHIP_CYPRESS) ||
1128 (rdev->family == CHIP_HEMLOCK)) {
1129 WREG32(MC_CONFIG_MCD, 0xf);
1130 WREG32(MC_CG_CONFIG_MCD, 0xf);
1131 } else {
1132 WREG32(MC_CONFIG, 0xf);
1133 WREG32(MC_CG_CONFIG, 0xf);
1136 for (i = 0; i < rdev->num_crtc; i++)
1137 radeon_wait_for_vblank(rdev, i);
1139 WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1140 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1142 strobe_mode = cypress_get_strobe_mode_settings(rdev,
1143 boot_state->low.mclk);
1145 mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S1);
1146 mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1147 WREG32(MC_SEQ_CG, mc_seq_cg);
1149 for (i = 0; i < rdev->usec_timeout; i++) {
1150 if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1151 break;
1152 udelay(1);
1155 mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1156 mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1157 WREG32(MC_SEQ_CG, mc_seq_cg);
1159 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1162 static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev)
1164 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1165 u32 value;
1166 u32 i;
1168 for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1169 value = RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1170 WREG32(eg_pi->mc_reg_table.mc_reg_address[i].s0 << 2, value);
1174 static void cypress_force_mc_use_s0(struct radeon_device *rdev,
1175 struct radeon_ps *radeon_boot_state)
1177 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1178 u32 strobe_mode;
1179 u32 mc_seq_cg;
1180 int i;
1182 cypress_copy_ac_timing_from_s1_to_s0(rdev);
1183 radeon_mc_wait_for_idle(rdev);
1185 if ((rdev->family == CHIP_CYPRESS) ||
1186 (rdev->family == CHIP_HEMLOCK)) {
1187 WREG32(MC_CONFIG_MCD, 0xf);
1188 WREG32(MC_CG_CONFIG_MCD, 0xf);
1189 } else {
1190 WREG32(MC_CONFIG, 0xf);
1191 WREG32(MC_CG_CONFIG, 0xf);
1194 for (i = 0; i < rdev->num_crtc; i++)
1195 radeon_wait_for_vblank(rdev, i);
1197 WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1198 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1200 strobe_mode = cypress_get_strobe_mode_settings(rdev,
1201 boot_state->low.mclk);
1203 mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S0);
1204 mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1205 WREG32(MC_SEQ_CG, mc_seq_cg);
1207 for (i = 0; i < rdev->usec_timeout; i++) {
1208 if (!(RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE))
1209 break;
1210 udelay(1);
1213 mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1214 mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1215 WREG32(MC_SEQ_CG, mc_seq_cg);
1217 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1220 static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev,
1221 RV770_SMC_VOLTAGE_VALUE *voltage)
1223 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1225 voltage->index = eg_pi->mvdd_high_index;
1226 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1228 return 0;
1231 int cypress_populate_smc_initial_state(struct radeon_device *rdev,
1232 struct radeon_ps *radeon_initial_state,
1233 RV770_SMC_STATETABLE *table)
1235 struct rv7xx_ps *initial_state = rv770_get_ps(radeon_initial_state);
1236 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1237 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1238 u32 a_t;
1240 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1241 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1242 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1243 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1244 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1245 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1246 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1247 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1248 table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1249 cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1250 table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1251 cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1253 table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1254 cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1255 table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1256 cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1258 table->initialState.levels[0].mclk.mclk770.mclk_value =
1259 cpu_to_be32(initial_state->low.mclk);
1261 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1262 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1263 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1264 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1265 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1266 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1267 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1268 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1269 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1270 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1272 table->initialState.levels[0].sclk.sclk_value =
1273 cpu_to_be32(initial_state->low.sclk);
1275 table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1277 table->initialState.levels[0].ACIndex = 0;
1279 cypress_populate_voltage_value(rdev,
1280 &eg_pi->vddc_voltage_table,
1281 initial_state->low.vddc,
1282 &table->initialState.levels[0].vddc);
1284 if (eg_pi->vddci_control)
1285 cypress_populate_voltage_value(rdev,
1286 &eg_pi->vddci_voltage_table,
1287 initial_state->low.vddci,
1288 &table->initialState.levels[0].vddci);
1290 cypress_populate_initial_mvdd_value(rdev,
1291 &table->initialState.levels[0].mvdd);
1293 a_t = CG_R(0xffff) | CG_L(0);
1294 table->initialState.levels[0].aT = cpu_to_be32(a_t);
1296 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1299 if (pi->boot_in_gen2)
1300 table->initialState.levels[0].gen2PCIE = 1;
1301 else
1302 table->initialState.levels[0].gen2PCIE = 0;
1303 if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1304 table->initialState.levels[0].gen2XSP = 1;
1305 else
1306 table->initialState.levels[0].gen2XSP = 0;
1308 if (pi->mem_gddr5) {
1309 table->initialState.levels[0].strobeMode =
1310 cypress_get_strobe_mode_settings(rdev,
1311 initial_state->low.mclk);
1313 if (initial_state->low.mclk > pi->mclk_edc_enable_threshold)
1314 table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1315 else
1316 table->initialState.levels[0].mcFlags = 0;
1319 table->initialState.levels[1] = table->initialState.levels[0];
1320 table->initialState.levels[2] = table->initialState.levels[0];
1322 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1324 return 0;
1327 int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
1328 RV770_SMC_STATETABLE *table)
1330 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1331 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1332 u32 mpll_ad_func_cntl =
1333 pi->clk_regs.rv770.mpll_ad_func_cntl;
1334 u32 mpll_ad_func_cntl_2 =
1335 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
1336 u32 mpll_dq_func_cntl =
1337 pi->clk_regs.rv770.mpll_dq_func_cntl;
1338 u32 mpll_dq_func_cntl_2 =
1339 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
1340 u32 spll_func_cntl =
1341 pi->clk_regs.rv770.cg_spll_func_cntl;
1342 u32 spll_func_cntl_2 =
1343 pi->clk_regs.rv770.cg_spll_func_cntl_2;
1344 u32 spll_func_cntl_3 =
1345 pi->clk_regs.rv770.cg_spll_func_cntl_3;
1346 u32 mclk_pwrmgt_cntl =
1347 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
1348 u32 dll_cntl =
1349 pi->clk_regs.rv770.dll_cntl;
1351 table->ACPIState = table->initialState;
1353 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1355 if (pi->acpi_vddc) {
1356 cypress_populate_voltage_value(rdev,
1357 &eg_pi->vddc_voltage_table,
1358 pi->acpi_vddc,
1359 &table->ACPIState.levels[0].vddc);
1360 if (pi->pcie_gen2) {
1361 if (pi->acpi_pcie_gen2)
1362 table->ACPIState.levels[0].gen2PCIE = 1;
1363 else
1364 table->ACPIState.levels[0].gen2PCIE = 0;
1365 } else
1366 table->ACPIState.levels[0].gen2PCIE = 0;
1367 if (pi->acpi_pcie_gen2)
1368 table->ACPIState.levels[0].gen2XSP = 1;
1369 else
1370 table->ACPIState.levels[0].gen2XSP = 0;
1371 } else {
1372 cypress_populate_voltage_value(rdev,
1373 &eg_pi->vddc_voltage_table,
1374 pi->min_vddc_in_table,
1375 &table->ACPIState.levels[0].vddc);
1376 table->ACPIState.levels[0].gen2PCIE = 0;
1379 if (eg_pi->acpi_vddci) {
1380 if (eg_pi->vddci_control) {
1381 cypress_populate_voltage_value(rdev,
1382 &eg_pi->vddci_voltage_table,
1383 eg_pi->acpi_vddci,
1384 &table->ACPIState.levels[0].vddci);
1388 mpll_ad_func_cntl &= ~PDNB;
1390 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1392 if (pi->mem_gddr5)
1393 mpll_dq_func_cntl &= ~PDNB;
1394 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1396 mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1397 MRDCKA1_RESET |
1398 MRDCKB0_RESET |
1399 MRDCKB1_RESET |
1400 MRDCKC0_RESET |
1401 MRDCKC1_RESET |
1402 MRDCKD0_RESET |
1403 MRDCKD1_RESET);
1405 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1406 MRDCKA1_PDNB |
1407 MRDCKB0_PDNB |
1408 MRDCKB1_PDNB |
1409 MRDCKC0_PDNB |
1410 MRDCKC1_PDNB |
1411 MRDCKD0_PDNB |
1412 MRDCKD1_PDNB);
1414 dll_cntl |= (MRDCKA0_BYPASS |
1415 MRDCKA1_BYPASS |
1416 MRDCKB0_BYPASS |
1417 MRDCKB1_BYPASS |
1418 MRDCKC0_BYPASS |
1419 MRDCKC1_BYPASS |
1420 MRDCKD0_BYPASS |
1421 MRDCKD1_BYPASS);
1423 /* evergreen only */
1424 if (rdev->family <= CHIP_HEMLOCK)
1425 spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
1427 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1428 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1430 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1431 cpu_to_be32(mpll_ad_func_cntl);
1432 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1433 cpu_to_be32(mpll_ad_func_cntl_2);
1434 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1435 cpu_to_be32(mpll_dq_func_cntl);
1436 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1437 cpu_to_be32(mpll_dq_func_cntl_2);
1438 table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1439 cpu_to_be32(mclk_pwrmgt_cntl);
1440 table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
1442 table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
1444 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1445 cpu_to_be32(spll_func_cntl);
1446 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1447 cpu_to_be32(spll_func_cntl_2);
1448 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1449 cpu_to_be32(spll_func_cntl_3);
1451 table->ACPIState.levels[0].sclk.sclk_value = 0;
1453 cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1455 if (eg_pi->dynamic_ac_timing)
1456 table->ACPIState.levels[0].ACIndex = 1;
1458 table->ACPIState.levels[1] = table->ACPIState.levels[0];
1459 table->ACPIState.levels[2] = table->ACPIState.levels[0];
1461 return 0;
1464 static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
1465 struct atom_voltage_table *voltage_table)
1467 unsigned int i, diff;
1469 if (voltage_table->count <= MAX_NO_VREG_STEPS)
1470 return;
1472 diff = voltage_table->count - MAX_NO_VREG_STEPS;
1474 for (i= 0; i < MAX_NO_VREG_STEPS; i++)
1475 voltage_table->entries[i] = voltage_table->entries[i + diff];
1477 voltage_table->count = MAX_NO_VREG_STEPS;
1480 int cypress_construct_voltage_tables(struct radeon_device *rdev)
1482 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1483 int ret;
1485 ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0,
1486 &eg_pi->vddc_voltage_table);
1487 if (ret)
1488 return ret;
1490 if (eg_pi->vddc_voltage_table.count > MAX_NO_VREG_STEPS)
1491 cypress_trim_voltage_table_to_fit_state_table(rdev,
1492 &eg_pi->vddc_voltage_table);
1494 if (eg_pi->vddci_control) {
1495 ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0,
1496 &eg_pi->vddci_voltage_table);
1497 if (ret)
1498 return ret;
1500 if (eg_pi->vddci_voltage_table.count > MAX_NO_VREG_STEPS)
1501 cypress_trim_voltage_table_to_fit_state_table(rdev,
1502 &eg_pi->vddci_voltage_table);
1505 return 0;
1508 static void cypress_populate_smc_voltage_table(struct radeon_device *rdev,
1509 struct atom_voltage_table *voltage_table,
1510 RV770_SMC_STATETABLE *table)
1512 unsigned int i;
1514 for (i = 0; i < voltage_table->count; i++) {
1515 table->highSMIO[i] = 0;
1516 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1520 int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
1521 RV770_SMC_STATETABLE *table)
1523 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1524 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1525 unsigned char i;
1527 if (eg_pi->vddc_voltage_table.count) {
1528 cypress_populate_smc_voltage_table(rdev,
1529 &eg_pi->vddc_voltage_table,
1530 table);
1532 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1533 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1534 cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1536 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1537 if (pi->max_vddc_in_table <=
1538 eg_pi->vddc_voltage_table.entries[i].value) {
1539 table->maxVDDCIndexInPPTable = i;
1540 break;
1545 if (eg_pi->vddci_voltage_table.count) {
1546 cypress_populate_smc_voltage_table(rdev,
1547 &eg_pi->vddci_voltage_table,
1548 table);
1550 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDCI] = 0;
1551 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDCI] =
1552 cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1555 return 0;
1558 static u32 cypress_get_mclk_split_point(struct atom_memory_info *memory_info)
1560 if ((memory_info->mem_type == MEM_TYPE_GDDR3) ||
1561 (memory_info->mem_type == MEM_TYPE_DDR3))
1562 return 30000;
1564 return 0;
1567 int cypress_get_mvdd_configuration(struct radeon_device *rdev)
1569 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1570 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1571 u8 module_index;
1572 struct atom_memory_info memory_info;
1573 u32 tmp = RREG32(GENERAL_PWRMGT);
1575 if (!(tmp & BACKBIAS_PAD_EN)) {
1576 eg_pi->mvdd_high_index = 0;
1577 eg_pi->mvdd_low_index = 1;
1578 pi->mvdd_control = false;
1579 return 0;
1582 if (tmp & BACKBIAS_VALUE)
1583 eg_pi->mvdd_high_index = 1;
1584 else
1585 eg_pi->mvdd_high_index = 0;
1587 eg_pi->mvdd_low_index =
1588 (eg_pi->mvdd_high_index == 0) ? 1 : 0;
1590 module_index = rv770_get_memory_module_index(rdev);
1592 if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) {
1593 pi->mvdd_control = false;
1594 return 0;
1597 pi->mvdd_split_frequency =
1598 cypress_get_mclk_split_point(&memory_info);
1600 if (pi->mvdd_split_frequency == 0) {
1601 pi->mvdd_control = false;
1602 return 0;
1605 return 0;
1608 static int cypress_init_smc_table(struct radeon_device *rdev,
1609 struct radeon_ps *radeon_boot_state)
1611 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1612 RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1613 int ret;
1615 memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1617 cypress_populate_smc_voltage_tables(rdev, table);
1619 switch (rdev->pm.int_thermal_type) {
1620 case THERMAL_TYPE_EVERGREEN:
1621 case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1622 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1623 break;
1624 case THERMAL_TYPE_NONE:
1625 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1626 break;
1627 default:
1628 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1629 break;
1632 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1633 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1635 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1636 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1638 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1639 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1641 if (pi->mem_gddr5)
1642 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1644 ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table);
1645 if (ret)
1646 return ret;
1648 ret = cypress_populate_smc_acpi_state(rdev, table);
1649 if (ret)
1650 return ret;
1652 table->driverState = table->initialState;
1654 return rv770_copy_bytes_to_smc(rdev,
1655 pi->state_table_start,
1656 (u8 *)table, sizeof(RV770_SMC_STATETABLE),
1657 pi->sram_end);
1660 int cypress_populate_mc_reg_table(struct radeon_device *rdev,
1661 struct radeon_ps *radeon_boot_state)
1663 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1664 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1665 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1666 SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
1668 rv770_write_smc_soft_register(rdev,
1669 RV770_SMC_SOFT_REGISTER_seq_index, 1);
1671 cypress_populate_mc_reg_addresses(rdev, &mc_reg_table);
1673 cypress_convert_mc_reg_table_entry_to_smc(rdev,
1674 &boot_state->low,
1675 &mc_reg_table.data[0]);
1677 cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[0],
1678 &mc_reg_table.data[1], eg_pi->mc_reg_table.last,
1679 eg_pi->mc_reg_table.valid_flag);
1681 cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table);
1683 return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
1684 (u8 *)&mc_reg_table, sizeof(SMC_Evergreen_MCRegisters),
1685 pi->sram_end);
1688 int cypress_get_table_locations(struct radeon_device *rdev)
1690 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1691 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1692 u32 tmp;
1693 int ret;
1695 ret = rv770_read_smc_sram_dword(rdev,
1696 EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1697 EVERGREEN_SMC_FIRMWARE_HEADER_stateTable,
1698 &tmp, pi->sram_end);
1699 if (ret)
1700 return ret;
1702 pi->state_table_start = (u16)tmp;
1704 ret = rv770_read_smc_sram_dword(rdev,
1705 EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1706 EVERGREEN_SMC_FIRMWARE_HEADER_softRegisters,
1707 &tmp, pi->sram_end);
1708 if (ret)
1709 return ret;
1711 pi->soft_regs_start = (u16)tmp;
1713 ret = rv770_read_smc_sram_dword(rdev,
1714 EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1715 EVERGREEN_SMC_FIRMWARE_HEADER_mcRegisterTable,
1716 &tmp, pi->sram_end);
1717 if (ret)
1718 return ret;
1720 eg_pi->mc_reg_table_start = (u16)tmp;
1722 return 0;
1725 void cypress_enable_display_gap(struct radeon_device *rdev)
1727 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1729 tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1730 tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
1731 DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));
1733 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1734 tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
1735 DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
1736 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1739 static void cypress_program_display_gap(struct radeon_device *rdev)
1741 u32 tmp, pipe;
1742 int i;
1744 tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1745 if (rdev->pm.dpm.new_active_crtc_count > 0)
1746 tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1747 else
1748 tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1750 if (rdev->pm.dpm.new_active_crtc_count > 1)
1751 tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1752 else
1753 tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1755 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1757 tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
1758 pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;
1760 if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
1761 (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
1762 /* find the first active crtc */
1763 for (i = 0; i < rdev->num_crtc; i++) {
1764 if (rdev->pm.dpm.new_active_crtcs & (1 << i))
1765 break;
1767 if (i == rdev->num_crtc)
1768 pipe = 0;
1769 else
1770 pipe = i;
1772 tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
1773 tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
1774 WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
1777 cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
1780 void cypress_dpm_setup_asic(struct radeon_device *rdev)
1782 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1784 rv740_read_clock_registers(rdev);
1785 rv770_read_voltage_smio_registers(rdev);
1786 rv770_get_max_vddc(rdev);
1787 rv770_get_memory_type(rdev);
1789 if (eg_pi->pcie_performance_request)
1790 eg_pi->pcie_performance_request_registered = false;
1792 if (eg_pi->pcie_performance_request)
1793 cypress_advertise_gen2_capability(rdev);
1795 rv770_get_pcie_gen2_status(rdev);
1797 rv770_enable_acpi_pm(rdev);
1800 int cypress_dpm_enable(struct radeon_device *rdev)
1802 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1803 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1804 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1805 int ret;
1807 if (pi->gfx_clock_gating)
1808 rv770_restore_cgcg(rdev);
1810 if (rv770_dpm_enabled(rdev))
1811 return -EINVAL;
1813 if (pi->voltage_control) {
1814 rv770_enable_voltage_control(rdev, true);
1815 ret = cypress_construct_voltage_tables(rdev);
1816 if (ret) {
1817 DRM_ERROR("cypress_construct_voltage_tables failed\n");
1818 return ret;
1822 if (pi->mvdd_control) {
1823 ret = cypress_get_mvdd_configuration(rdev);
1824 if (ret) {
1825 DRM_ERROR("cypress_get_mvdd_configuration failed\n");
1826 return ret;
1830 if (eg_pi->dynamic_ac_timing) {
1831 cypress_set_mc_reg_address_table(rdev);
1832 cypress_force_mc_use_s0(rdev, boot_ps);
1833 ret = cypress_initialize_mc_reg_table(rdev);
1834 if (ret)
1835 eg_pi->dynamic_ac_timing = false;
1836 cypress_force_mc_use_s1(rdev, boot_ps);
1839 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1840 rv770_enable_backbias(rdev, true);
1842 if (pi->dynamic_ss)
1843 cypress_enable_spread_spectrum(rdev, true);
1845 if (pi->thermal_protection)
1846 rv770_enable_thermal_protection(rdev, true);
1848 rv770_setup_bsp(rdev);
1849 rv770_program_git(rdev);
1850 rv770_program_tp(rdev);
1851 rv770_program_tpp(rdev);
1852 rv770_program_sstp(rdev);
1853 rv770_program_engine_speed_parameters(rdev);
1854 cypress_enable_display_gap(rdev);
1855 rv770_program_vc(rdev);
1857 if (pi->dynamic_pcie_gen2)
1858 cypress_enable_dynamic_pcie_gen2(rdev, true);
1860 ret = rv770_upload_firmware(rdev);
1861 if (ret) {
1862 DRM_ERROR("rv770_upload_firmware failed\n");
1863 return ret;
1866 ret = cypress_get_table_locations(rdev);
1867 if (ret) {
1868 DRM_ERROR("cypress_get_table_locations failed\n");
1869 return ret;
1871 ret = cypress_init_smc_table(rdev, boot_ps);
1872 if (ret) {
1873 DRM_ERROR("cypress_init_smc_table failed\n");
1874 return ret;
1876 if (eg_pi->dynamic_ac_timing) {
1877 ret = cypress_populate_mc_reg_table(rdev, boot_ps);
1878 if (ret) {
1879 DRM_ERROR("cypress_populate_mc_reg_table failed\n");
1880 return ret;
1884 cypress_program_response_times(rdev);
1886 r7xx_start_smc(rdev);
1888 ret = cypress_notify_smc_display_change(rdev, false);
1889 if (ret) {
1890 DRM_ERROR("cypress_notify_smc_display_change failed\n");
1891 return ret;
1893 cypress_enable_sclk_control(rdev, true);
1895 if (eg_pi->memory_transition)
1896 cypress_enable_mclk_control(rdev, true);
1898 cypress_start_dpm(rdev);
1900 if (pi->gfx_clock_gating)
1901 cypress_gfx_clock_gating_enable(rdev, true);
1903 if (pi->mg_clock_gating)
1904 cypress_mg_clock_gating_enable(rdev, true);
1906 if (rdev->irq.installed &&
1907 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1908 PPSMC_Result result;
1910 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1911 if (ret)
1912 return ret;
1913 rdev->irq.dpm_thermal = true;
1914 radeon_irq_set(rdev);
1915 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1917 if (result != PPSMC_Result_OK)
1918 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1921 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1923 return 0;
1926 void cypress_dpm_disable(struct radeon_device *rdev)
1928 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1929 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1930 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1932 if (!rv770_dpm_enabled(rdev))
1933 return;
1935 rv770_clear_vc(rdev);
1937 if (pi->thermal_protection)
1938 rv770_enable_thermal_protection(rdev, false);
1940 if (pi->dynamic_pcie_gen2)
1941 cypress_enable_dynamic_pcie_gen2(rdev, false);
1943 if (rdev->irq.installed &&
1944 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1945 rdev->irq.dpm_thermal = false;
1946 radeon_irq_set(rdev);
1949 if (pi->gfx_clock_gating)
1950 cypress_gfx_clock_gating_enable(rdev, false);
1952 if (pi->mg_clock_gating)
1953 cypress_mg_clock_gating_enable(rdev, false);
1955 rv770_stop_dpm(rdev);
1956 r7xx_stop_smc(rdev);
1958 cypress_enable_spread_spectrum(rdev, false);
1960 if (eg_pi->dynamic_ac_timing)
1961 cypress_force_mc_use_s1(rdev, boot_ps);
1963 rv770_reset_smio_status(rdev);
1966 int cypress_dpm_set_power_state(struct radeon_device *rdev)
1968 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1969 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
1970 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
1971 int ret;
1973 ret = rv770_restrict_performance_levels_before_switch(rdev);
1974 if (ret) {
1975 DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
1976 return ret;
1978 if (eg_pi->pcie_performance_request)
1979 cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);
1981 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
1982 ret = rv770_halt_smc(rdev);
1983 if (ret) {
1984 DRM_ERROR("rv770_halt_smc failed\n");
1985 return ret;
1987 ret = cypress_upload_sw_state(rdev, new_ps);
1988 if (ret) {
1989 DRM_ERROR("cypress_upload_sw_state failed\n");
1990 return ret;
1992 if (eg_pi->dynamic_ac_timing) {
1993 ret = cypress_upload_mc_reg_table(rdev, new_ps);
1994 if (ret) {
1995 DRM_ERROR("cypress_upload_mc_reg_table failed\n");
1996 return ret;
2000 cypress_program_memory_timing_parameters(rdev, new_ps);
2002 ret = rv770_resume_smc(rdev);
2003 if (ret) {
2004 DRM_ERROR("rv770_resume_smc failed\n");
2005 return ret;
2007 ret = rv770_set_sw_state(rdev);
2008 if (ret) {
2009 DRM_ERROR("rv770_set_sw_state failed\n");
2010 return ret;
2012 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2014 if (eg_pi->pcie_performance_request)
2015 cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
2017 return 0;
2020 void cypress_dpm_reset_asic(struct radeon_device *rdev)
2022 rv770_restrict_performance_levels_before_switch(rdev);
2023 rv770_set_boot_state(rdev);
2026 void cypress_dpm_display_configuration_changed(struct radeon_device *rdev)
2028 cypress_program_display_gap(rdev);
2031 int cypress_dpm_init(struct radeon_device *rdev)
2033 struct rv7xx_power_info *pi;
2034 struct evergreen_power_info *eg_pi;
2035 struct atom_clock_dividers dividers;
2036 int ret;
2038 eg_pi = kzalloc(sizeof(struct evergreen_power_info), GFP_KERNEL);
2039 if (eg_pi == NULL)
2040 return -ENOMEM;
2041 rdev->pm.dpm.priv = eg_pi;
2042 pi = &eg_pi->rv7xx;
2044 rv770_get_max_vddc(rdev);
2046 eg_pi->ulv.supported = false;
2047 pi->acpi_vddc = 0;
2048 eg_pi->acpi_vddci = 0;
2049 pi->min_vddc_in_table = 0;
2050 pi->max_vddc_in_table = 0;
2052 ret = rv7xx_parse_power_table(rdev);
2053 if (ret)
2054 return ret;
2056 if (rdev->pm.dpm.voltage_response_time == 0)
2057 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2058 if (rdev->pm.dpm.backbias_response_time == 0)
2059 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2061 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2062 0, false, &dividers);
2063 if (ret)
2064 pi->ref_div = dividers.ref_div + 1;
2065 else
2066 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2068 pi->mclk_strobe_mode_threshold = 40000;
2069 pi->mclk_edc_enable_threshold = 40000;
2070 eg_pi->mclk_edc_wr_enable_threshold = 40000;
2072 pi->rlp = RV770_RLP_DFLT;
2073 pi->rmp = RV770_RMP_DFLT;
2074 pi->lhp = RV770_LHP_DFLT;
2075 pi->lmp = RV770_LMP_DFLT;
2077 pi->voltage_control =
2078 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2080 pi->mvdd_control =
2081 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2083 eg_pi->vddci_control =
2084 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
2086 rv770_get_engine_memory_ss(rdev);
2088 pi->asi = RV770_ASI_DFLT;
2089 pi->pasi = CYPRESS_HASI_DFLT;
2090 pi->vrc = CYPRESS_VRC_DFLT;
2092 pi->power_gating = false;
2094 if ((rdev->family == CHIP_CYPRESS) ||
2095 (rdev->family == CHIP_HEMLOCK))
2096 pi->gfx_clock_gating = false;
2097 else
2098 pi->gfx_clock_gating = true;
2100 pi->mg_clock_gating = true;
2101 pi->mgcgtssm = true;
2102 eg_pi->ls_clock_gating = false;
2103 eg_pi->sclk_deep_sleep = false;
2105 pi->dynamic_pcie_gen2 = true;
2107 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2108 pi->thermal_protection = true;
2109 else
2110 pi->thermal_protection = false;
2112 pi->display_gap = true;
2114 if (rdev->flags & RADEON_IS_MOBILITY)
2115 pi->dcodt = true;
2116 else
2117 pi->dcodt = false;
2119 pi->ulps = true;
2121 eg_pi->dynamic_ac_timing = true;
2122 eg_pi->abm = true;
2123 eg_pi->mcls = true;
2124 eg_pi->light_sleep = true;
2125 eg_pi->memory_transition = true;
2126 #if defined(CONFIG_ACPI)
2127 eg_pi->pcie_performance_request =
2128 radeon_acpi_is_pcie_performance_request_supported(rdev);
2129 #else
2130 eg_pi->pcie_performance_request = false;
2131 #endif
2133 if ((rdev->family == CHIP_CYPRESS) ||
2134 (rdev->family == CHIP_HEMLOCK) ||
2135 (rdev->family == CHIP_JUNIPER))
2136 eg_pi->dll_default_on = true;
2137 else
2138 eg_pi->dll_default_on = false;
2140 eg_pi->sclk_deep_sleep = false;
2141 pi->mclk_stutter_mode_threshold = 0;
2143 pi->sram_end = SMC_RAM_END;
2145 return 0;
2148 void cypress_dpm_fini(struct radeon_device *rdev)
2150 int i;
2152 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2153 kfree(rdev->pm.dpm.ps[i].ps_priv);
2155 kfree(rdev->pm.dpm.ps);
2156 kfree(rdev->pm.dpm.priv);
2159 bool cypress_dpm_vblank_too_short(struct radeon_device *rdev)
2161 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2162 u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2163 /* we never hit the non-gddr5 limit so disable it */
2164 u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
2166 if (vblank_time < switch_limit)
2167 return true;
2168 else
2169 return false;