/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
#include <linux/delay.h>
#include <linux/slab.h>

#include "core_types.h"
#include "link_encoder.h"
#include "dce_dmcu.h"
#include "dm_services.h"
#include "reg_helper.h"
#include "fixed31_32.h"
#define TO_DCE_DMCU(dmcu)\
	container_of(dmcu, struct dce_dmcu, base)

#define REG(reg) \
	(dmcu_dce->regs->reg)

#define FN(reg_name, field_name) \
	dmcu_dce->dmcu_shift->field_name, dmcu_dce->dmcu_mask->field_name

#define CTX \
	dmcu_dce->base.ctx
/* PSR related commands */
#define PSR_ENABLE 0x20
#define PSR_EXIT 0x21
#define PSR_SET 0x23
#define PSR_SET_WAITLOOP 0x31
#define MCP_INIT_DMCU 0x88
#define MCP_INIT_IRAM 0x89
#define MCP_SYNC_PHY_LOCK 0x90
#define MCP_SYNC_PHY_UNLOCK 0x91
#define MCP_BL_SET_PWM_FRAC 0x6A /* Enable or disable Fractional PWM */

#define MASTER_COMM_CNTL_REG__MASTER_COMM_INTERRUPT_MASK   0x00000001L
#define mmMP0_SMN_C2PMSG_58				0x1607A

//Register access policy version
#define mmMP0_SMN_C2PMSG_91				0x1609B
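
/*
 * Note on the MCP handshake used throughout this file: the driver waits for
 * MASTER_COMM_INTERRUPT to clear (previous command consumed), writes any
 * payload to MASTER_COMM_DATA_REG1..3, writes the command byte to
 * MASTER_COMM_CMD_REG, then raises MASTER_COMM_INTERRUPT to notify the DMCU
 * microcontroller.
 */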
static bool dce_dmcu_init(struct dmcu *dmcu)
{
	// Do nothing
	return true;
}
bool dce_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	return true;
}
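
/*
 * The DMCU firmware publishes its current PSR state at a fixed IRAM offset
 * (0xf0); reading it requires temporarily enabling host access to the IRAM
 * read window, mirroring the IRAM upload above.
 */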
static void dce_get_dmcu_psr_state(struct dmcu *dmcu, uint32_t *psr_state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* Enable write access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR in DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read data from IRAM_RD_DATA in DMCU_IRAM_RD_DATA */
	*psr_state = REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable write access to IRAM after finished using IRAM
	 * in order to allow dynamic sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}
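
/*
 * PSR enable/disable is a single command byte (PSR_ENABLE or PSR_EXIT); when
 * the caller asks to wait, the PSR state is polled back out of IRAM until
 * the requested transition is observed or the retry budget is exhausted.
 */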
static void dce_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	uint32_t psr_state = 0;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_Cmd */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	if (wait == true) {
		for (retryCount = 0; retryCount <= 100; retryCount++) {
			dce_get_dmcu_psr_state(dmcu, &psr_state);

			if (enable) {
				if (psr_state != 0)
					break;
			} else {
				if (psr_state == 0)
					break;
			}

			udelay(500);
		}
	}
}
static bool dce_dmcu_setup_psr(struct dmcu *dmcu,
		struct dc_link *link,
		struct psr_context *psr_context)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
	union dce_dmcu_psr_config_data_reg3 masterCmdData3;

	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
			psr_context->psrExitLinkTrainingRequired);

	/* Enable static screen interrupts for PSR supported display */
	/* Disable the interrupt coming from other displays. */
	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
			STATIC_SCREEN1_INT_TO_UC_EN, 0,
			STATIC_SCREEN2_INT_TO_UC_EN, 0,
			STATIC_SCREEN3_INT_TO_UC_EN, 0,
			STATIC_SCREEN4_INT_TO_UC_EN, 0);

	switch (psr_context->controllerId) {
	/* Driver uses case 1 for unconfigured */
	case 1:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	case 2:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN2_INT_TO_UC_EN, 1);
		break;
	case 3:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN3_INT_TO_UC_EN, 1);
		break;
	case 4:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN4_INT_TO_UC_EN, 1);
		break;
	case 5:
		/* CZ/NL only has 4 CRTC!!
		 * There is no interrupt enable mask for these instances.
		 */
		break;
	case 6:
		/* CZ/NL only has 4 CRTC!!
		 * These are here because they are defined in HW regspec,
		 * but not really valid. There is no interrupt enable mask
		 * for these instances.
		 */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_PSRHostConfigData */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type  = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
					masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	return true;
}
static bool dce_is_dmcu_initialized(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_uc_reset;

	/* microcontroller is not running */
	REG_GET(DMCU_STATUS, UC_IN_RESET, &dmcu_uc_reset);

	/* DMCU is not running */
	if (dmcu_uc_reset)
		return false;

	return true;
}
static void dce_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	if (dmcu->cached_wait_loop_number == wait_loop_number)
		return;

	/* DMCU is not running */
	if (!dce_is_dmcu_initialized(dmcu))
		return;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	masterCmdData1.u32 = 0;
	masterCmdData1.bits.wait_loop = wait_loop_number;
	dmcu->cached_wait_loop_number = wait_loop_number;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
}
static void dce_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
	return;
}
#if defined(CONFIG_DRM_AMD_DC_DCN)
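/*
 * The DMCU firmware version words live in IRAM starting at offset 0xf1:
 * interface, ABM and PSR versions, followed by two bytes that are combined
 * into the build version. Auto-increment reads walk through them in order.
 */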
static void dcn10_get_dmcu_version(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t dmcu_version_offset = 0xf1;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_RD_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR and read from DATA register */
	REG_WRITE(DMCU_IRAM_RD_CTRL, dmcu_version_offset);
	dmcu->dmcu_version.interface_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.abm_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.psr_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.build_version = ((REG_READ(DMCU_IRAM_RD_DATA) << 8) |
						REG_READ(DMCU_IRAM_RD_DATA));

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_RD_ADDR_AUTO_INC, 0);
}
static void dcn10_dmcu_enable_fractional_pwm(struct dmcu *dmcu,
		uint32_t fractional_pwm)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* Wait until microcontroller is ready to process interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set PWM fractional enable/disable */
	REG_WRITE(MASTER_COMM_DATA_REG1, fractional_pwm);

	/* Set command to enable or disable fractional PWM microcontroller */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_BL_SET_PWM_FRAC);

	/* Notify microcontroller of new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);
}
static bool dcn10_dmcu_init(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	const struct dc_config *config = &dmcu->ctx->dc->config;
	bool status = false;

	/* Definition of DC_DMCU_SCRATCH
	 * 0 : firmware not loaded
	 * 1 : PSP load DMCU FW but not initialized
	 * 2 : Firmware already initialized
	 */
	dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

	switch (dmcu->dmcu_state) {
	case DMCU_UNLOADED:
		status = false;
		break;
	case DMCU_LOADED_UNINITIALIZED:
		/* Wait until microcontroller is ready to process interrupt */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		/* Set initialized ramping boundary value */
		REG_WRITE(MASTER_COMM_DATA_REG1, 0xFFFF);

		/* Set backlight ramping stepsize */
		REG_WRITE(MASTER_COMM_DATA_REG2, abm_gain_stepsize);

		/* Set command to initialize microcontroller */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_DMCU);

		/* Notify microcontroller of new command */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

		/* Ensure command has been executed before continuing */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		// Check state is initialized
		dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

		// If microcontroller is not in running state, fail
		if (dmcu->dmcu_state == DMCU_RUNNING) {
			/* Retrieve and cache the DMCU firmware version. */
			dcn10_get_dmcu_version(dmcu);

			/* Initialize DMCU to use fractional PWM or not */
			dcn10_dmcu_enable_fractional_pwm(dmcu,
				(config->disable_fractional_pwm == false) ? 1 : 0);
			status = true;
		} else {
			status = false;
		}

		break;
	case DMCU_RUNNING:
		status = true;
		break;
	default:
		status = false;
		break;
	}

	return status;
}
static bool dcn21_dmcu_init(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t dmcub_psp_version = REG_READ(DMCUB_SCRATCH15);

	if (dmcu->auto_load_dmcu && dmcub_psp_version == 0) {
		return false;
	}

	return dcn10_dmcu_init(dmcu);
}
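
/*
 * Same IRAM upload as the dce variant, with two additions: it bails out when
 * the microcontroller is not running, and it sends MCP_INIT_IRAM afterwards
 * so the firmware initializes from the freshly loaded IRAM contents.
 */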
static bool dcn10_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	/* Wait until microcontroller is ready to process interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set command to signal IRAM is loaded and to initialize IRAM */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_IRAM);

	/* Notify microcontroller of new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	return true;
}
static void dcn10_get_dmcu_psr_state(struct dmcu *dmcu, uint32_t *psr_state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	/* Enable write access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write address to IRAM_RD_ADDR in DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read data from IRAM_RD_DATA in DMCU_IRAM_RD_DATA */
	*psr_state = REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable write access to IRAM after finished using IRAM
	 * in order to allow dynamic sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}
static void dcn10_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	uint32_t psr_state = 0;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_Cmd */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Below loops 1000 x 500us = 500 ms.
	 * Exit PSR may need to wait 1-2 frames to power up. Timeout after at
	 * least a few frames. Should never hit the max retry assert below.
	 */
	if (wait == true) {
		for (retryCount = 0; retryCount <= 1000; retryCount++) {
			dcn10_get_dmcu_psr_state(dmcu, &psr_state);
			if (enable) {
				if (psr_state != 0)
					break;
			} else {
				if (psr_state == 0)
					break;
			}
			udelay(500);
		}

		/* assert if max retry hit */
		if (retryCount >= 1000)
			ASSERT(0);
	}
}
static bool dcn10_dmcu_setup_psr(struct dmcu *dmcu,
		struct dc_link *link,
		struct psr_context *psr_context)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
	union dce_dmcu_psr_config_data_reg3 masterCmdData3;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
			psr_context->psrExitLinkTrainingRequired);

	/* Enable static screen interrupts for PSR supported display */
	/* Disable the interrupt coming from other displays. */
	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
			STATIC_SCREEN1_INT_TO_UC_EN, 0,
			STATIC_SCREEN2_INT_TO_UC_EN, 0,
			STATIC_SCREEN3_INT_TO_UC_EN, 0,
			STATIC_SCREEN4_INT_TO_UC_EN, 0);

	switch (psr_context->controllerId) {
	/* Driver uses case 1 for unconfigured */
	case 1:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	case 2:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN2_INT_TO_UC_EN, 1);
		break;
	case 3:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN3_INT_TO_UC_EN, 1);
		break;
	case 4:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN4_INT_TO_UC_EN, 1);
		break;
	case 5:
		/* CZ/NL only has 4 CRTC!!
		 * There is no interrupt enable mask for these instances.
		 */
		break;
	case 6:
		/* CZ/NL only has 4 CRTC!!
		 * These are here because they are defined in HW regspec,
		 * but not really valid. There is no interrupt enable mask
		 * for these instances.
		 */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	if (psr_context->allow_smu_optimizations)
		REG_UPDATE(SMU_INTERRUPT_CONTROL, DC_SMU_INT_ENABLE, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* setDMCUParam_PSRHostConfigData */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type  = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	masterCmdData1.bits.allow_smu_optimizations = psr_context->allow_smu_optimizations;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
					masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}
static void dcn10_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	if (wait_loop_number != 0) {
		/* waitDMCUReadyForCmd */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

		masterCmdData1.u32 = 0;
		masterCmdData1.bits.wait_loop = wait_loop_number;
		dmcu->cached_wait_loop_number = wait_loop_number;
		dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

		/* setDMCUParam_Cmd */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

		/* notifyDMCUMsg */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
	}
}
static void dcn10_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
	return;
}
static bool dcn10_is_dmcu_initialized(struct dmcu *dmcu)
{
	/* microcontroller is not running */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	return true;
}
static bool dcn20_lock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_LOCK);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}
static bool dcn20_unlock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_UNLOCK);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}
#endif //(CONFIG_DRM_AMD_DC_DCN)
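
/*
 * Per-generation function tables; the *_dmcu_create() constructors below
 * select the matching table and expose it through dmcu->funcs.
 */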
static const struct dmcu_funcs dce_funcs = {
	.dmcu_init = dce_dmcu_init,
	.load_iram = dce_dmcu_load_iram,
	.set_psr_enable = dce_dmcu_set_psr_enable,
	.setup_psr = dce_dmcu_setup_psr,
	.get_psr_state = dce_get_dmcu_psr_state,
	.set_psr_wait_loop = dce_psr_wait_loop,
	.get_psr_wait_loop = dce_get_psr_wait_loop,
	.is_dmcu_initialized = dce_is_dmcu_initialized
};
#if defined(CONFIG_DRM_AMD_DC_DCN)
static const struct dmcu_funcs dcn10_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized
};
static const struct dmcu_funcs dcn20_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized,
	.lock_phy = dcn20_lock_phy,
	.unlock_phy = dcn20_unlock_phy
};
static const struct dmcu_funcs dcn21_funcs = {
	.dmcu_init = dcn21_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized,
	.lock_phy = dcn20_lock_phy,
	.unlock_phy = dcn20_unlock_phy
};
#endif
static void dce_dmcu_construct(
	struct dce_dmcu *dmcu_dce,
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dmcu *base = &dmcu_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;
	base->cached_wait_loop_number = 0;

	dmcu_dce->regs = regs;
	dmcu_dce->dmcu_shift = dmcu_shift;
	dmcu_dce->dmcu_mask = dmcu_mask;
}
#if defined(CONFIG_DRM_AMD_DC_DCN)
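/*
 * dcn21 additionally samples the PSP firmware version from
 * mmMP0_SMN_C2PMSG_58 (outside FPGA environments) to decide whether PSP
 * auto-loads the DMCU firmware (auto_load_dmcu) and caches the raw version.
 */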
static void dcn21_dmcu_construct(
	struct dce_dmcu *dmcu_dce,
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	uint32_t psp_version = 0;

	dce_dmcu_construct(dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	if (!IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) {
		psp_version = dm_read_reg(ctx, mmMP0_SMN_C2PMSG_58);
		dmcu_dce->base.auto_load_dmcu = ((psp_version & 0x00FF00FF) > 0x00110029);
		dmcu_dce->base.psp_version = psp_version;
	}
}
struct dmcu *dce_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dce_funcs;

	return &dmcu_dce->base;
}
#if defined(CONFIG_DRM_AMD_DC_DCN)
struct dmcu *dcn10_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn10_funcs;

	return &dmcu_dce->base;
}
struct dmcu *dcn20_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn20_funcs;

	return &dmcu_dce->base;
}
struct dmcu *dcn21_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn21_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn21_funcs;

	return &dmcu_dce->base;
}
#endif
void dce_dmcu_destroy(struct dmcu **dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(*dmcu);

	kfree(dmcu_dce);
	*dmcu = NULL;
}