/*
 * Copyright 2015 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include <linux/delay.h>
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/types.h>

#include "cgs_common.h"
#include "smu/smu_8_0_d.h"
#include "smu/smu_8_0_sh_mask.h"

#include "smu8_fusion.h"
#include "smu8_smumgr.h"

#include "smu_ucode_xfer_cz.h"
#include "gca/gfx_8_0_d.h"
#include "gca/gfx_8_0_sh_mask.h"

#define SIZE_ALIGN_32(x) (((x) + 31) / 32 * 32)

static const enum smu8_scratch_entry firmware_list[] = {
        SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA0,
        SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA1,
        SMU8_SCRATCH_ENTRY_UCODE_ID_CP_CE,
        SMU8_SCRATCH_ENTRY_UCODE_ID_CP_PFP,
        SMU8_SCRATCH_ENTRY_UCODE_ID_CP_ME,
        SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT1,
        SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT2,
        SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_G,
};

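/*
 * Driver <-> SMU messaging uses the SRBM2P mailbox registers: the message
 * ID goes into SMU_MP1_SRBM2P_MSG_0, an optional parameter into
 * SMU_MP1_SRBM2P_ARG_0, and the SMU acknowledges (and returns any result)
 * through SMU_MP1_SRBM2P_RESP_0 and SMU_MP1_SRBM2P_ARG_0.
 */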
static uint32_t smu8_get_argument(struct pp_hwmgr *hwmgr)
{
        if (hwmgr == NULL || hwmgr->device == NULL)
                return 0;

        return cgs_read_register(hwmgr->device,
                                        mmSMU_MP1_SRBM2P_ARG_0);
}

/* Send a message to the SMC, and do not wait for its response. */
static int smu8_send_msg_to_smc_async(struct pp_hwmgr *hwmgr, uint16_t msg)
{
        int result = 0;

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        /* Wait until any previous message has been consumed by the SMU. */
        result = PHM_WAIT_FIELD_UNEQUAL(hwmgr,
                                        SMU_MP1_SRBM2P_RESP_0, CONTENT, 0);
        if (result != 0) {
                pr_err("smu8_send_msg_to_smc_async (0x%04x) failed\n", msg);
                return result;
        }

        cgs_write_register(hwmgr->device, mmSMU_MP1_SRBM2P_RESP_0, 0);
        cgs_write_register(hwmgr->device, mmSMU_MP1_SRBM2P_MSG_0, msg);

        return 0;
}

/* Send a message to the SMC, and wait for its response. */
static int smu8_send_msg_to_smc(struct pp_hwmgr *hwmgr, uint16_t msg)
{
        int result;

        result = smu8_send_msg_to_smc_async(hwmgr, msg);
        if (result != 0)
                return result;

        return PHM_WAIT_FIELD_UNEQUAL(hwmgr,
                                        SMU_MP1_SRBM2P_RESP_0, CONTENT, 0);
}

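/*
 * SMU SRAM is reached through the MP0PUB indirect index/data pair: the
 * helpers below validate the target offset, program mmMP0PUB_IND_INDEX_0
 * with the SRAM address and then move data via mmMP0PUB_IND_DATA_0.
 */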
static int smu8_set_smc_sram_address(struct pp_hwmgr *hwmgr,
                                     uint32_t smc_address, uint32_t limit)
{
        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        if (0 != (3 & smc_address)) {
                pr_err("SMC address must be 4 byte aligned\n");
                return -EINVAL;
        }

        if (limit <= (smc_address + 3)) {
                pr_err("SMC address beyond the SMC RAM area\n");
                return -EINVAL;
        }

        cgs_write_register(hwmgr->device, mmMP0PUB_IND_INDEX_0,
                                SMN_MP1_SRAM_START_ADDR + smc_address);

        return 0;
}

static int smu8_write_smc_sram_dword(struct pp_hwmgr *hwmgr,
                uint32_t smc_address, uint32_t value, uint32_t limit)
{
        int result;

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        result = smu8_set_smc_sram_address(hwmgr, smc_address, limit);
        if (!result)
                cgs_write_register(hwmgr->device, mmMP0PUB_IND_DATA_0, value);

        return result;
}

static int smu8_send_msg_to_smc_with_parameter(struct pp_hwmgr *hwmgr,
                                        uint16_t msg, uint32_t parameter)
{
        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        cgs_write_register(hwmgr->device, mmSMU_MP1_SRBM2P_ARG_0, parameter);

        return smu8_send_msg_to_smc(hwmgr, msg);
}

static int smu8_check_fw_load_finish(struct pp_hwmgr *hwmgr,
                                        uint32_t firmware)
{
        int i;
        uint32_t index = SMN_MP1_SRAM_START_ADDR +
                        SMU8_FIRMWARE_HEADER_LOCATION +
                        offsetof(struct SMU8_Firmware_Header, UcodeLoadStatus);

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        cgs_write_register(hwmgr->device, mmMP0PUB_IND_INDEX, index);

        for (i = 0; i < hwmgr->usec_timeout; i++) {
                if (firmware ==
                        (cgs_read_register(hwmgr->device, mmMP0PUB_IND_DATA) & firmware))
                        break;
                udelay(1);
        }

        if (i >= hwmgr->usec_timeout) {
                pr_err("SMU check loaded firmware failed.\n");
                return -EINVAL;
        }

        return 0;
}

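/*
 * Point the MEC instruction cache at the CP_MEC ucode image obtained from
 * CGS.  The MEC microengines are halted first so the new instruction-cache
 * base can be programmed safely.
 */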
static int smu8_load_mec_firmware(struct pp_hwmgr *hwmgr)
{
        uint32_t reg_data;
        uint32_t tmp;
        int ret = 0;
        struct cgs_firmware_info info = {0};
        struct smu8_smumgr *smu8_smu;

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        smu8_smu = hwmgr->smu_backend;
        ret = cgs_get_firmware_info(hwmgr->device,
                                        CGS_UCODE_ID_CP_MEC, &info);
        if (ret)
                return -EINVAL;

        /* Disable MEC parsing/prefetching */
        tmp = cgs_read_register(hwmgr->device,
                                        mmCP_MEC_CNTL);
        tmp = PHM_SET_FIELD(tmp, CP_MEC_CNTL, MEC_ME1_HALT, 1);
        tmp = PHM_SET_FIELD(tmp, CP_MEC_CNTL, MEC_ME2_HALT, 1);
        cgs_write_register(hwmgr->device, mmCP_MEC_CNTL, tmp);

        tmp = cgs_read_register(hwmgr->device,
                                        mmCP_CPC_IC_BASE_CNTL);

        tmp = PHM_SET_FIELD(tmp, CP_CPC_IC_BASE_CNTL, VMID, 0);
        tmp = PHM_SET_FIELD(tmp, CP_CPC_IC_BASE_CNTL, ATC, 0);
        tmp = PHM_SET_FIELD(tmp, CP_CPC_IC_BASE_CNTL, CACHE_POLICY, 0);
        tmp = PHM_SET_FIELD(tmp, CP_CPC_IC_BASE_CNTL, MTYPE, 1);
        cgs_write_register(hwmgr->device, mmCP_CPC_IC_BASE_CNTL, tmp);

        reg_data = lower_32_bits(info.mc_addr) &
                        PHM_FIELD_MASK(CP_CPC_IC_BASE_LO, IC_BASE_LO);
        cgs_write_register(hwmgr->device, mmCP_CPC_IC_BASE_LO, reg_data);

        reg_data = upper_32_bits(info.mc_addr) &
                        PHM_FIELD_MASK(CP_CPC_IC_BASE_HI, IC_BASE_HI);
        cgs_write_register(hwmgr->device, mmCP_CPC_IC_BASE_HI, reg_data);

        return 0;
}

static uint8_t smu8_translate_firmware_enum_to_arg(struct pp_hwmgr *hwmgr,
                        enum smu8_scratch_entry firmware_enum)
{
        uint8_t ret = 0;

        switch (firmware_enum) {
        case SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA0:
                ret = UCODE_ID_SDMA0;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA1:
                if (hwmgr->chip_id == CHIP_STONEY)
                        ret = UCODE_ID_SDMA0;
                else
                        ret = UCODE_ID_SDMA1;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_CP_CE:
                ret = UCODE_ID_CP_CE;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_CP_PFP:
                ret = UCODE_ID_CP_PFP;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_CP_ME:
                ret = UCODE_ID_CP_ME;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT1:
                ret = UCODE_ID_CP_MEC_JT1;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT2:
                if (hwmgr->chip_id == CHIP_STONEY)
                        ret = UCODE_ID_CP_MEC_JT1;
                else
                        ret = UCODE_ID_CP_MEC_JT2;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_GMCON_RENG:
                ret = UCODE_ID_GMCON_RENG;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_G:
                ret = UCODE_ID_RLC_G;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SCRATCH:
                ret = UCODE_ID_RLC_SCRATCH;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_ARAM:
                ret = UCODE_ID_RLC_SRM_ARAM;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_DRAM:
                ret = UCODE_ID_RLC_SRM_DRAM;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_DMCU_ERAM:
                ret = UCODE_ID_DMCU_ERAM;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_DMCU_IRAM:
                ret = UCODE_ID_DMCU_IRAM;
                break;
        case SMU8_SCRATCH_ENTRY_UCODE_ID_POWER_PROFILING:
                ret = TASK_ARG_INIT_MM_PWR_LOG;
                break;
        case SMU8_SCRATCH_ENTRY_DATA_ID_SDMA_HALT:
        case SMU8_SCRATCH_ENTRY_DATA_ID_SYS_CLOCKGATING:
        case SMU8_SCRATCH_ENTRY_DATA_ID_SDMA_RING_REGS:
        case SMU8_SCRATCH_ENTRY_DATA_ID_NONGFX_REINIT:
        case SMU8_SCRATCH_ENTRY_DATA_ID_SDMA_START:
        case SMU8_SCRATCH_ENTRY_DATA_ID_IH_REGISTERS:
                ret = TASK_ARG_REG_MMIO;
                break;
        case SMU8_SCRATCH_ENTRY_SMU8_FUSION_CLKTABLE:
                ret = TASK_ARG_INIT_CLK_TABLE;
                break;
        }

        return ret;
}

static enum cgs_ucode_id smu8_convert_fw_type_to_cgs(uint32_t fw_type)
{
        enum cgs_ucode_id result = CGS_UCODE_ID_MAXIMUM;

        switch (fw_type) {
        case UCODE_ID_SDMA0:
                result = CGS_UCODE_ID_SDMA0;
                break;
        case UCODE_ID_SDMA1:
                result = CGS_UCODE_ID_SDMA1;
                break;
        case UCODE_ID_CP_CE:
                result = CGS_UCODE_ID_CP_CE;
                break;
        case UCODE_ID_CP_PFP:
                result = CGS_UCODE_ID_CP_PFP;
                break;
        case UCODE_ID_CP_ME:
                result = CGS_UCODE_ID_CP_ME;
                break;
        case UCODE_ID_CP_MEC_JT1:
                result = CGS_UCODE_ID_CP_MEC_JT1;
                break;
        case UCODE_ID_CP_MEC_JT2:
                result = CGS_UCODE_ID_CP_MEC_JT2;
                break;
        case UCODE_ID_RLC_G:
                result = CGS_UCODE_ID_RLC_G;
                break;
        default:
                break;
        }

        return result;
}

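/*
 * TOC helpers.  Each SMU_Task entry describes one buffer for the SMU to
 * act on: 'type' selects load/save/initialize, 'arg' identifies the
 * firmware or data block, 'addr'/'size_bytes' give its location and size,
 * and 'next' chains the tasks until END_OF_TASK_LIST terminates the job.
 */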
static int smu8_smu_populate_single_scratch_task(
                        struct pp_hwmgr *hwmgr,
                        enum smu8_scratch_entry fw_enum,
                        uint8_t type, bool is_last)
{
        uint8_t i;
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
        struct SMU_Task *task = &toc->tasks[smu8_smu->toc_entry_used_count++];

        task->type = type;
        task->arg = smu8_translate_firmware_enum_to_arg(hwmgr, fw_enum);
        task->next = is_last ? END_OF_TASK_LIST : smu8_smu->toc_entry_used_count;

        for (i = 0; i < smu8_smu->scratch_buffer_length; i++)
                if (smu8_smu->scratch_buffer[i].firmware_ID == fw_enum)
                        break;

        if (i >= smu8_smu->scratch_buffer_length) {
                pr_err("Invalid Firmware Type\n");
                return -EINVAL;
        }

        task->addr.low = lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr);
        task->addr.high = upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr);
        task->size_bytes = smu8_smu->scratch_buffer[i].data_size;

        if (SMU8_SCRATCH_ENTRY_DATA_ID_IH_REGISTERS == fw_enum) {
                struct smu8_ih_meta_data *pIHReg_restore =
                        (struct smu8_ih_meta_data *)smu8_smu->scratch_buffer[i].kaddr;
                pIHReg_restore->command =
                        METADATA_CMD_MODE0 | METADATA_PERFORM_ON_LOAD;
        }

        return 0;
}

static int smu8_smu_populate_single_ucode_load_task(
                        struct pp_hwmgr *hwmgr,
                        enum smu8_scratch_entry fw_enum,
                        bool is_last)
{
        uint8_t i;
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
        struct SMU_Task *task = &toc->tasks[smu8_smu->toc_entry_used_count++];

        task->type = TASK_TYPE_UCODE_LOAD;
        task->arg = smu8_translate_firmware_enum_to_arg(hwmgr, fw_enum);
        task->next = is_last ? END_OF_TASK_LIST : smu8_smu->toc_entry_used_count;

        for (i = 0; i < smu8_smu->driver_buffer_length; i++)
                if (smu8_smu->driver_buffer[i].firmware_ID == fw_enum)
                        break;

        if (i >= smu8_smu->driver_buffer_length) {
                pr_err("Invalid Firmware Type\n");
                return -EINVAL;
        }

        task->addr.low = lower_32_bits(smu8_smu->driver_buffer[i].mc_addr);
        task->addr.high = upper_32_bits(smu8_smu->driver_buffer[i].mc_addr);
        task->size_bytes = smu8_smu->driver_buffer[i].data_size;

        return 0;
}

static int smu8_smu_construct_toc_for_rlc_aram_save(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;

        smu8_smu->toc_entry_aram = smu8_smu->toc_entry_used_count;
        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_ARAM,
                                TASK_TYPE_UCODE_SAVE, true);

        return 0;
}

static int smu8_smu_initialize_toc_empty_job_list(struct pp_hwmgr *hwmgr)
{
        int i;
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;

        for (i = 0; i < NUM_JOBLIST_ENTRIES; i++)
                toc->JobList[i] = (uint8_t)IGNORE_JOB;

        return 0;
}

static int smu8_smu_construct_toc_for_vddgfx_enter(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;

        toc->JobList[JOB_GFX_SAVE] = (uint8_t)smu8_smu->toc_entry_used_count;
        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SCRATCH,
                                TASK_TYPE_UCODE_SAVE, false);

        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_DRAM,
                                TASK_TYPE_UCODE_SAVE, true);

        return 0;
}

static int smu8_smu_construct_toc_for_vddgfx_exit(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;

        toc->JobList[JOB_GFX_RESTORE] = (uint8_t)smu8_smu->toc_entry_used_count;

        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_CE, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_PFP, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_ME, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT1, false);

        if (hwmgr->chip_id == CHIP_STONEY)
                smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT1, false);
        else
                smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT2, false);

        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_G, false);

        /* populate scratch */
        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SCRATCH,
                                TASK_TYPE_UCODE_LOAD, false);

        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_ARAM,
                                TASK_TYPE_UCODE_LOAD, false);

        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_DRAM,
                                TASK_TYPE_UCODE_LOAD, true);

        return 0;
}

static int smu8_smu_construct_toc_for_power_profiling(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;

        smu8_smu->toc_entry_power_profiling_index = smu8_smu->toc_entry_used_count;

        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_POWER_PROFILING,
                                TASK_TYPE_INITIALIZE, true);

        return 0;
}

static int smu8_smu_construct_toc_for_bootup(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;

        smu8_smu->toc_entry_initialize_index = smu8_smu->toc_entry_used_count;

        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA0, false);
        if (hwmgr->chip_id != CHIP_STONEY)
                smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_SDMA1, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_CE, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_PFP, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_ME, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT1, false);
        if (hwmgr->chip_id != CHIP_STONEY)
                smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_CP_MEC_JT2, false);
        smu8_smu_populate_single_ucode_load_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_G, true);

        return 0;
}

static int smu8_smu_construct_toc_for_clock_table(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;

        smu8_smu->toc_entry_clock_table = smu8_smu->toc_entry_used_count;

        smu8_smu_populate_single_scratch_task(hwmgr,
                                SMU8_SCRATCH_ENTRY_SMU8_FUSION_CLKTABLE,
                                TASK_TYPE_INITIALIZE, true);

        return 0;
}

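/*
 * Assemble the complete TOC: an empty job list first, then the task groups
 * for the RLC ARAM save, vddgfx entry/exit, power profiling, boot-up
 * firmware loading and the clock table.  The starting index of each group
 * is recorded so it can later be run with PPSMC_MSG_ExecuteJob.
 */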
static int smu8_smu_construct_toc(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;

        smu8_smu->toc_entry_used_count = 0;
        smu8_smu_initialize_toc_empty_job_list(hwmgr);
        smu8_smu_construct_toc_for_rlc_aram_save(hwmgr);
        smu8_smu_construct_toc_for_vddgfx_enter(hwmgr);
        smu8_smu_construct_toc_for_vddgfx_exit(hwmgr);
        smu8_smu_construct_toc_for_power_profiling(hwmgr);
        smu8_smu_construct_toc_for_bootup(hwmgr);
        smu8_smu_construct_toc_for_clock_table(hwmgr);

        return 0;
}

static int smu8_smu_populate_firmware_entries(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        uint32_t firmware_type;
        uint32_t i;
        int ret;
        enum cgs_ucode_id ucode_id;
        struct cgs_firmware_info info = {0};

        smu8_smu->driver_buffer_length = 0;

        for (i = 0; i < ARRAY_SIZE(firmware_list); i++) {

                firmware_type = smu8_translate_firmware_enum_to_arg(hwmgr,
                                        firmware_list[i]);

                ucode_id = smu8_convert_fw_type_to_cgs(firmware_type);

                ret = cgs_get_firmware_info(hwmgr->device,
                                                ucode_id, &info);

                if (ret == 0) {
                        smu8_smu->driver_buffer[i].mc_addr = info.mc_addr;

                        smu8_smu->driver_buffer[i].data_size = info.image_size;

                        smu8_smu->driver_buffer[i].firmware_ID = firmware_list[i];
                        smu8_smu->driver_buffer_length++;
                }
        }

        return 0;
}

static int smu8_smu_populate_single_scratch_entry(
                                struct pp_hwmgr *hwmgr,
                                enum smu8_scratch_entry scratch_type,
                                uint32_t ulsize_byte,
                                struct smu8_buffer_entry *entry)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        uint32_t ulsize_aligned = SIZE_ALIGN_32(ulsize_byte);

        entry->data_size = ulsize_byte;
        entry->kaddr = (char *) smu8_smu->smu_buffer.kaddr +
                                smu8_smu->smu_buffer_used_bytes;
        entry->mc_addr = smu8_smu->smu_buffer.mc_addr + smu8_smu->smu_buffer_used_bytes;
        entry->firmware_ID = scratch_type;

        smu8_smu->smu_buffer_used_bytes += ulsize_aligned;

        return 0;
}

static int smu8_download_pptable_settings(struct pp_hwmgr *hwmgr, void **table)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        unsigned long i;

        for (i = 0; i < smu8_smu->scratch_buffer_length; i++) {
                if (smu8_smu->scratch_buffer[i].firmware_ID
                        == SMU8_SCRATCH_ENTRY_SMU8_FUSION_CLKTABLE)
                        break;
        }

        *table = (struct SMU8_Fusion_ClkTable *)smu8_smu->scratch_buffer[i].kaddr;

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_SetClkTableAddrHi,
                                upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr));

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_SetClkTableAddrLo,
                                lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr));

        smu8_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_ExecuteJob,
                                smu8_smu->toc_entry_clock_table);

        smu8_send_msg_to_smc(hwmgr, PPSMC_MSG_ClkTableXferToDram);

        return 0;
}

static int smu8_upload_pptable_settings(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        unsigned long i;

        for (i = 0; i < smu8_smu->scratch_buffer_length; i++) {
                if (smu8_smu->scratch_buffer[i].firmware_ID
                        == SMU8_SCRATCH_ENTRY_SMU8_FUSION_CLKTABLE)
                        break;
        }

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_SetClkTableAddrHi,
                                upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr));

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_SetClkTableAddrLo,
                                lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr));

        smu8_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_ExecuteJob,
                                smu8_smu->toc_entry_clock_table);

        smu8_send_msg_to_smc(hwmgr, PPSMC_MSG_ClkTableXferToSmu);

        return 0;
}

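/*
 * Firmware load sequence: clear UcodeLoadStatus in the SMU firmware header,
 * hand the TOC address to the SMU (PPSMC_MSG_DriverDramAddrHi/Lo followed
 * by PPSMC_MSG_InitJobs), then execute the ARAM-save, power-profiling and
 * initialize job entries.
 */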
static int smu8_request_smu_load_fw(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
        uint32_t smc_address;

        if (!hwmgr->reload_fw) {
                pr_info("skip reloading...\n");
                return 0;
        }

        smu8_smu_populate_firmware_entries(hwmgr);

        smu8_smu_construct_toc(hwmgr);

        smc_address = SMU8_FIRMWARE_HEADER_LOCATION +
                offsetof(struct SMU8_Firmware_Header, UcodeLoadStatus);

        smu8_write_smc_sram_dword(hwmgr, smc_address, 0, smc_address+4);

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_DriverDramAddrHi,
                                upper_32_bits(smu8_smu->toc_buffer.mc_addr));

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_DriverDramAddrLo,
                                lower_32_bits(smu8_smu->toc_buffer.mc_addr));

        smu8_send_msg_to_smc(hwmgr, PPSMC_MSG_InitJobs);

        smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_ExecuteJob,
                                smu8_smu->toc_entry_aram);
        smu8_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_ExecuteJob,
                                smu8_smu->toc_entry_power_profiling_index);

        return smu8_send_msg_to_smc_with_parameter(hwmgr,
                                PPSMC_MSG_ExecuteJob,
                                smu8_smu->toc_entry_initialize_index);
}

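/*
 * Read the SMU firmware version from the firmware header, trigger the
 * firmware load and wait until every expected ucode (SDMA1 and CP_MEC_JT2
 * are not expected on Stoney) reports as loaded, then program the MEC.
 */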
static int smu8_start_smu(struct pp_hwmgr *hwmgr)
{
        int ret = 0;
        uint32_t fw_to_check = 0;
        struct amdgpu_device *adev = hwmgr->adev;

        uint32_t index = SMN_MP1_SRAM_START_ADDR +
                        SMU8_FIRMWARE_HEADER_LOCATION +
                        offsetof(struct SMU8_Firmware_Header, Version);

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        cgs_write_register(hwmgr->device, mmMP0PUB_IND_INDEX, index);
        hwmgr->smu_version = cgs_read_register(hwmgr->device, mmMP0PUB_IND_DATA);
        adev->pm.fw_version = hwmgr->smu_version >> 8;

        fw_to_check = UCODE_ID_RLC_G_MASK |
                        UCODE_ID_SDMA0_MASK |
                        UCODE_ID_SDMA1_MASK |
                        UCODE_ID_CP_CE_MASK |
                        UCODE_ID_CP_ME_MASK |
                        UCODE_ID_CP_PFP_MASK |
                        UCODE_ID_CP_MEC_JT1_MASK |
                        UCODE_ID_CP_MEC_JT2_MASK;

        if (hwmgr->chip_id == CHIP_STONEY)
                fw_to_check &= ~(UCODE_ID_SDMA1_MASK | UCODE_ID_CP_MEC_JT2_MASK);

        ret = smu8_request_smu_load_fw(hwmgr);
        if (ret)
                pr_err("SMU firmware load failed\n");

        smu8_check_fw_load_finish(hwmgr, fw_to_check);

        ret = smu8_load_mec_firmware(hwmgr);
        if (ret)
                pr_err("Mec Firmware load failed\n");

        return ret;
}

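/*
 * Allocate the SMU backend state: a 4 KiB TOC buffer plus one scratch
 * buffer sized for the RLC save areas, the multimedia power log and the
 * clock table, then carve the individual scratch entries out of it.
 */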
static int smu8_smu_init(struct pp_hwmgr *hwmgr)
{
        int ret = 0;
        struct smu8_smumgr *smu8_smu;

        smu8_smu = kzalloc(sizeof(struct smu8_smumgr), GFP_KERNEL);
        if (smu8_smu == NULL)
                return -ENOMEM;

        hwmgr->smu_backend = smu8_smu;

        smu8_smu->toc_buffer.data_size = 4096;
        smu8_smu->smu_buffer.data_size =
                ALIGN(UCODE_ID_RLC_SCRATCH_SIZE_BYTE, 32) +
                ALIGN(UCODE_ID_RLC_SRM_ARAM_SIZE_BYTE, 32) +
                ALIGN(UCODE_ID_RLC_SRM_DRAM_SIZE_BYTE, 32) +
                ALIGN(sizeof(struct SMU8_MultimediaPowerLogData), 32) +
                ALIGN(sizeof(struct SMU8_Fusion_ClkTable), 32);

        ret = amdgpu_bo_create_kernel((struct amdgpu_device *)hwmgr->adev,
                                smu8_smu->toc_buffer.data_size,
                                PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_VRAM,
                                &smu8_smu->toc_buffer.handle,
                                &smu8_smu->toc_buffer.mc_addr,
                                &smu8_smu->toc_buffer.kaddr);
        if (ret)
                goto err2;

        ret = amdgpu_bo_create_kernel((struct amdgpu_device *)hwmgr->adev,
                                smu8_smu->smu_buffer.data_size,
                                PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_VRAM,
                                &smu8_smu->smu_buffer.handle,
                                &smu8_smu->smu_buffer.mc_addr,
                                &smu8_smu->smu_buffer.kaddr);
        if (ret)
                goto err1;

        if (0 != smu8_smu_populate_single_scratch_entry(hwmgr,
                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SCRATCH,
                UCODE_ID_RLC_SCRATCH_SIZE_BYTE,
                &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
                pr_err("Error when Populate Firmware Entry.\n");
                goto err0;
        }

        if (0 != smu8_smu_populate_single_scratch_entry(hwmgr,
                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_ARAM,
                UCODE_ID_RLC_SRM_ARAM_SIZE_BYTE,
                &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
                pr_err("Error when Populate Firmware Entry.\n");
                goto err0;
        }

        if (0 != smu8_smu_populate_single_scratch_entry(hwmgr,
                SMU8_SCRATCH_ENTRY_UCODE_ID_RLC_SRM_DRAM,
                UCODE_ID_RLC_SRM_DRAM_SIZE_BYTE,
                &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
                pr_err("Error when Populate Firmware Entry.\n");
                goto err0;
        }

        if (0 != smu8_smu_populate_single_scratch_entry(hwmgr,
                SMU8_SCRATCH_ENTRY_UCODE_ID_POWER_PROFILING,
                sizeof(struct SMU8_MultimediaPowerLogData),
                &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
                pr_err("Error when Populate Firmware Entry.\n");
                goto err0;
        }

        if (0 != smu8_smu_populate_single_scratch_entry(hwmgr,
                SMU8_SCRATCH_ENTRY_SMU8_FUSION_CLKTABLE,
                sizeof(struct SMU8_Fusion_ClkTable),
                &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
                pr_err("Error when Populate Firmware Entry.\n");
                goto err0;
        }

        return 0;

err0:
        amdgpu_bo_free_kernel(&smu8_smu->smu_buffer.handle,
                                &smu8_smu->smu_buffer.mc_addr,
                                &smu8_smu->smu_buffer.kaddr);
err1:
        amdgpu_bo_free_kernel(&smu8_smu->toc_buffer.handle,
                                &smu8_smu->toc_buffer.mc_addr,
                                &smu8_smu->toc_buffer.kaddr);
err2:
        kfree(smu8_smu);
        return -EINVAL;
}

static int smu8_smu_fini(struct pp_hwmgr *hwmgr)
{
        struct smu8_smumgr *smu8_smu;

        if (hwmgr == NULL || hwmgr->device == NULL)
                return -EINVAL;

        smu8_smu = hwmgr->smu_backend;
        if (smu8_smu) {
                amdgpu_bo_free_kernel(&smu8_smu->toc_buffer.handle,
                                        &smu8_smu->toc_buffer.mc_addr,
                                        &smu8_smu->toc_buffer.kaddr);
                amdgpu_bo_free_kernel(&smu8_smu->smu_buffer.handle,
                                        &smu8_smu->smu_buffer.mc_addr,
                                        &smu8_smu->smu_buffer.kaddr);
                kfree(smu8_smu);
        }

        return 0;
}

static bool smu8_dpm_check_smu_features(struct pp_hwmgr *hwmgr,
                                unsigned long check_feature)
{
        int result;
        unsigned long features;

        result = smu8_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_GetFeatureStatus, 0);
        if (result == 0) {
                features = smum_get_argument(hwmgr);
                if (features & check_feature)
                        return true;
        }

        return false;
}

static bool smu8_is_dpm_running(struct pp_hwmgr *hwmgr)
{
        if (smu8_dpm_check_smu_features(hwmgr, SMU_EnabledFeatureScoreboard_SclkDpmOn))
                return true;
        return false;
}

const struct pp_smumgr_func smu8_smu_funcs = {
        .smu_init = smu8_smu_init,
        .smu_fini = smu8_smu_fini,
        .start_smu = smu8_start_smu,
        .check_fw_load_finish = smu8_check_fw_load_finish,
        .request_smu_load_fw = NULL,
        .request_smu_load_specific_fw = NULL,
        .get_argument = smu8_get_argument,
        .send_msg_to_smc = smu8_send_msg_to_smc,
        .send_msg_to_smc_with_parameter = smu8_send_msg_to_smc_with_parameter,
        .download_pptable_settings = smu8_download_pptable_settings,
        .upload_pptable_settings = smu8_upload_pptable_settings,
        .is_dpm_running = smu8_is_dpm_running,
};