// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018-2022 Intel Corporation
//

#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/firmware.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/slab.h>
#include <sound/hdaudio_ext.h>
#include <sound/sof.h>
#include <sound/pcm_params.h>

#include "../sof-priv.h"
#include "../ops.h"
#include "hda.h"

#define HDA_SKL_WAIT_TIMEOUT		500	/* 500 msec */
#define HDA_SKL_CLDMA_MAX_BUFFER_SIZE	(32 * PAGE_SIZE)

/* Stream Reset */
#define HDA_CL_SD_CTL_SRST_SHIFT	0
#define HDA_CL_SD_CTL_SRST(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_SRST_SHIFT)

/* Stream Run */
#define HDA_CL_SD_CTL_RUN_SHIFT		1
#define HDA_CL_SD_CTL_RUN(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_RUN_SHIFT)

/* Interrupt On Completion Enable */
#define HDA_CL_SD_CTL_IOCE_SHIFT	2
#define HDA_CL_SD_CTL_IOCE(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_IOCE_SHIFT)

/* FIFO Error Interrupt Enable */
#define HDA_CL_SD_CTL_FEIE_SHIFT	3
#define HDA_CL_SD_CTL_FEIE(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_FEIE_SHIFT)

/* Descriptor Error Interrupt Enable */
#define HDA_CL_SD_CTL_DEIE_SHIFT	4
#define HDA_CL_SD_CTL_DEIE(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_DEIE_SHIFT)

/* FIFO Limit Change */
#define HDA_CL_SD_CTL_FIFOLC_SHIFT	5
#define HDA_CL_SD_CTL_FIFOLC(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_FIFOLC_SHIFT)

/* Stripe Control */
#define HDA_CL_SD_CTL_STRIPE_SHIFT	16
#define HDA_CL_SD_CTL_STRIPE(x)		(((x) & 0x3) << \
					HDA_CL_SD_CTL_STRIPE_SHIFT)

/* Traffic Priority */
#define HDA_CL_SD_CTL_TP_SHIFT		18
#define HDA_CL_SD_CTL_TP(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_TP_SHIFT)

/* Bidirectional Direction Control */
#define HDA_CL_SD_CTL_DIR_SHIFT		19
#define HDA_CL_SD_CTL_DIR(x)		(((x) & 0x1) << \
					HDA_CL_SD_CTL_DIR_SHIFT)

/* Stream Number */
#define HDA_CL_SD_CTL_STRM_SHIFT	20
#define HDA_CL_SD_CTL_STRM(x)		(((x) & 0xf) << \
					HDA_CL_SD_CTL_STRM_SHIFT)

#define HDA_CL_SD_CTL_INT(x)	\
		(HDA_CL_SD_CTL_IOCE(x) | \
		HDA_CL_SD_CTL_FEIE(x) | \
		HDA_CL_SD_CTL_DEIE(x))

#define HDA_CL_SD_CTL_INT_MASK	\
		(HDA_CL_SD_CTL_IOCE(1) | \
		HDA_CL_SD_CTL_FEIE(1) | \
		HDA_CL_SD_CTL_DEIE(1))

#define DMA_ADDRESS_128_BITS_ALIGNMENT	7
#define BDL_ALIGN(x)			((x) >> DMA_ADDRESS_128_BITS_ALIGNMENT)

/* Buffer Descriptor List Lower Base Address */
#define HDA_CL_SD_BDLPLBA_SHIFT		7
#define HDA_CL_SD_BDLPLBA_MASK		GENMASK(31, 7)
#define HDA_CL_SD_BDLPLBA(x)		\
	((BDL_ALIGN(lower_32_bits(x)) << HDA_CL_SD_BDLPLBA_SHIFT) & \
	 HDA_CL_SD_BDLPLBA_MASK)

/* Buffer Descriptor List Upper Base Address */
#define HDA_CL_SD_BDLPUBA(x)		\
			(upper_32_bits(x))

/* Software Position in Buffer Enable */
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT	0
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK	\
			(1 << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT)

#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(x)	\
			(((x) << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT) & \
			 HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK)

#define HDA_CL_DMA_SD_INT_COMPLETE		0x4
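
/*
 * Set up a single buffer descriptor list entry covering the whole
 * firmware DMA buffer; returns the number of fragments used (one).
 */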
static int cl_skl_cldma_setup_bdle(struct snd_sof_dev *sdev,
				   struct snd_dma_buffer *dmab_data,
				   __le32 **bdlp, int size, int with_ioc)
{
	phys_addr_t addr = virt_to_phys(dmab_data->area);
	__le32 *bdl = *bdlp;

	/*
	 * This code is simplified by using one fragment of physical memory and assuming
	 * all the code fits. This could be improved with scatter-gather but the firmware
	 * size is limited by DSP memory anyways
	 */
	bdl[0] = cpu_to_le32(lower_32_bits(addr));
	bdl[1] = cpu_to_le32(upper_32_bits(addr));
	bdl[2] = cpu_to_le32(size);
	bdl[3] = (!with_ioc) ? 0 : cpu_to_le32(0x01);

	return 1; /* one fragment */
}
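
/*
 * Start or stop the code loader DMA stream and wait for the hardware to
 * reflect the requested state of the Run bit.
 */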
static void cl_skl_cldma_stream_run(struct snd_sof_dev *sdev, bool enable)
{
	int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
	unsigned char val;
	int retries;
	u32 run = enable ? 0x1 : 0;

	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
				HDA_CL_SD_CTL_RUN(1), HDA_CL_SD_CTL_RUN(run));

	retries = 300;
	do {
		udelay(3);

		/* waiting for hardware to report the stream Run bit set */
		val = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
				       sd_offset + SOF_HDA_ADSP_REG_SD_CTL);
		val &= HDA_CL_SD_CTL_RUN(1);
		if (enable && val)
			break;
		else if (!enable && !val)
			break;
	} while (--retries);

	if (retries == 0)
		dev_err(sdev->dev, "%s: failed to set Run bit=%d enable=%d\n",
			__func__, val, enable);
}

static void cl_skl_cldma_stream_clear(struct snd_sof_dev *sdev)
{
	int sd_offset = SOF_HDA_ADSP_LOADER_BASE;

	/* make sure Run bit is cleared before setting stream register */
	cl_skl_cldma_stream_run(sdev, 0);

	/* Disable the Interrupt On Completion, FIFO Error Interrupt,
	 * Descriptor Error Interrupt and set the cldma stream number to 0.
	 */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
				HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(0));
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
				HDA_CL_SD_CTL_STRM(0xf), HDA_CL_SD_CTL_STRM(0));

	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, HDA_CL_SD_BDLPLBA(0));
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 0);

	/* Set the Cyclic Buffer Length to 0. */
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 0);
	/* Set the Last Valid Index. */
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 0);
}
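
/*
 * Program the Software Position in Buffer (SPIB) with the number of bytes
 * the DMA engine may consume, optionally enabling SPIB operation first.
 */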
static void cl_skl_cldma_setup_spb(struct snd_sof_dev *sdev,
				   unsigned int size, bool enable)
{
	int sd_offset = SOF_DSP_REG_CL_SPBFIFO;

	if (enable)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
					sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
					HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
					HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(1));

	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, size);
}

static void cl_skl_cldma_set_intr(struct snd_sof_dev *sdev, bool enable)
{
	u32 val = enable ? HDA_DSP_ADSPIC_CL_DMA : 0;

	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
				HDA_DSP_ADSPIC_CL_DMA, val);
}

static void cl_skl_cldma_cleanup_spb(struct snd_sof_dev *sdev)
{
	int sd_offset = SOF_DSP_REG_CL_SPBFIFO;

	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
				HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
				HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(0));

	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, 0);
}
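
/*
 * Point the code loader stream at the BDL, program the cyclic buffer
 * length and last valid index, then enable the stream interrupts and
 * assign the stream number.
 */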
static void cl_skl_cldma_setup_controller(struct snd_sof_dev *sdev,
					  struct snd_dma_buffer *dmab_bdl,
					  unsigned int max_size, u32 count)
{
	int sd_offset = SOF_HDA_ADSP_LOADER_BASE;

	/* Clear the stream first and then set it. */
	cl_skl_cldma_stream_clear(sdev);

	/* setting the stream register */
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  HDA_CL_SD_BDLPLBA(dmab_bdl->addr));
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  HDA_CL_SD_BDLPUBA(dmab_bdl->addr));

	/* Set the Cyclic Buffer Length. */
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL, max_size);
	/* Set the Last Valid Index. */
	snd_sof_dsp_write(sdev, HDA_DSP_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_LVI, count - 1);

	/* Set the Interrupt On Completion, FIFO Error Interrupt,
	 * Descriptor Error Interrupt and the cldma stream number.
	 */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
				HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(1));
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_CTL,
				HDA_CL_SD_CTL_STRM(0xf),
				HDA_CL_SD_CTL_STRM(1));
}
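
/*
 * Allocate the firmware image buffer and the BDL buffer, describe the
 * image buffer in the BDL and program the CLDMA controller accordingly.
 */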
static int cl_stream_prepare_skl(struct snd_sof_dev *sdev,
				 struct snd_dma_buffer *dmab,
				 struct snd_dma_buffer *dmab_bdl)
{
	unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
	__le32 *bdl;
	int frags;
	int ret;

	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: failed to alloc fw buffer: %x\n", __func__, ret);
		return ret;
	}

	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab_bdl);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: failed to alloc bdl buffer: %x\n", __func__, ret);
		snd_dma_free_pages(dmab);
		return ret;
	}

	bdl = (__le32 *)dmab_bdl->area;
	frags = cl_skl_cldma_setup_bdle(sdev, dmab, &bdl, bufsize, 1);
	cl_skl_cldma_setup_controller(sdev, dmab_bdl, bufsize, frags);

	return ret;
}

static void cl_cleanup_skl(struct snd_sof_dev *sdev,
			   struct snd_dma_buffer *dmab,
			   struct snd_dma_buffer *dmab_bdl)
{
	cl_skl_cldma_cleanup_spb(sdev);
	cl_skl_cldma_stream_clear(sdev);
	snd_dma_free_pages(dmab);
	snd_dma_free_pages(dmab_bdl);
}
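
/*
 * Power up and run the init core, prepare the code loader stream, enable
 * the IPC interrupts and wait for the ROM to report FSR_STATE_INIT_DONE.
 */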
static int cl_dsp_init_skl(struct snd_sof_dev *sdev,
			   struct snd_dma_buffer *dmab,
			   struct snd_dma_buffer *dmab_bdl)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	const struct sof_intel_dsp_desc *chip = hda->desc;
	unsigned int status;
	u32 flags;
	int ret;

	/* check if the init_core is already enabled, if yes, reset and make it run,
	 * if not, powerdown and enable it again.
	 */
	if (hda_dsp_core_is_enabled(sdev, chip->init_core_mask)) {
		/* if enabled, reset it, and run the init_core. */
		ret = hda_dsp_core_stall_reset(sdev, chip->init_core_mask);
		if (ret < 0)
			goto err;

		ret = hda_dsp_core_run(sdev, chip->init_core_mask);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: dsp core start failed %d\n", __func__, ret);
			goto err;
		}
	} else {
		/* if not enabled, power down it first and then powerup and run
		 * the init_core.
		 */
		ret = hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: dsp core0 disable fail: %d\n", __func__, ret);
			goto err;
		}

		ret = hda_dsp_enable_core(sdev, chip->init_core_mask);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: dsp core0 enable fail: %d\n", __func__, ret);
			goto err;
		}
	}

	/* prepare DMA for code loader stream */
	ret = cl_stream_prepare_skl(sdev, dmab, dmab_bdl);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: dma prepare fw loading err: %x\n", __func__, ret);
		return ret;
	}

	/* enable the interrupt */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
				HDA_DSP_ADSPIC_IPC, HDA_DSP_ADSPIC_IPC);

	/* enable IPC DONE interrupt */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
				HDA_DSP_REG_HIPCCTL_DONE,
				HDA_DSP_REG_HIPCCTL_DONE);

	/* enable IPC BUSY interrupt */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
				HDA_DSP_REG_HIPCCTL_BUSY,
				HDA_DSP_REG_HIPCCTL_BUSY);

	/* polling the ROM init status information. */
	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
					    chip->rom_status_reg, status,
					    (FSR_TO_STATE_CODE(status)
					     == FSR_STATE_INIT_DONE),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    chip->rom_init_timeout *
					    USEC_PER_MSEC);
	if (ret < 0)
		goto err;

	return ret;

err:
	flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;

	snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);
	cl_cleanup_skl(sdev, dmab, dmab_bdl);
	hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
	return ret;
}
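
/*
 * Copy one chunk of the firmware image into the DMA buffer, arm the wait
 * condition and the SPIB byte count, then trigger the code loader stream.
 */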
static void cl_skl_cldma_fill_buffer(struct snd_sof_dev *sdev,
				     struct snd_dma_buffer *dmab,
				     unsigned int bufsize,
				     unsigned int copysize,
				     const void *curr_pos,
				     bool intr_enable)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;

	/* copy the image into the buffer with the maximum buffer size. */
	unsigned int size = (bufsize == copysize) ? bufsize : copysize;

	memcpy(dmab->area, curr_pos, size);

	/* Set the wait condition for every load. */
	hda->code_loading = 1;

	/* Set the interrupt. */
	if (intr_enable)
		cl_skl_cldma_set_intr(sdev, true);

	/* Set the SPB. */
	cl_skl_cldma_setup_spb(sdev, size, true);

	/* Trigger the code loading stream. */
	cl_skl_cldma_stream_run(sdev, true);
}
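
/*
 * Wait for the CLDMA completion interrupt for the current chunk and then
 * check the stream status register for the completion bit.
 */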
static int cl_skl_cldma_wait_interruptible(struct snd_sof_dev *sdev,
					   bool intr_wait)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	const struct sof_intel_dsp_desc *chip = hda->desc;
	int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
	u8 cl_dma_intr_status;

	/*
	 * Wait for CLDMA interrupt to inform the binary segment transfer is
	 * complete.
	 */
	if (!wait_event_timeout(hda->waitq, !hda->code_loading,
				msecs_to_jiffies(HDA_SKL_WAIT_TIMEOUT))) {
		dev_err(sdev->dev, "cldma copy timeout\n");
		dev_err(sdev->dev, "ROM code=%#x: FW status=%#x\n",
			snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
			snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
		return -EIO;
	}

	/* now check DMA interrupt status */
	cl_dma_intr_status = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
					      sd_offset + SOF_HDA_ADSP_REG_SD_STS);

	if (!(cl_dma_intr_status & HDA_CL_DMA_SD_INT_COMPLETE)) {
		dev_err(sdev->dev, "cldma copy failed\n");
		return -EIO;
	}

	dev_dbg(sdev->dev, "cldma buffer copy complete\n");
	return 0;
}
static int
cl_skl_cldma_copy_to_buf(struct snd_sof_dev *sdev,
			 struct snd_dma_buffer *dmab,
			 const void *bin,
			 u32 total_size, u32 bufsize)
{
	unsigned int bytes_left = total_size;
	const void *curr_pos = bin;
	int ret;

	while (bytes_left > 0) {
		if (bytes_left > bufsize) {
			dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bufsize);

			cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bufsize, curr_pos, true);

			ret = cl_skl_cldma_wait_interruptible(sdev, false);
			if (ret < 0) {
				dev_err(sdev->dev, "%s: fw failed to load. %#x bytes remaining\n",
					__func__, bytes_left);
				return ret;
			}

			bytes_left -= bufsize;
			curr_pos += bufsize;
		} else {
			dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bytes_left);

			cl_skl_cldma_set_intr(sdev, false);
			cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bytes_left, curr_pos, false);
			return 0;
		}
	}

	return 0;
}
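
/*
 * Skip the firmware header (payload_offset) and stream the remaining
 * image to the DSP through the code loader DMA.
 */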
static int cl_copy_fw_skl(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab)
{
	const struct firmware *fw = sdev->basefw.fw;
	struct firmware stripped_firmware;
	unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
	int ret;

	stripped_firmware.data = fw->data + sdev->basefw.payload_offset;
	stripped_firmware.size = fw->size - sdev->basefw.payload_offset;

	dev_dbg(sdev->dev, "firmware size: %#zx buffer size %#x\n", fw->size, bufsize);

	ret = cl_skl_cldma_copy_to_buf(sdev, dmab, stripped_firmware.data,
				       stripped_firmware.size, bufsize);
	if (ret < 0)
		dev_err(sdev->dev, "%s: fw copy failed %d\n", __func__, ret);

	return ret;
}
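
/*
 * Boot the base firmware: initialize the DSP ROM (with one retry), stream
 * the firmware image via CLDMA and wait for the ROM to report that the
 * base firmware has been entered.
 */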
int hda_dsp_cl_boot_firmware_skl(struct snd_sof_dev *sdev)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	const struct sof_intel_dsp_desc *chip = hda->desc;
	struct snd_dma_buffer dmab_bdl;
	struct snd_dma_buffer dmab;
	unsigned int reg;
	u32 flags;
	int ret;

	ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);

	/* retry enabling core and ROM load. seemed to help */
	if (ret < 0) {
		ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "Error code=%#x: FW status=%#x\n",
				snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
				snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
			dev_err(sdev->dev, "Core En/ROM load fail:%d\n", ret);
			return ret;
		}
	}

	dev_dbg(sdev->dev, "ROM init successful\n");

	/* at this point DSP ROM has been initialized and should be ready for
	 * code loading and firmware boot
	 */
	ret = cl_copy_fw_skl(sdev, &dmab);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: load firmware failed : %d\n", __func__, ret);
		goto err;
	}

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
					    chip->rom_status_reg, reg,
					    (FSR_TO_STATE_CODE(reg)
					     == FSR_STATE_ROM_BASEFW_ENTERED),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_BASEFW_TIMEOUT_US);

	dev_dbg(sdev->dev, "Firmware download successful, booting...\n");

	cl_skl_cldma_stream_run(sdev, false);
	cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

	if (!ret)
		return chip->init_core_mask;

	return ret;

err:
	flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;

	snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);

	/* power down DSP */
	hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
	cl_skl_cldma_stream_run(sdev, false);
	cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

	dev_err(sdev->dev, "%s: load fw failed err: %d\n", __func__, ret);
	return ret;
}