// SPDX-License-Identifier: GPL-2.0-only
/*
 * skl-sst-dsp.c - SKL SST library generic function
 *
 * Copyright (C) 2014-15, Intel Corporation.
 * Author: Rafal Redzimski <rafal.f.redzimski@intel.com>
 *	Jeeja KP <jeeja.kp@intel.com>
 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
10 #include <sound/pcm.h>
12 #include "../common/sst-dsp.h"
13 #include "../common/sst-ipc.h"
14 #include "../common/sst-dsp-priv.h"
17 /* various timeout values */
18 #define SKL_DSP_PU_TO 50
19 #define SKL_DSP_PD_TO 50
20 #define SKL_DSP_RESET_TO 50
22 void skl_dsp_set_state_locked(struct sst_dsp
*ctx
, int state
)
24 mutex_lock(&ctx
->mutex
);
25 ctx
->sst_state
= state
;
26 mutex_unlock(&ctx
->mutex
);
30 * Initialize core power state and usage count. To be called after
31 * successful first boot. Hence core 0 will be running and other cores
34 void skl_dsp_init_core_state(struct sst_dsp
*ctx
)
36 struct skl_dev
*skl
= ctx
->thread_context
;
39 skl
->cores
.state
[SKL_DSP_CORE0_ID
] = SKL_DSP_RUNNING
;
40 skl
->cores
.usage_count
[SKL_DSP_CORE0_ID
] = 1;
42 for (i
= SKL_DSP_CORE0_ID
+ 1; i
< skl
->cores
.count
; i
++) {
43 skl
->cores
.state
[i
] = SKL_DSP_RESET
;
44 skl
->cores
.usage_count
[i
] = 0;
48 /* Get the mask for all enabled cores */
49 unsigned int skl_dsp_get_enabled_cores(struct sst_dsp
*ctx
)
51 struct skl_dev
*skl
= ctx
->thread_context
;
52 unsigned int core_mask
, en_cores_mask
;
55 core_mask
= SKL_DSP_CORES_MASK(skl
->cores
.count
);
57 val
= sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
);
59 /* Cores having CPA bit set */
60 en_cores_mask
= (val
& SKL_ADSPCS_CPA_MASK(core_mask
)) >>
63 /* And cores having CRST bit cleared */
64 en_cores_mask
&= (~val
& SKL_ADSPCS_CRST_MASK(core_mask
)) >>
65 SKL_ADSPCS_CRST_SHIFT
;
67 /* And cores having CSTALL bit cleared */
68 en_cores_mask
&= (~val
& SKL_ADSPCS_CSTALL_MASK(core_mask
)) >>
69 SKL_ADSPCS_CSTALL_SHIFT
;
70 en_cores_mask
&= core_mask
;
72 dev_dbg(ctx
->dev
, "DSP enabled cores mask = %x\n", en_cores_mask
);
78 skl_dsp_core_set_reset_state(struct sst_dsp
*ctx
, unsigned int core_mask
)
83 sst_dsp_shim_update_bits_unlocked(ctx
,
84 SKL_ADSP_REG_ADSPCS
, SKL_ADSPCS_CRST_MASK(core_mask
),
85 SKL_ADSPCS_CRST_MASK(core_mask
));
87 /* poll with timeout to check if operation successful */
88 ret
= sst_dsp_register_poll(ctx
,
90 SKL_ADSPCS_CRST_MASK(core_mask
),
91 SKL_ADSPCS_CRST_MASK(core_mask
),
94 if ((sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
) &
95 SKL_ADSPCS_CRST_MASK(core_mask
)) !=
96 SKL_ADSPCS_CRST_MASK(core_mask
)) {
97 dev_err(ctx
->dev
, "Set reset state failed: core_mask %x\n",
105 int skl_dsp_core_unset_reset_state(
106 struct sst_dsp
*ctx
, unsigned int core_mask
)
110 dev_dbg(ctx
->dev
, "In %s\n", __func__
);
113 sst_dsp_shim_update_bits_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
,
114 SKL_ADSPCS_CRST_MASK(core_mask
), 0);
116 /* poll with timeout to check if operation successful */
117 ret
= sst_dsp_register_poll(ctx
,
119 SKL_ADSPCS_CRST_MASK(core_mask
),
124 if ((sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
) &
125 SKL_ADSPCS_CRST_MASK(core_mask
)) != 0) {
126 dev_err(ctx
->dev
, "Unset reset state failed: core_mask %x\n",
135 is_skl_dsp_core_enable(struct sst_dsp
*ctx
, unsigned int core_mask
)
140 val
= sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
);
142 is_enable
= ((val
& SKL_ADSPCS_CPA_MASK(core_mask
)) &&
143 (val
& SKL_ADSPCS_SPA_MASK(core_mask
)) &&
144 !(val
& SKL_ADSPCS_CRST_MASK(core_mask
)) &&
145 !(val
& SKL_ADSPCS_CSTALL_MASK(core_mask
)));
147 dev_dbg(ctx
->dev
, "DSP core(s) enabled? %d : core_mask %x\n",
148 is_enable
, core_mask
);
153 static int skl_dsp_reset_core(struct sst_dsp
*ctx
, unsigned int core_mask
)
156 sst_dsp_shim_update_bits_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
,
157 SKL_ADSPCS_CSTALL_MASK(core_mask
),
158 SKL_ADSPCS_CSTALL_MASK(core_mask
));
160 /* set reset state */
161 return skl_dsp_core_set_reset_state(ctx
, core_mask
);
164 int skl_dsp_start_core(struct sst_dsp
*ctx
, unsigned int core_mask
)
168 /* unset reset state */
169 ret
= skl_dsp_core_unset_reset_state(ctx
, core_mask
);
174 dev_dbg(ctx
->dev
, "unstall/run core: core_mask = %x\n", core_mask
);
175 sst_dsp_shim_update_bits_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
,
176 SKL_ADSPCS_CSTALL_MASK(core_mask
), 0);
178 if (!is_skl_dsp_core_enable(ctx
, core_mask
)) {
179 skl_dsp_reset_core(ctx
, core_mask
);
180 dev_err(ctx
->dev
, "DSP start core failed: core_mask %x\n",
188 int skl_dsp_core_power_up(struct sst_dsp
*ctx
, unsigned int core_mask
)
193 sst_dsp_shim_update_bits_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
,
194 SKL_ADSPCS_SPA_MASK(core_mask
),
195 SKL_ADSPCS_SPA_MASK(core_mask
));
197 /* poll with timeout to check if operation successful */
198 ret
= sst_dsp_register_poll(ctx
,
200 SKL_ADSPCS_CPA_MASK(core_mask
),
201 SKL_ADSPCS_CPA_MASK(core_mask
),
205 if ((sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
) &
206 SKL_ADSPCS_CPA_MASK(core_mask
)) !=
207 SKL_ADSPCS_CPA_MASK(core_mask
)) {
208 dev_err(ctx
->dev
, "DSP core power up failed: core_mask %x\n",
216 int skl_dsp_core_power_down(struct sst_dsp
*ctx
, unsigned int core_mask
)
219 sst_dsp_shim_update_bits_unlocked(ctx
, SKL_ADSP_REG_ADSPCS
,
220 SKL_ADSPCS_SPA_MASK(core_mask
), 0);
222 /* poll with timeout to check if operation successful */
223 return sst_dsp_register_poll(ctx
,
225 SKL_ADSPCS_CPA_MASK(core_mask
),
231 int skl_dsp_enable_core(struct sst_dsp
*ctx
, unsigned int core_mask
)
236 ret
= skl_dsp_core_power_up(ctx
, core_mask
);
238 dev_err(ctx
->dev
, "dsp core power up failed: core_mask %x\n",
243 return skl_dsp_start_core(ctx
, core_mask
);
246 int skl_dsp_disable_core(struct sst_dsp
*ctx
, unsigned int core_mask
)
250 ret
= skl_dsp_reset_core(ctx
, core_mask
);
252 dev_err(ctx
->dev
, "dsp core reset failed: core_mask %x\n",
258 ret
= skl_dsp_core_power_down(ctx
, core_mask
);
260 dev_err(ctx
->dev
, "dsp core power down fail mask %x: %d\n",
265 if (is_skl_dsp_core_enable(ctx
, core_mask
)) {
266 dev_err(ctx
->dev
, "dsp core disable fail mask %x: %d\n",
274 int skl_dsp_boot(struct sst_dsp
*ctx
)
278 if (is_skl_dsp_core_enable(ctx
, SKL_DSP_CORE0_MASK
)) {
279 ret
= skl_dsp_reset_core(ctx
, SKL_DSP_CORE0_MASK
);
281 dev_err(ctx
->dev
, "dsp core0 reset fail: %d\n", ret
);
285 ret
= skl_dsp_start_core(ctx
, SKL_DSP_CORE0_MASK
);
287 dev_err(ctx
->dev
, "dsp core0 start fail: %d\n", ret
);
291 ret
= skl_dsp_disable_core(ctx
, SKL_DSP_CORE0_MASK
);
293 dev_err(ctx
->dev
, "dsp core0 disable fail: %d\n", ret
);
296 ret
= skl_dsp_enable_core(ctx
, SKL_DSP_CORE0_MASK
);
302 irqreturn_t
skl_dsp_sst_interrupt(int irq
, void *dev_id
)
304 struct sst_dsp
*ctx
= dev_id
;
306 irqreturn_t result
= IRQ_NONE
;
308 spin_lock(&ctx
->spinlock
);
310 val
= sst_dsp_shim_read_unlocked(ctx
, SKL_ADSP_REG_ADSPIS
);
311 ctx
->intr_status
= val
;
313 if (val
== 0xffffffff) {
314 spin_unlock(&ctx
->spinlock
);
318 if (val
& SKL_ADSPIS_IPC
) {
319 skl_ipc_int_disable(ctx
);
320 result
= IRQ_WAKE_THREAD
;
323 if (val
& SKL_ADSPIS_CL_DMA
) {
324 skl_cldma_int_disable(ctx
);
325 result
= IRQ_WAKE_THREAD
;
328 spin_unlock(&ctx
->spinlock
);
333 * skl_dsp_get_core/skl_dsp_put_core will be called inside DAPM context
334 * within the dapm mutex. Hence no separate lock is used.
336 int skl_dsp_get_core(struct sst_dsp
*ctx
, unsigned int core_id
)
338 struct skl_dev
*skl
= ctx
->thread_context
;
341 if (core_id
>= skl
->cores
.count
) {
342 dev_err(ctx
->dev
, "invalid core id: %d\n", core_id
);
346 skl
->cores
.usage_count
[core_id
]++;
348 if (skl
->cores
.state
[core_id
] == SKL_DSP_RESET
) {
349 ret
= ctx
->fw_ops
.set_state_D0(ctx
, core_id
);
351 dev_err(ctx
->dev
, "unable to get core%d\n", core_id
);
357 dev_dbg(ctx
->dev
, "core id %d state %d usage_count %d\n",
358 core_id
, skl
->cores
.state
[core_id
],
359 skl
->cores
.usage_count
[core_id
]);
363 EXPORT_SYMBOL_GPL(skl_dsp_get_core
);
365 int skl_dsp_put_core(struct sst_dsp
*ctx
, unsigned int core_id
)
367 struct skl_dev
*skl
= ctx
->thread_context
;
370 if (core_id
>= skl
->cores
.count
) {
371 dev_err(ctx
->dev
, "invalid core id: %d\n", core_id
);
375 if ((--skl
->cores
.usage_count
[core_id
] == 0) &&
376 (skl
->cores
.state
[core_id
] != SKL_DSP_RESET
)) {
377 ret
= ctx
->fw_ops
.set_state_D3(ctx
, core_id
);
379 dev_err(ctx
->dev
, "unable to put core %d: %d\n",
381 skl
->cores
.usage_count
[core_id
]++;
385 dev_dbg(ctx
->dev
, "core id %d state %d usage_count %d\n",
386 core_id
, skl
->cores
.state
[core_id
],
387 skl
->cores
.usage_count
[core_id
]);
391 EXPORT_SYMBOL_GPL(skl_dsp_put_core
);
393 int skl_dsp_wake(struct sst_dsp
*ctx
)
395 return skl_dsp_get_core(ctx
, SKL_DSP_CORE0_ID
);
397 EXPORT_SYMBOL_GPL(skl_dsp_wake
);
399 int skl_dsp_sleep(struct sst_dsp
*ctx
)
401 return skl_dsp_put_core(ctx
, SKL_DSP_CORE0_ID
);
403 EXPORT_SYMBOL_GPL(skl_dsp_sleep
);
405 struct sst_dsp
*skl_dsp_ctx_init(struct device
*dev
,
406 struct sst_dsp_device
*sst_dev
, int irq
)
411 sst
= devm_kzalloc(dev
, sizeof(*sst
), GFP_KERNEL
);
415 spin_lock_init(&sst
->spinlock
);
416 mutex_init(&sst
->mutex
);
418 sst
->sst_dev
= sst_dev
;
420 sst
->ops
= sst_dev
->ops
;
421 sst
->thread_context
= sst_dev
->thread_context
;
423 /* Initialise SST Audio DSP */
424 if (sst
->ops
->init
) {
425 ret
= sst
->ops
->init(sst
);
433 int skl_dsp_acquire_irq(struct sst_dsp
*sst
)
435 struct sst_dsp_device
*sst_dev
= sst
->sst_dev
;
438 /* Register the ISR */
439 ret
= request_threaded_irq(sst
->irq
, sst
->ops
->irq_handler
,
440 sst_dev
->thread
, IRQF_SHARED
, "AudioDSP", sst
);
442 dev_err(sst
->dev
, "unable to grab threaded IRQ %d, disabling device\n",
448 void skl_dsp_free(struct sst_dsp
*dsp
)
450 skl_ipc_int_disable(dsp
);
452 free_irq(dsp
->irq
, dsp
);
453 skl_ipc_op_int_disable(dsp
);
454 skl_dsp_disable_core(dsp
, SKL_DSP_CORE0_MASK
);
456 EXPORT_SYMBOL_GPL(skl_dsp_free
);
458 bool is_skl_dsp_running(struct sst_dsp
*ctx
)
460 return (ctx
->sst_state
== SKL_DSP_RUNNING
);
462 EXPORT_SYMBOL_GPL(is_skl_dsp_running
);