// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */
#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"
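
/*
 * LTR guardband value (in microseconds) programmed into the Intel
 * vendor-specific LTRP register during FW boot, see
 * hda_dsp_iccmax_stream_hw_params().
 */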
#define HDA_LTRP_GB_VALUE_US	95

/*
 * set up one of BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}
/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * set IOC if we don't use position IPC
	 * and period_wakeup is needed.
	 */
	ioc = hda->no_ipc_position ?
	      !stream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}
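
/*
 * Enable/disable the SPIB (Software Position In Buffer) capability for a
 * stream and program the SPIB byte count the hardware may consume.
 */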
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}
/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!stream)
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");

	/*
	 * Disable DMI Link L1 entry when capture stream is opened.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN, 0);

	return stream;
}
/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s;
	bool active_capture_stream = false;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag
	 * and check if there are any open capture streams.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
			active_capture_stream = true;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 entry if there are no capture streams open */
	if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
		if (!active_capture_stream)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						HDA_VS_INTEL_EM2,
						HDA_VS_INTEL_EM2_L1SEN,
						HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
		return -ENODEV;
	}

	return 0;
}
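
/*
 * Start or stop the host DMA engine for a stream in response to an ALSA
 * trigger command, and enable/disable the corresponding stream interrupt.
 */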
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		hstream->running = true;
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret < 0) {
			dev_err(sdev->dev,
				"error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
				__func__, cmd);
			return ret;
		}

		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
				  SOF_HDA_ADSP_REG_CL_SD_STS,
				  SOF_HDA_CL_DMA_SD_INT_MASK);

		hstream->running = false;
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index, 0x0);
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	return 0;
}
/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	u32 mask;
	int ret;

	/* check the stream before dereferencing it */
	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = 0x1 << hstream->index;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_hdac_chip_updateb(bus, VS_LTRP, HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}
/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 val, mask;
	u32 run;

	/* check stream and buffer before dereferencing them */
	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		dev_err(sdev->dev,
			"error: %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if it is not already enabled */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}
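
/*
 * Free the host side of a stream: re-couple host and link DMA (when the
 * link DMA channel is idle) and detach the stream from the substream.
 */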
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	stream->substream = NULL;

	return 0;
}
bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called at irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}
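
/*
 * Accumulate the number of bytes transferred since the last update into
 * hstream->curr_pos, handling wrap-around of the cyclic buffer.
 */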
static void
hda_dsp_set_bytes_transferred(struct hdac_stream *hstream, u64 buffer_size)
{
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = snd_hdac_stream_get_pos_posbuf(hstream);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}
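
/*
 * Check per-stream interrupt status, clear it, and notify the ALSA PCM core
 * (when the stream position is not reported over IPC) or the compress core
 * that a period/fragment has elapsed.
 */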
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_set_bytes_transferred(s,
					s->cstream->runtime->buffer_size);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}
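
/*
 * Threaded interrupt handler for stream interrupts: services stream status
 * bits and, when the HDA codec link is handled by this driver, the RIRB
 * response ring.
 */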
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/*
				 * Clearing the interrupt status here ensures
				 * that no interrupt gets masked after the RIRB
				 * wp is read in snd_hdac_bus_update_rirb.
				 */
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}
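
/*
 * Discover the number of playback/capture streams from GCAP, allocate the
 * position buffer, CORB/RIRB and per-stream BDL DMA buffers, and register
 * the host streams with the bus.
 */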
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}
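
/*
 * Release the DMA buffers and per-stream state allocated in
 * hda_dsp_stream_init().
 */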
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}