sound/soc/fsl/fsl_asrc_dma.c
// SPDX-License-Identifier: GPL-2.0
//
// Freescale ASRC ALSA SoC Platform (DMA) driver
//
// Copyright (C) 2014 Freescale Semiconductor, Inc.
//
// Author: Nicolin Chen <nicoleotsuka@gmail.com>

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/platform_data/dma-imx.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>

#include "fsl_asrc.h"

#define FSL_ASRC_DMABUF_SIZE	(256 * 1024)
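/* PCM hardware capabilities advertised for the ASRC front-end stream */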
static struct snd_pcm_hardware snd_imx_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max = FSL_ASRC_DMABUF_SIZE,
	.period_bytes_min = 128,
	.period_bytes_max = 65535, /* Limited by SDMA engine */
	.periods_min = 2,
	.periods_max = 255,
	.fifo_size = 0,
};
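/*
 * DMA channel filter used with dma_request_channel() on the SDMA path:
 * accept only general purpose SDMA channels and stash the imx_dma_data
 * (DMA request lines, priority) in chan->private for the SDMA driver.
 */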
static bool filter(struct dma_chan *chan, void *param)
{
	if (!imx_dma_is_general_purpose(chan))
		return false;

	chan->private = param;

	return true;
}
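/*
 * Completion callback of the Front-End cyclic DMA: advance the ring
 * buffer position by one period and notify ALSA that a period elapsed.
 */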
static void fsl_asrc_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	pair->pos += snd_pcm_lib_period_bytes(substream);
	if (pair->pos >= snd_pcm_lib_buffer_bytes(substream))
		pair->pos = 0;

	snd_pcm_period_elapsed(substream);
}
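/*
 * Prepare and submit the two cyclic DMA descriptors of an ASRC pair:
 * the Front-End one moves audio between the ALSA buffer and the ASRC
 * data FIFO, the Back-End one moves it between the ASRC and the DAI
 * FIFO as a DEV_TO_DEV transfer. Only the Front-End descriptor gets a
 * period-elapsed callback.
 */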
static int fsl_asrc_dma_prepare_and_submit(struct snd_pcm_substream *substream,
					   struct snd_soc_component *component)
{
	u8 dir = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? OUT : IN;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct device *dev = component->dev;
	unsigned long flags = DMA_CTRL_ACK;

	/* Prepare and submit Front-End DMA channel */
	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	pair->pos = 0;
	pair->desc[!dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[!dir], runtime->dma_addr,
			snd_pcm_lib_buffer_bytes(substream),
			snd_pcm_lib_period_bytes(substream),
			dir == OUT ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM, flags);
	if (!pair->desc[!dir]) {
		dev_err(dev, "failed to prepare slave DMA for Front-End\n");
		return -ENOMEM;
	}

	pair->desc[!dir]->callback = fsl_asrc_dma_complete;
	pair->desc[!dir]->callback_param = substream;

	dmaengine_submit(pair->desc[!dir]);

	/* Prepare and submit Back-End DMA channel */
	pair->desc[dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[dir], 0xffff, 64, 64, DMA_DEV_TO_DEV, 0);
	if (!pair->desc[dir]) {
		dev_err(dev, "failed to prepare slave DMA for Back-End\n");
		return -ENOMEM;
	}

	dmaengine_submit(pair->desc[dir]);

	return 0;
}
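/*
 * PCM trigger: on start, prepare/submit both descriptors and kick the
 * input and output channels; on stop, terminate both channels.
 */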
static int fsl_asrc_dma_trigger(struct snd_soc_component *component,
				struct snd_pcm_substream *substream, int cmd)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		ret = fsl_asrc_dma_prepare_and_submit(substream, component);
		if (ret)
			return ret;
		dma_async_issue_pending(pair->dma_chan[IN]);
		dma_async_issue_pending(pair->dma_chan[OUT]);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		dmaengine_terminate_all(pair->dma_chan[OUT]);
		dmaengine_terminate_all(pair->dma_chan[IN]);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
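/*
 * hw_params: look up the Back-End DAI's dma_data through DPCM, point the
 * Front-End DMA at the ASRC data FIFO, then request and configure the
 * Back-End DEV_TO_DEV channel (through the SDMA filter, or directly from
 * the Front-End device on EDMA platforms).
 */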
static int fsl_asrc_dma_hw_params(struct snd_soc_component *component,
				  struct snd_pcm_substream *substream,
				  struct snd_pcm_hw_params *params)
{
	enum dma_slave_buswidth buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_dmaengine_dai_dma_data *dma_params_fe = NULL;
	struct snd_dmaengine_dai_dma_data *dma_params_be = NULL;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc_priv = pair->asrc_priv;
	struct dma_slave_config config_fe, config_be;
	enum asrc_pair_index index = pair->index;
	struct device *dev = component->dev;
	int stream = substream->stream;
	struct imx_dma_data *tmp_data;
	struct snd_soc_dpcm *dpcm;
	struct dma_chan *tmp_chan;
	struct device *dev_be;
	u8 dir = tx ? OUT : IN;
	dma_cap_mask_t mask;
	int ret;

	/* Fetch the Back-End dma_data from DPCM */
	for_each_dpcm_be(rtd, stream, dpcm) {
		struct snd_soc_pcm_runtime *be = dpcm->be;
		struct snd_pcm_substream *substream_be;
		struct snd_soc_dai *dai = be->cpu_dai;

		if (dpcm->fe != rtd)
			continue;

		substream_be = snd_soc_dpcm_get_substream(be, stream);
		dma_params_be = snd_soc_dai_get_dma_data(dai, substream_be);
		dev_be = dai->dev;
		break;
	}

	if (!dma_params_be) {
		dev_err(dev, "failed to get the substream of Back-End\n");
		return -EINVAL;
	}

	/* Override dma_data of the Front-End and config its dmaengine */
	dma_params_fe = snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);
	dma_params_fe->addr = asrc_priv->paddr + REG_ASRDx(!dir, index);
	dma_params_fe->maxburst = dma_params_be->maxburst;

	pair->dma_chan[!dir] = fsl_asrc_get_dma_channel(pair, !dir);
	if (!pair->dma_chan[!dir]) {
		dev_err(dev, "failed to request DMA channel\n");
		return -EINVAL;
	}

	memset(&config_fe, 0, sizeof(config_fe));
	ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &config_fe);
	if (ret) {
		dev_err(dev, "failed to prepare DMA config for Front-End\n");
		return ret;
	}

	ret = dmaengine_slave_config(pair->dma_chan[!dir], &config_fe);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Front-End\n");
		return ret;
	}

	/* Request and config DMA channel for Back-End */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dma_cap_set(DMA_CYCLIC, mask);

	/*
	 * An EDMA DEV_TO_DEV channel is fixed and bound with DMA event of each
	 * peripheral, unlike SDMA channel that is allocated dynamically. So no
	 * need to configure dma_request and dma_request2, but get dma_chan via
	 * dma_request_slave_channel directly with dma name of Front-End device
	 */
	if (!asrc_priv->soc->use_edma) {
		/* Get DMA request of Back-End */
		tmp_chan = dma_request_slave_channel(dev_be, tx ? "tx" : "rx");
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request = tmp_data->dma_request;
		dma_release_channel(tmp_chan);

		/* Get DMA request of Front-End */
		tmp_chan = fsl_asrc_get_dma_channel(pair, dir);
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request2 = tmp_data->dma_request;
		pair->dma_data.peripheral_type = tmp_data->peripheral_type;
		pair->dma_data.priority = tmp_data->priority;
		dma_release_channel(tmp_chan);

		pair->dma_chan[dir] =
			dma_request_channel(mask, filter, &pair->dma_data);
	} else {
		pair->dma_chan[dir] =
			fsl_asrc_get_dma_channel(pair, dir);
	}

	if (!pair->dma_chan[dir]) {
		dev_err(dev, "failed to request DMA channel for Back-End\n");
		return -EINVAL;
	}

	if (asrc_priv->asrc_width == 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;

	config_be.direction = DMA_DEV_TO_DEV;
	config_be.src_addr_width = buswidth;
	config_be.src_maxburst = dma_params_be->maxburst;
	config_be.dst_addr_width = buswidth;
	config_be.dst_maxburst = dma_params_be->maxburst;

	if (tx) {
		config_be.src_addr = asrc_priv->paddr + REG_ASRDO(index);
		config_be.dst_addr = dma_params_be->addr;
	} else {
		config_be.dst_addr = asrc_priv->paddr + REG_ASRDI(index);
		config_be.src_addr = dma_params_be->addr;
	}

	ret = dmaengine_slave_config(pair->dma_chan[dir], &config_be);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Back-End\n");
		return ret;
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	return 0;
}
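/* hw_free: drop the runtime buffer and release both DMA channels */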
static int fsl_asrc_dma_hw_free(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	snd_pcm_set_runtime_buffer(substream, NULL);

	if (pair->dma_chan[IN])
		dma_release_channel(pair->dma_chan[IN]);

	if (pair->dma_chan[OUT])
		dma_release_channel(pair->dma_chan[OUT]);

	pair->dma_chan[IN] = NULL;
	pair->dma_chan[OUT] = NULL;

	return 0;
}
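/*
 * open: allocate a pair context, then use a temporary pair and DMA
 * channel to refine snd_imx_hardware against the DMA engine's real
 * capabilities before registering it as the runtime hardware.
 */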
static int fsl_asrc_dma_startup(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_dmaengine_dai_dma_data *dma_data;
	struct device *dev = component->dev;
	struct fsl_asrc *asrc_priv = dev_get_drvdata(dev);
	struct fsl_asrc_pair *pair;
	struct dma_chan *tmp_chan = NULL;
	u8 dir = tx ? OUT : IN;
	bool release_pair = true;
	int ret = 0;

	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(dev, "failed to set pcm hw params periods\n");
		return ret;
	}

	pair = kzalloc(sizeof(struct fsl_asrc_pair), GFP_KERNEL);
	if (!pair)
		return -ENOMEM;

	pair->asrc_priv = asrc_priv;

	runtime->private_data = pair;

	/* Request a dummy pair, which will be released later.
	 * Request pair function needs channel num as input, for this
	 * dummy pair, we just request "1" channel temporarily.
	 */
	ret = fsl_asrc_request_pair(1, pair);
	if (ret < 0) {
		dev_err(dev, "failed to request asrc pair\n");
		goto req_pair_err;
	}

	/* Request a dummy dma channel, which will be released later. */
	tmp_chan = fsl_asrc_get_dma_channel(pair, dir);
	if (!tmp_chan) {
		dev_err(dev, "failed to get dma channel\n");
		ret = -EINVAL;
		goto dma_chan_err;
	}

	dma_data = snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);

	/* Refine the snd_imx_hardware according to caps of DMA. */
	ret = snd_dmaengine_pcm_refine_runtime_hwparams(substream,
							dma_data,
							&snd_imx_hardware,
							tmp_chan);
	if (ret < 0) {
		dev_err(dev, "failed to refine runtime hwparams\n");
		goto out;
	}

	release_pair = false;
	snd_soc_set_runtime_hwparams(substream, &snd_imx_hardware);

out:
	dma_release_channel(tmp_chan);

dma_chan_err:
	fsl_asrc_release_pair(pair);

req_pair_err:
	if (release_pair)
		kfree(pair);

	return ret;
}
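/* close: detach the pair from the ASRC context and free it */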
static int fsl_asrc_dma_shutdown(struct snd_soc_component *component,
				 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc_priv;

	if (!pair)
		return 0;

	asrc_priv = pair->asrc_priv;

	if (asrc_priv->pair[pair->index] == pair)
		asrc_priv->pair[pair->index] = NULL;

	kfree(pair);

	return 0;
}
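/* pointer: report the Front-End position tracked by the DMA callback */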
static snd_pcm_uframes_t
fsl_asrc_dma_pcm_pointer(struct snd_soc_component *component,
			 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	return bytes_to_frames(substream->runtime, pair->pos);
}
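/*
 * pcm_construct: set a 32-bit DMA mask and preallocate one DMA buffer
 * per stream direction.
 */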
static int fsl_asrc_dma_pcm_new(struct snd_soc_component *component,
				struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm_substream *substream;
	struct snd_pcm *pcm = rtd->pcm;
	int ret, i;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(card->dev, "failed to set DMA mask\n");
		return ret;
	}

	for (i = SNDRV_PCM_STREAM_PLAYBACK; i <= SNDRV_PCM_STREAM_LAST; i++) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, pcm->card->dev,
				FSL_ASRC_DMABUF_SIZE, &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev, "failed to allocate DMA buffer\n");
			goto err;
		}
	}

	return 0;

err:
	if (--i == 0 && pcm->streams[i].substream)
		snd_dma_free_pages(&pcm->streams[i].substream->dma_buffer);

	return ret;
}
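/* pcm_destruct: free the preallocated per-stream DMA buffers */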
static void fsl_asrc_dma_pcm_free(struct snd_soc_component *component,
				  struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	int i;

	for (i = SNDRV_PCM_STREAM_PLAYBACK; i <= SNDRV_PCM_STREAM_LAST; i++) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		snd_dma_free_pages(&substream->dma_buffer);
		substream->dma_buffer.area = NULL;
		substream->dma_buffer.addr = 0;
	}
}
struct snd_soc_component_driver fsl_asrc_component = {
	.name          = DRV_NAME,
	.hw_params     = fsl_asrc_dma_hw_params,
	.hw_free       = fsl_asrc_dma_hw_free,
	.trigger       = fsl_asrc_dma_trigger,
	.open          = fsl_asrc_dma_startup,
	.close         = fsl_asrc_dma_shutdown,
	.pointer       = fsl_asrc_dma_pcm_pointer,
	.pcm_construct = fsl_asrc_dma_pcm_new,
	.pcm_destruct  = fsl_asrc_dma_pcm_free,
};
EXPORT_SYMBOL_GPL(fsl_asrc_component);