sound/soc/fsl/fsl_asrc_dma.c

/*
 * Freescale ASRC ALSA SoC Platform (DMA) driver
 *
 * Copyright (C) 2014 Freescale Semiconductor, Inc.
 *
 * Author: Nicolin Chen <nicoleotsuka@gmail.com>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2. This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/platform_data/dma-imx.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>

#include "fsl_asrc.h"

#define FSL_ASRC_DMABUF_SIZE	(256 * 1024)

static struct snd_pcm_hardware snd_imx_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID |
		SNDRV_PCM_INFO_PAUSE |
		SNDRV_PCM_INFO_RESUME,
	.buffer_bytes_max = FSL_ASRC_DMABUF_SIZE,
	.period_bytes_min = 128,
	.period_bytes_max = 65535, /* Limited by SDMA engine */
	.periods_min = 2,
	.periods_max = 255,
	.fifo_size = 0,
};

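/*
 * dmaengine filter callback: accept only general-purpose (SDMA) channels
 * and hand over the imx_dma_data describing the requested channel.
 */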
static bool filter(struct dma_chan *chan, void *param)
{
	if (!imx_dma_is_general_purpose(chan))
		return false;

	chan->private = param;

	return true;
}

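/*
 * Cyclic DMA completion callback for the Front-End channel: advance the
 * software pointer by one period, wrap at the buffer end, and notify ALSA
 * that a period has elapsed.
 */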
static void fsl_asrc_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	pair->pos += snd_pcm_lib_period_bytes(substream);
	if (pair->pos >= snd_pcm_lib_buffer_bytes(substream))
		pair->pos = 0;

	snd_pcm_period_elapsed(substream);
}

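/*
 * Prepare and submit the two cyclic DMA descriptors used per stream: the
 * Front-End one moves data between the audio buffer and the ASRC pair,
 * while the Back-End one shuttles converted data between the ASRC pair
 * and the Back-End DAI in device-to-device mode.
 */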
static int fsl_asrc_dma_prepare_and_submit(struct snd_pcm_substream *substream)
{
	u8 dir = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? OUT : IN;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct device *dev = rtd->platform->dev;
	unsigned long flags = DMA_CTRL_ACK;

	/* Prepare and submit Front-End DMA channel */
	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	pair->pos = 0;
	pair->desc[!dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[!dir], runtime->dma_addr,
			snd_pcm_lib_buffer_bytes(substream),
			snd_pcm_lib_period_bytes(substream),
			dir == OUT ? DMA_TO_DEVICE : DMA_FROM_DEVICE, flags);
	if (!pair->desc[!dir]) {
		dev_err(dev, "failed to prepare slave DMA for Front-End\n");
		return -ENOMEM;
	}

	pair->desc[!dir]->callback = fsl_asrc_dma_complete;
	pair->desc[!dir]->callback_param = substream;

	dmaengine_submit(pair->desc[!dir]);

	/* Prepare and submit Back-End DMA channel */
	pair->desc[dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[dir], 0xffff, 64, 64, DMA_DEV_TO_DEV, 0);
	if (!pair->desc[dir]) {
		dev_err(dev, "failed to prepare slave DMA for Back-End\n");
		return -ENOMEM;
	}

	dmaengine_submit(pair->desc[dir]);

	return 0;
}

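/*
 * PCM trigger: on start/resume, submit both descriptors and kick the IN
 * and OUT channels; on stop/suspend, terminate both channels.
 */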
static int fsl_asrc_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		ret = fsl_asrc_dma_prepare_and_submit(substream);
		if (ret)
			return ret;
		dma_async_issue_pending(pair->dma_chan[IN]);
		dma_async_issue_pending(pair->dma_chan[OUT]);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		dmaengine_terminate_all(pair->dma_chan[OUT]);
		dmaengine_terminate_all(pair->dma_chan[IN]);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

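/*
 * hw_params: borrow the Back-End DAI's dma_data via DPCM, point the
 * Front-End DMA at the ASRC pair FIFO, then request and configure a
 * second, device-to-device channel linking the ASRC pair to the
 * Back-End DAI FIFO.
 */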
static int fsl_asrc_dma_hw_params(struct snd_pcm_substream *substream,
				  struct snd_pcm_hw_params *params)
{
	enum dma_slave_buswidth buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_dmaengine_dai_dma_data *dma_params_fe = NULL;
	struct snd_dmaengine_dai_dma_data *dma_params_be = NULL;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc_priv = pair->asrc_priv;
	struct dma_slave_config config_fe, config_be;
	enum asrc_pair_index index = pair->index;
	struct device *dev = rtd->platform->dev;
	int stream = substream->stream;
	struct imx_dma_data *tmp_data;
	struct snd_soc_dpcm *dpcm;
	struct dma_chan *tmp_chan;
	struct device *dev_be;
	u8 dir = tx ? OUT : IN;
	dma_cap_mask_t mask;
	int ret;

	/* Fetch the Back-End dma_data from DPCM */
	list_for_each_entry(dpcm, &rtd->dpcm[stream].be_clients, list_be) {
		struct snd_soc_pcm_runtime *be = dpcm->be;
		struct snd_pcm_substream *substream_be;
		struct snd_soc_dai *dai = be->cpu_dai;

		if (dpcm->fe != rtd)
			continue;

		substream_be = snd_soc_dpcm_get_substream(be, stream);
		dma_params_be = snd_soc_dai_get_dma_data(dai, substream_be);
		dev_be = dai->dev;
		break;
	}

	if (!dma_params_be) {
		dev_err(dev, "failed to get the substream of Back-End\n");
		return -EINVAL;
	}

	/* Override dma_data of the Front-End and config its dmaengine */
	dma_params_fe = snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);
	dma_params_fe->addr = asrc_priv->paddr + REG_ASRDx(!dir, index);
	dma_params_fe->maxburst = dma_params_be->maxburst;

	pair->dma_chan[!dir] = fsl_asrc_get_dma_channel(pair, !dir);
	if (!pair->dma_chan[!dir]) {
		dev_err(dev, "failed to request DMA channel\n");
		return -EINVAL;
	}

	memset(&config_fe, 0, sizeof(config_fe));
	ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &config_fe);
	if (ret) {
		dev_err(dev, "failed to prepare DMA config for Front-End\n");
		return ret;
	}

	ret = dmaengine_slave_config(pair->dma_chan[!dir], &config_fe);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Front-End\n");
		return ret;
	}

	/* Request and config DMA channel for Back-End */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dma_cap_set(DMA_CYCLIC, mask);

	/* Get DMA request of Back-End */
	tmp_chan = dma_request_slave_channel(dev_be, tx ? "tx" : "rx");
	tmp_data = tmp_chan->private;
	pair->dma_data.dma_request = tmp_data->dma_request;
	dma_release_channel(tmp_chan);

	/* Get DMA request of Front-End */
	tmp_chan = fsl_asrc_get_dma_channel(pair, dir);
	tmp_data = tmp_chan->private;
	pair->dma_data.dma_request2 = tmp_data->dma_request;
	pair->dma_data.peripheral_type = tmp_data->peripheral_type;
	pair->dma_data.priority = tmp_data->priority;
	dma_release_channel(tmp_chan);

	pair->dma_chan[dir] = dma_request_channel(mask, filter, &pair->dma_data);
	if (!pair->dma_chan[dir]) {
		dev_err(dev, "failed to request DMA channel for Back-End\n");
		return -EINVAL;
	}

	if (asrc_priv->asrc_width == 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;

	config_be.direction = DMA_DEV_TO_DEV;
	config_be.src_addr_width = buswidth;
	config_be.src_maxburst = dma_params_be->maxburst;
	config_be.dst_addr_width = buswidth;
	config_be.dst_maxburst = dma_params_be->maxburst;

	if (tx) {
		config_be.src_addr = asrc_priv->paddr + REG_ASRDO(index);
		config_be.dst_addr = dma_params_be->addr;
	} else {
		config_be.dst_addr = asrc_priv->paddr + REG_ASRDI(index);
		config_be.src_addr = dma_params_be->addr;
	}

	ret = dmaengine_slave_config(pair->dma_chan[dir], &config_be);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Back-End\n");
		return ret;
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	return 0;
}

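/*
 * hw_free: detach the runtime buffer and release both DMA channels if
 * they were requested in hw_params.
 */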
static int fsl_asrc_dma_hw_free(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	snd_pcm_set_runtime_buffer(substream, NULL);

	if (pair->dma_chan[IN])
		dma_release_channel(pair->dma_chan[IN]);

	if (pair->dma_chan[OUT])
		dma_release_channel(pair->dma_chan[OUT]);

	pair->dma_chan[IN] = NULL;
	pair->dma_chan[OUT] = NULL;

	return 0;
}

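/*
 * PCM open: allocate an ASRC pair context, attach it to the runtime, and
 * apply the hardware constraints for this platform.
 */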
static int fsl_asrc_dma_startup(struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct device *dev = rtd->platform->dev;
	struct fsl_asrc *asrc_priv = dev_get_drvdata(dev);
	struct fsl_asrc_pair *pair;

	pair = kzalloc(sizeof(struct fsl_asrc_pair), GFP_KERNEL);
	if (!pair) {
		dev_err(dev, "failed to allocate pair\n");
		return -ENOMEM;
	}

	pair->asrc_priv = asrc_priv;

	runtime->private_data = pair;

	snd_pcm_hw_constraint_integer(substream->runtime,
				      SNDRV_PCM_HW_PARAM_PERIODS);
	snd_soc_set_runtime_hwparams(substream, &snd_imx_hardware);

	return 0;
}

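/*
 * PCM close: drop the driver's reference to this pair, if it still holds
 * one, and free the pair context.
 */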
static int fsl_asrc_dma_shutdown(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc_priv;

	if (!pair)
		return 0;

	asrc_priv = pair->asrc_priv;

	if (asrc_priv->pair[pair->index] == pair)
		asrc_priv->pair[pair->index] = NULL;

	kfree(pair);

	return 0;
}

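/*
 * PCM pointer: report the position tracked by the Front-End completion
 * callback, converted from bytes to frames.
 */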
static snd_pcm_uframes_t fsl_asrc_dma_pcm_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	return bytes_to_frames(substream->runtime, pair->pos);
}

static struct snd_pcm_ops fsl_asrc_dma_pcm_ops = {
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = fsl_asrc_dma_hw_params,
	.hw_free = fsl_asrc_dma_hw_free,
	.trigger = fsl_asrc_dma_trigger,
	.open = fsl_asrc_dma_startup,
	.close = fsl_asrc_dma_shutdown,
	.pointer = fsl_asrc_dma_pcm_pointer,
};

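/*
 * pcm_new: set the DMA mask for the card and preallocate one DMA buffer
 * per substream direction.
 */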
static int fsl_asrc_dma_pcm_new(struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm_substream *substream;
	struct snd_pcm *pcm = rtd->pcm;
	int ret, i;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(card->dev, "failed to set DMA mask\n");
		return ret;
	}

	for (i = SNDRV_PCM_STREAM_PLAYBACK; i <= SNDRV_PCM_STREAM_LAST; i++) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, pcm->card->dev,
				FSL_ASRC_DMABUF_SIZE, &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev, "failed to allocate DMA buffer\n");
			goto err;
		}
	}

	return 0;

err:
	if (--i == 0 && pcm->streams[i].substream)
		snd_dma_free_pages(&pcm->streams[i].substream->dma_buffer);

	return ret;
}

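/*
 * pcm_free: release the preallocated DMA buffers for both substream
 * directions.
 */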
static void fsl_asrc_dma_pcm_free(struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	int i;

	for (i = SNDRV_PCM_STREAM_PLAYBACK; i <= SNDRV_PCM_STREAM_LAST; i++) {
		substream = pcm->streams[i].substream;
		if (!substream)
			continue;

		snd_dma_free_pages(&substream->dma_buffer);
		substream->dma_buffer.area = NULL;
		substream->dma_buffer.addr = 0;
	}
}

struct snd_soc_platform_driver fsl_asrc_platform = {
	.ops = &fsl_asrc_dma_pcm_ops,
	.pcm_new = fsl_asrc_dma_pcm_new,
	.pcm_free = fsl_asrc_dma_pcm_free,
};
EXPORT_SYMBOL_GPL(fsl_asrc_platform);