/*
 * Author:       Roy Huang <roy.huang@analog.com>
 *
 * Created:      Tue Sep 21 10:52:42 CEST 2004
 * Description:  Blackfin SPORT Driver
 *
 *               Copyright 2004-2007 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio.h>
#include <linux/bug.h>
#include <linux/module.h>
#include <asm/portmux.h>
#include <asm/dma.h>
#include <asm/blackfin.h>
#include <asm/cacheflush.h>

#include "bf5xx-sport.h"
/* delay between frame sync pulse and first data bit in multichannel mode */
#define FRAME_DELAY (1<<12)
/* note: multichannel operates in units of 8 channels;
 * tdm_count is the number of channels, NOT channels/8 ! */
int sport_set_multichannel(struct sport_device *sport,
		int tdm_count, u32 tx_mask, u32 rx_mask, int packed)
{
	pr_debug("%s tdm_count=%d tx_mask:0x%08x rx_mask:0x%08x packed=%d\n",
			__func__, tdm_count, tx_mask, rx_mask, packed);

	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	if (tdm_count & 0x7)
		return -EINVAL;

	if (tdm_count > 32)
		return -EINVAL;	/* Only support less than 32 channels now */

	if (tdm_count) {
		sport->regs->mcmc1 = ((tdm_count>>3)-1) << 12;
		sport->regs->mcmc2 = FRAME_DELAY | MCMEN |
				(packed ? (MCDTXPE|MCDRXPE) : 0);

		sport->regs->mtcs0 = tx_mask;
		sport->regs->mrcs0 = rx_mask;
		sport->regs->mtcs1 = 0;
		sport->regs->mrcs1 = 0;
		sport->regs->mtcs2 = 0;
		sport->regs->mrcs2 = 0;
		sport->regs->mtcs3 = 0;
		sport->regs->mrcs3 = 0;
	} else {
		sport->regs->mcmc1 = 0;
		sport->regs->mcmc2 = 0;

		sport->regs->mtcs0 = 0;
		sport->regs->mrcs0 = 0;
	}

	sport->regs->mtcs1 = 0; sport->regs->mtcs2 = 0; sport->regs->mtcs3 = 0;
	sport->regs->mrcs1 = 0; sport->regs->mrcs2 = 0; sport->regs->mrcs3 = 0;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_set_multichannel);
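/*
 * Example (illustrative only, not part of this driver): enabling a
 * 16-slot TDM frame with all slots active in both directions, unpacked.
 * The "sport" handle is assumed to come from sport_init().
 *
 *	if (sport_set_multichannel(sport, 16, 0xffff, 0xffff, 0))
 *		pr_err("SPORT busy, disable TSPEN/RSPEN first\n");
 *
 * Passing tdm_count = 0 disables multichannel mode again.
 */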
int sport_config_rx(struct sport_device *sport, unsigned int rcr1,
		unsigned int rcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->rcr1 = rcr1;
	sport->regs->rcr2 = rcr2;
	sport->regs->rclkdiv = clkdiv;
	sport->regs->rfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_rx);
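/*
 * Example (illustrative; check the Blackfin HRM for your part): deriving
 * clkdiv/fsdiv for an internally generated 48 kHz, 32-bit-per-frame
 * receive clock, assuming serial clock = SCLK / (2 * (clkdiv + 1)) and
 * a frame sync every (fsdiv + 1) serial clocks. "sclk" is a hypothetical
 * system clock value in Hz.
 *
 *	unsigned int bclk = 48000 * 32;
 *	unsigned int clkdiv = sclk / (2 * bclk) - 1;
 *	unsigned int fsdiv = 32 - 1;
 *	sport_config_rx(sport, RFSR | RCKFE, RSFSE, clkdiv, fsdiv);
 *
 * Flag choices (RFSR etc.) depend entirely on the codec's interface.
 */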
int sport_config_tx(struct sport_device *sport, unsigned int tcr1,
		unsigned int tcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->tcr1 = tcr1;
	sport->regs->tcr2 = tcr2;
	sport->regs->tclkdiv = clkdiv;
	sport->regs->tfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_tx);
static void setup_desc(struct dmasg *desc, void *buf, int fragcount,
		size_t fragsize, unsigned int cfg,
		unsigned int x_count, unsigned int ycount, size_t wdsize)
{
	int i;

	for (i = 0; i < fragcount; ++i) {
		desc[i].next_desc_addr = &(desc[i + 1]);
		desc[i].start_addr = (unsigned long)buf + i*fragsize;
		desc[i].cfg = cfg;
		desc[i].x_count = x_count;
		desc[i].x_modify = wdsize;
		desc[i].y_count = ycount;
		desc[i].y_modify = wdsize;
	}

	/* make the descriptor ring circular */
	desc[fragcount-1].next_desc_addr = desc;

	pr_debug("setup desc: desc0=%p, next0=%p, desc1=%p,"
		"next1=%p\nx_count=%x,y_count=%x,addr=0x%lx,cfg=0x%x\n",
		desc, desc[0].next_desc_addr,
		desc+1, desc[1].next_desc_addr,
		desc[0].x_count, desc[0].y_count,
		desc[0].start_addr, desc[0].cfg);
}
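/*
 * The loop above links fragcount descriptors into a circular list; for
 * fragcount = 3 the DMA controller walks them endlessly without CPU help:
 *
 *	desc[0] -> desc[1] -> desc[2] -+
 *	   ^---------------------------+
 *
 * Each descriptor covers one fragment of the buffer, so an interrupt
 * (when DI_EN is set in cfg) fires once per fragment.
 */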
static int sport_start(struct sport_device *sport)
{
	enable_dma(sport->dma_rx_chan);
	enable_dma(sport->dma_tx_chan);
	sport->regs->rcr1 |= RSPEN;
	sport->regs->tcr1 |= TSPEN;
	SSYNC();

	return 0;
}

static int sport_stop(struct sport_device *sport)
{
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();

	disable_dma(sport->dma_rx_chan);
	disable_dma(sport->dma_tx_chan);
	return 0;
}
static inline int sport_hook_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_rx_desc == NULL);
	BUG_ON(sport->curr_rx_desc == sport->dummy_rx_desc);

	/* The dummy buffer descriptor ring may be damaged */
	sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc + 1;

	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_rx_chan);
	/* Copy the descriptor which will be damaged to a backup */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_rx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
		continue;
	sport->curr_rx_desc = sport->dummy_rx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
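/*
 * Note on the dummy-ring scheme: once the SPORT is enabled its RX and TX
 * DMA both run, so a direction that is logically "stopped" is actually
 * parked on the two-descriptor dummy ring built in
 * sport_config_rx_dummy()/sport_config_tx_dummy(). sport_hook_rx_dummy()
 * above (and its TX twin later in this file) splices the dummy ring into
 * the live chain: it shortens the in-flight descriptor (dummy_count / 2)
 * so the switch happens quickly, points its next_desc_addr at the dummy
 * ring, busy-waits until the DMA controller has fetched a dummy
 * descriptor, then restores the temporarily rewritten descriptor from
 * the backup copy.
 */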
static inline int sport_rx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc;
		sport->curr_rx_desc = sport->dummy_rx_desc;
	} else
		sport->curr_rx_desc = sport->dma_rx_desc;

	set_dma_next_desc_addr(sport->dma_rx_chan, sport->curr_rx_desc);
	set_dma_x_count(sport->dma_rx_chan, 0);
	set_dma_x_modify(sport->dma_rx_chan, 0);
	set_dma_config(sport->dma_rx_chan, (DMAFLOW_LARGE | NDSIZE_9 |
				WDSIZE_32 | WNR));
	set_dma_curr_addr(sport->dma_rx_chan, sport->curr_rx_desc->start_addr);
	SSYNC();

	return 0;
}
static inline int sport_tx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc;
		sport->curr_tx_desc = sport->dummy_tx_desc;
	} else
		sport->curr_tx_desc = sport->dma_tx_desc;

	set_dma_next_desc_addr(sport->dma_tx_chan, sport->curr_tx_desc);
	set_dma_x_count(sport->dma_tx_chan, 0);
	set_dma_x_modify(sport->dma_tx_chan, 0);
	set_dma_config(sport->dma_tx_chan,
			(DMAFLOW_LARGE | NDSIZE_9 | WDSIZE_32));
	set_dma_curr_addr(sport->dma_tx_chan, sport->curr_tx_desc->start_addr);
	SSYNC();

	return 0;
}
int sport_rx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s enter\n", __func__);
	if (sport->rx_run)
		return -EBUSY;
	if (sport->tx_run) {
		/* tx is running, rx is not running */
		BUG_ON(sport->dma_rx_desc == NULL);
		BUG_ON(sport->curr_rx_desc != sport->dummy_rx_desc);
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
				sizeof(struct dmasg)) != sport->dummy_rx_desc)
			continue;
		sport->dummy_rx_desc->next_desc_addr = sport->dma_rx_desc;
		local_irq_restore(flags);
		sport->curr_rx_desc = sport->dma_rx_desc;
	} else {
		sport_tx_dma_start(sport, 1);
		sport_rx_dma_start(sport, 0);
		sport_start(sport);
	}

	sport->rx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_rx_start);
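/*
 * Example (illustrative): a PCM trigger callback would typically map
 * SNDRV_PCM_TRIGGER_START/STOP onto these helpers. The "substream"
 * plumbing below is hypothetical.
 *
 *	case SNDRV_PCM_TRIGGER_START:
 *		if (substream->stream == SNDRV_PCM_STREAM_CAPTURE)
 *			sport_rx_start(sport);
 *		else
 *			sport_tx_start(sport);
 *		break;
 *	case SNDRV_PCM_TRIGGER_STOP:
 *		if (substream->stream == SNDRV_PCM_STREAM_CAPTURE)
 *			sport_rx_stop(sport);
 *		else
 *			sport_tx_stop(sport);
 *		break;
 */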
int sport_rx_stop(struct sport_device *sport)
{
	pr_debug("%s enter\n", __func__);

	if (!sport->rx_run)
		return 0;
	if (sport->tx_run) {
		/* TX dma is still running, hook the dummy buffer */
		sport_hook_rx_dummy(sport);
	} else {
		/* Both rx and tx dma will be stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->rx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_rx_stop);
static inline int sport_hook_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_tx_desc == NULL);
	BUG_ON(sport->curr_tx_desc == sport->dummy_tx_desc);

	sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc + 1;

	/* Shorten the time on the last normal descriptor */
	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_tx_chan);
	/* Store the descriptor which will be damaged */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_tx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
		continue;
	sport->curr_tx_desc = sport->dummy_tx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
int sport_tx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s: tx_run:%d, rx_run:%d\n", __func__,
			sport->tx_run, sport->rx_run);
	if (sport->tx_run)
		return -EBUSY;
	if (sport->rx_run) {
		BUG_ON(sport->dma_tx_desc == NULL);
		BUG_ON(sport->curr_tx_desc != sport->dummy_tx_desc);
		/* Hook the normal buffer descriptor */
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
				sizeof(struct dmasg)) != sport->dummy_tx_desc)
			continue;
		sport->dummy_tx_desc->next_desc_addr = sport->dma_tx_desc;
		local_irq_restore(flags);
		sport->curr_tx_desc = sport->dma_tx_desc;
	} else {
		sport_tx_dma_start(sport, 0);
		/* Let rx dma run the dummy buffer */
		sport_rx_dma_start(sport, 1);
		sport_start(sport);
	}

	sport->tx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_tx_start);
int sport_tx_stop(struct sport_device *sport)
{
	if (!sport->tx_run)
		return 0;
	if (sport->rx_run) {
		/* RX is still running, hook the dummy buffer */
		sport_hook_tx_dummy(sport);
	} else {
		/* Both rx and tx dma stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->tx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_tx_stop);
static inline int compute_wdsize(size_t wdsize)
{
	/* Map the configured word size in bytes to the DMA WDSIZE flag */
	switch (wdsize) {
	case 1:
		return WDSIZE_8;
	case 2:
		return WDSIZE_16;
	case 4:
	default:
		return WDSIZE_32;
	}
}
int sport_config_rx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, frag:%d, fragsize:0x%lx\n", __func__,
			buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments of 64K words or more, use 2D DMA: split the word
	 * count into two factors that are each less than 64K. */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s(x_count:0x%x, y_count:0x%x)\n", __func__,
			x_count, y_count);

	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
					sport->dma_rx_desc, 0);

	/* Allocate a new descriptor ring as the current one. */
	sport->dma_rx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->rx_desc_bytes = fragcount * sizeof(struct dmasg);

	if (!sport->dma_rx_desc) {
		pr_err("Failed to allocate memory for rx desc\n");
		return -ENOMEM;
	}

	sport->rx_buf = buf;
	sport->rx_fragsize = fragsize;
	sport->rx_frags = fragcount;

	cfg     = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | WNR |
		  (DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_rx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_rx_dma);
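/*
 * Worked example of the 2D split above: a fragment of 0x18000 words
 * exceeds the 16-bit x_count limit, so the loop scans i = 16..1 for the
 * largest power of two that divides the count with a quotient below
 * 0x10000. Here i = 15 succeeds:
 *
 *	x_count = 1 << 15 = 0x8000;
 *	y_count = 0x18000 >> 15 = 3;	// 0x8000 * 3 = 0x18000
 *
 * setup_desc() then programs each descriptor as a 2D (x by y) transfer.
 */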
int sport_config_tx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, fragcount:%d, fragsize:0x%lx\n",
			__func__, buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments of 64K words or more, use 2D DMA: split the word
	 * count into two factors that are each less than 64K. */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s x_count:0x%x, y_count:0x%x\n", __func__,
			x_count, y_count);

	if (sport->dma_tx_desc) {
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);
	}

	sport->dma_tx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->tx_desc_bytes = fragcount * sizeof(struct dmasg);
	if (!sport->dma_tx_desc) {
		pr_err("Failed to allocate memory for tx desc\n");
		return -ENOMEM;
	}

	sport->tx_buf = buf;
	sport->tx_fragsize = fragsize;
	sport->tx_frags = fragcount;
	cfg     = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) |
		  (DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_tx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_tx_dma);
/* Set up the dummy DMA descriptor ring, which generates no interrupts */
static int sport_config_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);
	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy rx desc\n");
		return -ENOMEM;
	}
	sport->dummy_rx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf;
	config = DMAFLOW_LARGE | NDSIZE_9 | compute_wdsize(sport->wdsize)
		 | WNR | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	/* Make the two dummy descriptors point at each other */
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
static int sport_config_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);

	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy tx desc\n");
		return -ENOMEM;
	}
	sport->dummy_tx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf +
		sport->dummy_count;
	config = DMAFLOW_LARGE | NDSIZE_9 |
		 compute_wdsize(sport->wdsize) | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
unsigned long sport_curr_offset_rx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_rx_chan);

	return (unsigned char *)curr - sport->rx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_rx);
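/*
 * Example (illustrative): a PCM pointer callback can convert this byte
 * offset into frames. "substream" is hypothetical here, but
 * bytes_to_frames() is the standard ALSA helper.
 *
 *	size_t off = sport_curr_offset_rx(sport);
 *	return bytes_to_frames(substream->runtime, off);
 */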
unsigned long sport_curr_offset_tx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_tx_chan);

	return (unsigned char *)curr - sport->tx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_tx);
void sport_incfrag(struct sport_device *sport, int *frag, int tx)
{
	++(*frag);
	if (tx == 1 && *frag == sport->tx_frags)
		*frag = 0;

	if (tx == 0 && *frag == sport->rx_frags)
		*frag = 0;
}
EXPORT_SYMBOL(sport_incfrag);
void sport_decfrag(struct sport_device *sport, int *frag, int tx)
{
	--(*frag);
	if (tx == 1 && *frag == 0)
		*frag = sport->tx_frags;

	if (tx == 0 && *frag == 0)
		*frag = sport->rx_frags;
}
EXPORT_SYMBOL(sport_decfrag);
static int sport_check_status(struct sport_device *sport,
		unsigned int *sport_stat,
		unsigned int *rx_stat,
		unsigned int *tx_stat)
{
	int status = 0;

	if (sport_stat) {
		SSYNC();
		status = sport->regs->stat;
		if (status & (TOVF|TUVF|ROVF|RUVF))
			sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
		SSYNC();
		*sport_stat = status;
	}

	if (rx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_rx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_rx_chan);
		SSYNC();
		*rx_stat = status;
	}

	if (tx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_tx_chan);
		SSYNC();
		*tx_stat = status;
	}

	return 0;
}
int sport_dump_stat(struct sport_device *sport, char *buf, size_t len)
{
	int ret;

	ret = snprintf(buf, len,
			"sts: 0x%04x\n"
			"rx dma %d sts: 0x%04x tx dma %d sts: 0x%04x\n",
			sport->regs->stat,
			sport->dma_rx_chan,
			get_dma_curr_irqstat(sport->dma_rx_chan),
			sport->dma_tx_chan,
			get_dma_curr_irqstat(sport->dma_tx_chan));
	buf += ret;
	len -= ret;

	ret += snprintf(buf, len,
			"curr_rx_desc:0x%p, curr_tx_desc:0x%p\n"
			"dma_rx_desc:0x%p, dma_tx_desc:0x%p\n"
			"dummy_rx_desc:0x%p, dummy_tx_desc:0x%p\n",
			sport->curr_rx_desc, sport->curr_tx_desc,
			sport->dma_rx_desc, sport->dma_tx_desc,
			sport->dummy_rx_desc, sport->dummy_tx_desc);

	return ret;
}
static irqreturn_t rx_handler(int irq, void *dev_id)
{
	unsigned int rx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, &rx_stat, NULL);
	if (!(rx_stat & DMA_DONE))
		pr_err("rx dma is already stopped\n");

	if (sport->rx_callback) {
		sport->rx_callback(sport->rx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t tx_handler(int irq, void *dev_id)
{
	unsigned int tx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, NULL, &tx_stat);
	if (!(tx_stat & DMA_DONE)) {
		pr_err("tx dma is already stopped\n");
		return IRQ_HANDLED;
	}
	if (sport->tx_callback) {
		sport->tx_callback(sport->tx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t err_handler(int irq, void *dev_id)
{
	unsigned int status = 0;
	struct sport_device *sport = dev_id;

	pr_debug("%s\n", __func__);
	if (sport_check_status(sport, &status, NULL, NULL)) {
		pr_err("error checking status ??");
		return IRQ_NONE;
	}

	if (status & (TOVF|TUVF|ROVF|RUVF)) {
		pr_info("sport status error:%s%s%s%s\n",
				status & TOVF ? " TOVF" : "",
				status & TUVF ? " TUVF" : "",
				status & ROVF ? " ROVF" : "",
				status & RUVF ? " RUVF" : "");
		/* Restart the offending direction on its real or dummy ring */
		if (status & TOVF || status & TUVF) {
			disable_dma(sport->dma_tx_chan);
			if (sport->tx_run)
				sport_tx_dma_start(sport, 0);
			else
				sport_tx_dma_start(sport, 1);
			enable_dma(sport->dma_tx_chan);
		} else {
			disable_dma(sport->dma_rx_chan);
			if (sport->rx_run)
				sport_rx_dma_start(sport, 0);
			else
				sport_rx_dma_start(sport, 1);
			enable_dma(sport->dma_rx_chan);
		}
	}
	status = sport->regs->stat;
	if (status & (TOVF|TUVF|ROVF|RUVF))
		sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
	SSYNC();

	if (sport->err_callback)
		sport->err_callback(sport->err_data);

	return IRQ_HANDLED;
}
int sport_set_rx_callback(struct sport_device *sport,
		void (*rx_callback)(void *), void *rx_data)
{
	BUG_ON(rx_callback == NULL);
	sport->rx_callback = rx_callback;
	sport->rx_data = rx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_rx_callback);
int sport_set_tx_callback(struct sport_device *sport,
		void (*tx_callback)(void *), void *tx_data)
{
	BUG_ON(tx_callback == NULL);
	sport->tx_callback = tx_callback;
	sport->tx_data = tx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_tx_callback);
int sport_set_err_callback(struct sport_device *sport,
		void (*err_callback)(void *), void *err_data)
{
	BUG_ON(err_callback == NULL);
	sport->err_callback = err_callback;
	sport->err_data = err_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_err_callback);
static int sport_config_pdev(struct platform_device *pdev, struct sport_param *param)
{
	/* Extract settings from platform data */
	struct device *dev = &pdev->dev;
	struct bfin_snd_platform_data *pdata = dev->platform_data;
	struct resource *res;

	param->num = pdev->id;

	if (!pdata) {
		dev_err(dev, "no platform_data\n");
		return -ENODEV;
	}
	param->pin_req = pdata->pin_req;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "no MEM resource\n");
		return -ENODEV;
	}
	param->regs = (struct sport_register *)res->start;

	/* first RX, then TX */
	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (!res) {
		dev_err(dev, "no rx DMA resource\n");
		return -ENODEV;
	}
	param->dma_rx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_DMA, 1);
	if (!res) {
		dev_err(dev, "no tx DMA resource\n");
		return -ENODEV;
	}
	param->dma_tx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(dev, "no irq resource\n");
		return -ENODEV;
	}
	param->err_irq = res->start;

	return 0;
}
struct sport_device *sport_init(struct platform_device *pdev,
	unsigned int wdsize, unsigned int dummy_count, size_t priv_size)
{
	struct device *dev = &pdev->dev;
	struct sport_param param;
	struct sport_device *sport;
	int ret;

	dev_dbg(dev, "%s enter\n", __func__);

	param.wdsize = wdsize;
	param.dummy_count = dummy_count;
	BUG_ON(param.wdsize == 0 || param.dummy_count == 0);

	ret = sport_config_pdev(pdev, &param);
	if (ret)
		return NULL;

	if (peripheral_request_list(param.pin_req, "soc-audio")) {
		dev_err(dev, "requesting Peripherals failed\n");
		return NULL;
	}

	sport = kzalloc(sizeof(*sport), GFP_KERNEL);
	if (!sport) {
		dev_err(dev, "failed to allocate for sport device\n");
		goto err_malloc;
	}

	sport->num = param.num;
	sport->dma_rx_chan = param.dma_rx_chan;
	sport->dma_tx_chan = param.dma_tx_chan;
	sport->err_irq = param.err_irq;
	sport->regs = param.regs;
	sport->pin_req = param.pin_req;

	if (request_dma(sport->dma_rx_chan, "SPORT RX Data") == -EBUSY) {
		dev_err(dev, "failed to request RX dma %d\n", sport->dma_rx_chan);
		goto err_rx_dma;
	}
	if (set_dma_callback(sport->dma_rx_chan, rx_handler, sport) != 0) {
		dev_err(dev, "failed to request RX irq %d\n", sport->dma_rx_chan);
		goto err_rx_irq;
	}

	if (request_dma(sport->dma_tx_chan, "SPORT TX Data") == -EBUSY) {
		dev_err(dev, "failed to request TX dma %d\n", sport->dma_tx_chan);
		goto err_rx_irq;
	}

	if (set_dma_callback(sport->dma_tx_chan, tx_handler, sport) != 0) {
		dev_err(dev, "failed to request TX irq %d\n", sport->dma_tx_chan);
		goto err_tx_irq;
	}

	if (request_irq(sport->err_irq, err_handler, IRQF_SHARED, "SPORT err",
			sport) < 0) {
		dev_err(dev, "failed to request err irq %d\n", sport->err_irq);
		goto err_tx_irq;
	}

	dev_info(dev, "dma rx:%d tx:%d, err irq:%d, regs:%p\n",
			sport->dma_rx_chan, sport->dma_tx_chan,
			sport->err_irq, sport->regs);

	sport->wdsize = param.wdsize;
	sport->dummy_count = param.dummy_count;

	sport->private_data = kzalloc(priv_size, GFP_KERNEL);
	if (!sport->private_data) {
		dev_err(dev, "could not alloc priv data %zu bytes\n", priv_size);
		goto err_err_irq;
	}

	if (L1_DATA_A_LENGTH)
		sport->dummy_buf = l1_data_sram_zalloc(param.dummy_count * 2);
	else
		sport->dummy_buf = kzalloc(param.dummy_count * 2, GFP_KERNEL);
	if (sport->dummy_buf == NULL) {
		dev_err(dev, "failed to allocate dummy buffer\n");
		goto err_dummy_buf;
	}

	ret = sport_config_rx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config rx dummy ring\n");
		goto err_rx_dummy;
	}
	ret = sport_config_tx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config tx dummy ring\n");
		goto err_tx_dummy;
	}

	platform_set_drvdata(pdev, sport);

	return sport;
err_tx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_rx_desc);
	else
		dma_free_coherent(NULL, 2*sizeof(struct dmasg),
				sport->dummy_rx_desc, 0);
err_rx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_buf);
	else
		kfree(sport->dummy_buf);
err_dummy_buf:
	kfree(sport->private_data);
err_err_irq:
	free_irq(sport->err_irq, sport);
err_tx_irq:
	free_dma(sport->dma_tx_chan);
err_rx_irq:
	free_dma(sport->dma_rx_chan);
err_rx_dma:
	kfree(sport);
err_malloc:
	peripheral_free_list(param.pin_req);
	return NULL;
}
EXPORT_SYMBOL(sport_init);
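/*
 * Example (illustrative): a platform driver probe might bring the
 * device up like this; error handling is elided and the callback names
 * and private struct are hypothetical.
 *
 *	struct sport_device *sport;
 *
 *	sport = sport_init(pdev, 4, 16 * sizeof(u32),
 *			   sizeof(struct my_port_data));
 *	if (!sport)
 *		return -ENODEV;
 *	sport_set_rx_callback(sport, rx_done_cb, sport);
 *	sport_set_tx_callback(sport, tx_done_cb, sport);
 *	sport_set_err_callback(sport, err_cb, sport);
 *
 * wdsize is in bytes (4 = 32-bit words) and dummy_count is the byte
 * size of one dummy fragment; both must be non-zero (see the BUG_ON).
 */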
void sport_done(struct sport_device *sport)
{
	if (sport == NULL)
		return;

	sport_stop(sport);
	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
			sport->dma_rx_desc, 0);
	if (sport->dma_tx_desc)
		dma_free_coherent(NULL, sport->tx_desc_bytes,
			sport->dma_tx_desc, 0);

#if L1_DATA_A_LENGTH != 0
	l1_data_sram_free(sport->dummy_rx_desc);
	l1_data_sram_free(sport->dummy_tx_desc);
	l1_data_sram_free(sport->dummy_buf);
#else
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
		sport->dummy_rx_desc, 0);
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
		sport->dummy_tx_desc, 0);
	kfree(sport->dummy_buf);
#endif
	free_dma(sport->dma_rx_chan);
	free_dma(sport->dma_tx_chan);
	free_irq(sport->err_irq, sport);

	kfree(sport->private_data);
	peripheral_free_list(sport->pin_req);
	kfree(sport);
}
EXPORT_SYMBOL(sport_done);
/*
 * Only used to send a few bytes by PIO when DMA is not enabled:
 * the SPORT controller is configured but not enabled.
 * Multichannel mode does not work with PIO.
 */
/* Used by ac97 to write and read codec registers */
int sport_send_and_recv(struct sport_device *sport, u8 *out_data,
		u8 *in_data, int len)
{
	unsigned short dma_config;
	unsigned short status;
	unsigned long flags;
	unsigned long wait = 0;

	pr_debug("%s enter, out_data:%p, in_data:%p len:%d\n",
			__func__, out_data, in_data, len);
	pr_debug("tcr1:0x%04x, tcr2:0x%04x, tclkdiv:0x%04x, tfsdiv:0x%04x\n"
			"mcmc1:0x%04x, mcmc2:0x%04x\n",
			sport->regs->tcr1, sport->regs->tcr2,
			sport->regs->tclkdiv, sport->regs->tfsdiv,
			sport->regs->mcmc1, sport->regs->mcmc2);
	flush_dcache_range((unsigned)out_data, (unsigned)(out_data + len));

	/* Enable tx dma */
	dma_config = (RESTART | WDSIZE_16 | DI_EN);
	set_dma_start_addr(sport->dma_tx_chan, (unsigned long)out_data);
	set_dma_x_count(sport->dma_tx_chan, len/2);
	set_dma_x_modify(sport->dma_tx_chan, 2);
	set_dma_config(sport->dma_tx_chan, dma_config);
	enable_dma(sport->dma_tx_chan);

	if (in_data != NULL) {
		invalidate_dcache_range((unsigned)in_data,
				(unsigned)(in_data + len));
		/* Enable rx dma */
		dma_config = (RESTART | WDSIZE_16 | WNR | DI_EN);
		set_dma_start_addr(sport->dma_rx_chan, (unsigned long)in_data);
		set_dma_x_count(sport->dma_rx_chan, len/2);
		set_dma_x_modify(sport->dma_rx_chan, 2);
		set_dma_config(sport->dma_rx_chan, dma_config);
		enable_dma(sport->dma_rx_chan);
	}

	local_irq_save(flags);
	sport->regs->tcr1 |= TSPEN;
	sport->regs->rcr1 |= RSPEN;
	SSYNC();

	status = get_dma_curr_irqstat(sport->dma_tx_chan);
	while (status & DMA_RUN) {
		udelay(1);
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		pr_debug("DMA status:0x%04x\n", status);
		if (wait++ > 100)
			goto __over;
	}
	status = sport->regs->stat;
	wait = 0;

	while (!(status & TXHRE)) {
		pr_debug("sport status:0x%04x\n", status);
		udelay(1);
		status = *(unsigned short *)&sport->regs->stat;
		if (wait++ > 1000)
			goto __over;
	}
	/* Wait for the last byte to be sent out */
	udelay(20);
	pr_debug("sport status:0x%04x\n", status);

__over:
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();
	disable_dma(sport->dma_tx_chan);
	/* Clear the status */
	clear_dma_irqstat(sport->dma_tx_chan);
	if (in_data != NULL) {
		disable_dma(sport->dma_rx_chan);
		clear_dma_irqstat(sport->dma_rx_chan);
	}
	SSYNC();
	local_irq_restore(flags);

	return 0;
}
EXPORT_SYMBOL(sport_send_and_recv);
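/*
 * Example (illustrative): an AC97 register write pushes one command
 * frame through this PIO path before DMA is running. The frame layout
 * below is a hypothetical sketch; the real slot format comes from the
 * AC97 specification and the codec driver.
 *
 *	u16 frame[8] = { 0 };
 *
 *	frame[0] = 0xe000;		// tag: slots 0-2 valid
 *	frame[1] = reg << 8;		// slot 1: command address
 *	frame[2] = val;			// slot 2: command data
 *	sport_send_and_recv(sport, (u8 *)frame, NULL, sizeof(frame));
 */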
MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("SPORT driver for ADI Blackfin");
MODULE_LICENSE("GPL");