/*
 * Author: Roy Huang <roy.huang@analog.com>
 *
 * Created: Tue Sep 21 10:52:42 CEST 2004
 *
 * Blackfin SPORT Driver
 *
 * Copyright 2004-2007 Analog Devices Inc.
 *
 * Bugs: Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio.h>
#include <linux/bug.h>

#include <asm/portmux.h>
#include <asm/dma.h>
#include <asm/blackfin.h>
#include <asm/cacheflush.h>

#include "bf5xx-sport.h"
/* delay between frame sync pulse and first data bit in multichannel mode */
#define FRAME_DELAY	(1 << 12)
struct sport_device *sport_handle;
EXPORT_SYMBOL(sport_handle);
/* note: multichannel operates in units of 8 channels;
 * tdm_count is the total number of channels, NOT the number of
 * 8-channel units!
 */
int sport_set_multichannel(struct sport_device *sport,
		int tdm_count, u32 mask, int packed)
{
	pr_debug("%s tdm_count=%d mask:0x%08x packed=%d\n", __func__,
			tdm_count, mask, packed);

	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	if (tdm_count & 0x7)
		return -EINVAL;

	if (tdm_count > 32)
		return -EINVAL;	/* Only up to 32 channels supported for now */

	if (tdm_count) {
		sport->regs->mcmc1 = ((tdm_count >> 3) - 1) << 12;
		sport->regs->mcmc2 = FRAME_DELAY | MCMEN |
				(packed ? (MCDTXPE | MCDRXPE) : 0);

		sport->regs->mtcs0 = mask;
		sport->regs->mrcs0 = mask;
		sport->regs->mtcs1 = 0;
		sport->regs->mrcs1 = 0;
		sport->regs->mtcs2 = 0;
		sport->regs->mrcs2 = 0;
		sport->regs->mtcs3 = 0;
		sport->regs->mrcs3 = 0;
	} else {
		sport->regs->mcmc1 = 0;
		sport->regs->mcmc2 = 0;

		sport->regs->mtcs0 = 0;
		sport->regs->mrcs0 = 0;

		sport->regs->mtcs1 = 0; sport->regs->mtcs2 = 0; sport->regs->mtcs3 = 0;
		sport->regs->mrcs1 = 0; sport->regs->mrcs2 = 0; sport->regs->mrcs3 = 0;
	}

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_set_multichannel);
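
/*
 * Usage sketch (not called by this driver): enabling an 8-slot TDM frame
 * with all slots active and packed DMA.  The slot count and mask below
 * are illustrative values, not taken from any real board configuration.
 */
static __maybe_unused int example_enable_tdm8(struct sport_device *sport)
{
	/* 8 channels (a multiple of 8), slots 0-7 active, packed transfers */
	return sport_set_multichannel(sport, 8, 0xff, 1);
}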
int sport_config_rx(struct sport_device *sport, unsigned int rcr1,
		unsigned int rcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->rcr1 = rcr1;
	sport->regs->rcr2 = rcr2;
	sport->regs->rclkdiv = clkdiv;
	sport->regs->rfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_rx);
int sport_config_tx(struct sport_device *sport, unsigned int tcr1,
		unsigned int tcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->tcr1 = tcr1;
	sport->regs->tcr2 = tcr2;
	sport->regs->tclkdiv = clkdiv;
	sport->regs->tfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_tx);
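
/*
 * Usage sketch with illustrative divider values only: both directions
 * must be configured while the SPORT is disabled, otherwise the helpers
 * above return -EBUSY.  Real TCR/RCR bit patterns depend on the serial
 * format in use.
 */
static __maybe_unused int example_config_serial(struct sport_device *sport)
{
	int ret;

	/* raw control-register values, clock divider 1, frame sync every
	 * 32 bit clocks (hypothetical numbers) */
	ret = sport_config_rx(sport, 0, 0, 1, 31);
	if (ret)
		return ret;
	return sport_config_tx(sport, 0, 0, 1, 31);
}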
static void setup_desc(struct dmasg *desc, void *buf, int fragcount,
		size_t fragsize, unsigned int cfg,
		unsigned int x_count, unsigned int ycount, size_t wdsize)
{
	int i;

	for (i = 0; i < fragcount; ++i) {
		desc[i].next_desc_addr = &(desc[i + 1]);
		desc[i].start_addr = (unsigned long)buf + i * fragsize;
		desc[i].cfg = cfg;
		desc[i].x_count = x_count;
		desc[i].x_modify = wdsize;
		desc[i].y_count = ycount;
		desc[i].y_modify = wdsize;
	}

	/* make the descriptor ring circular */
	desc[fragcount - 1].next_desc_addr = desc;

	pr_debug("setup desc: desc0=%p, next0=%p, desc1=%p,"
		"next1=%p\nx_count=%x,y_count=%x,addr=0x%lx,cfg=0x%x\n",
		desc, desc[0].next_desc_addr,
		desc + 1, desc[1].next_desc_addr,
		desc[0].x_count, desc[0].y_count,
		desc[0].start_addr, desc[0].cfg);
}
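
/*
 * Shape of the ring setup_desc() builds, shown for fragcount = 3
 * (illustrative):
 *
 *	desc[0] -> desc[1] -> desc[2] -> desc[0] -> ...
 *
 * Each descriptor covers one fragsize slice of buf, so the DMA engine
 * loops over the whole buffer without CPU intervention.
 */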
static int sport_start(struct sport_device *sport)
{
	enable_dma(sport->dma_rx_chan);
	enable_dma(sport->dma_tx_chan);
	sport->regs->rcr1 |= RSPEN;
	sport->regs->tcr1 |= TSPEN;
	SSYNC();

	return 0;
}
static int sport_stop(struct sport_device *sport)
{
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();

	disable_dma(sport->dma_rx_chan);
	disable_dma(sport->dma_tx_chan);
	return 0;
}
static inline int sport_hook_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_rx_desc == NULL);
	BUG_ON(sport->curr_rx_desc == sport->dummy_rx_desc);

	/* Maybe the dummy buffer descriptor ring is damaged */
	sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc + 1;

	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_rx_chan);
	/* Copy the descriptor which will be damaged to backup */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_rx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is actually hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
		continue;
	sport->curr_rx_desc = sport->dummy_rx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
static inline int sport_rx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc;
		sport->curr_rx_desc = sport->dummy_rx_desc;
	} else
		sport->curr_rx_desc = sport->dma_rx_desc;

	set_dma_next_desc_addr(sport->dma_rx_chan, sport->curr_rx_desc);
	set_dma_x_count(sport->dma_rx_chan, 0);
	set_dma_x_modify(sport->dma_rx_chan, 0);
	set_dma_config(sport->dma_rx_chan, (DMAFLOW_LARGE | NDSIZE_9 |
				WDSIZE_32 | WNR));
	set_dma_curr_addr(sport->dma_rx_chan, sport->curr_rx_desc->start_addr);
	SSYNC();

	return 0;
}
static inline int sport_tx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc;
		sport->curr_tx_desc = sport->dummy_tx_desc;
	} else
		sport->curr_tx_desc = sport->dma_tx_desc;

	set_dma_next_desc_addr(sport->dma_tx_chan, sport->curr_tx_desc);
	set_dma_x_count(sport->dma_tx_chan, 0);
	set_dma_x_modify(sport->dma_tx_chan, 0);
	set_dma_config(sport->dma_tx_chan,
			(DMAFLOW_LARGE | NDSIZE_9 | WDSIZE_32));
	set_dma_curr_addr(sport->dma_tx_chan, sport->curr_tx_desc->start_addr);
	SSYNC();

	return 0;
}
int sport_rx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s enter\n", __func__);

	if (sport->rx_run)
		return -EBUSY;

	if (sport->tx_run) {
		/* tx is running, rx is not running */
		BUG_ON(sport->dma_rx_desc == NULL);
		BUG_ON(sport->curr_rx_desc != sport->dummy_rx_desc);
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
				sizeof(struct dmasg)) != sport->dummy_rx_desc)
			continue;
		sport->dummy_rx_desc->next_desc_addr = sport->dma_rx_desc;
		local_irq_restore(flags);
		sport->curr_rx_desc = sport->dma_rx_desc;
	} else {
		sport_tx_dma_start(sport, 1);
		sport_rx_dma_start(sport, 0);
		sport_start(sport);
	}

	sport->rx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_rx_start);
int sport_rx_stop(struct sport_device *sport)
{
	pr_debug("%s enter\n", __func__);

	if (!sport->rx_run)
		return 0;

	if (sport->tx_run) {
		/* TX dma is still running, hook the dummy buffer */
		sport_hook_rx_dummy(sport);
	} else {
		/* Both rx and tx dma will be stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->rx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_rx_stop);
static inline int sport_hook_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_tx_desc == NULL);
	BUG_ON(sport->curr_tx_desc == sport->dummy_tx_desc);

	sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc + 1;

	/* Shorten the time on last normal descriptor */
	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_tx_chan);
	/* Store the descriptor which will be damaged */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_tx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is actually hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
		continue;
	sport->curr_tx_desc = sport->dummy_tx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
int sport_tx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s: tx_run:%d, rx_run:%d\n", __func__,
			sport->tx_run, sport->rx_run);

	if (sport->tx_run)
		return -EBUSY;

	if (sport->rx_run) {
		BUG_ON(sport->dma_tx_desc == NULL);
		BUG_ON(sport->curr_tx_desc != sport->dummy_tx_desc);
		/* Hook the normal buffer descriptor */
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
				sizeof(struct dmasg)) != sport->dummy_tx_desc)
			continue;
		sport->dummy_tx_desc->next_desc_addr = sport->dma_tx_desc;
		local_irq_restore(flags);
		sport->curr_tx_desc = sport->dma_tx_desc;
	} else {
		sport_tx_dma_start(sport, 0);
		/* Let rx dma run the dummy buffer */
		sport_rx_dma_start(sport, 1);
		sport_start(sport);
	}

	sport->tx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_tx_start);
int sport_tx_stop(struct sport_device *sport)
{
	if (!sport->tx_run)
		return 0;

	if (sport->rx_run) {
		/* RX is still running, hook the dummy buffer */
		sport_hook_tx_dummy(sport);
	} else {
		/* Both rx and tx dma stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->tx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_tx_stop);
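
/*
 * Usage sketch of the start/stop pairing above: rx and tx may be started
 * and stopped independently; whichever side is idle keeps its DMA channel
 * circling the dummy ring.  The ordering below is only an illustration.
 */
static __maybe_unused int example_full_duplex(struct sport_device *sport)
{
	int ret;

	ret = sport_tx_start(sport);	/* rx side spins on the dummy ring */
	if (ret)
		return ret;
	ret = sport_rx_start(sport);	/* hooks the real rx ring in */
	if (ret) {
		sport_tx_stop(sport);
		return ret;
	}
	sport_rx_stop(sport);		/* tx keeps running on real data */
	return sport_tx_stop(sport);	/* last stop halts the hardware */
}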
static inline int compute_wdsize(size_t wdsize)
{
	switch (wdsize) {
	case 1:
		return WDSIZE_8;
	case 2:
		return WDSIZE_16;
	case 4:
	default:
		return WDSIZE_32;
	}
}
int sport_config_rx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, frag:%d, fragsize:0x%lx\n", __func__,
			buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments larger than 64k words we use 2d dma: split the
	 * fragment length in words into the product x_count * y_count,
	 * with both factors less than 64k.
	 */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;

		x_count = 1 << i;
		y_count = count >> i;
	}

	pr_debug("%s(x_count:0x%x, y_count:0x%x)\n", __func__,
			x_count, y_count);

	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
				sport->dma_rx_desc, 0);

	/* Allocate a new descriptor ring as the current one. */
	sport->dma_rx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->rx_desc_bytes = fragcount * sizeof(struct dmasg);

	if (!sport->dma_rx_desc) {
		pr_err("Failed to allocate memory for rx desc\n");
		return -ENOMEM;
	}

	sport->rx_buf = buf;
	sport->rx_fragsize = fragsize;
	sport->rx_frags = fragcount;

	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | WNR |
		(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_rx_desc, buf, fragcount, fragsize,
			cfg | DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_rx_dma);
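
/*
 * Worked example of the 2d split above: a fragment of 0x18000 words makes
 * x_count start at 0x18000 >= 0x10000, so the loop searches downward for
 * the largest power of two dividing it with a 16-bit quotient:
 *
 *	i = 16: 0x18000 & 0xffff = 0x8000	-> not divisible
 *	i = 15: 0x18000 & 0x7fff = 0		-> x_count = 0x8000, y_count = 3
 *
 * The transfer then runs as 3 rows of 0x8000 words each.
 */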
int sport_config_tx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, fragcount:%d, fragsize:0x%lx\n",
			__func__, buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments larger than 64k words we use 2d dma: split the
	 * fragment length in words into the product x_count * y_count,
	 * with both factors less than 64k.
	 */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;

		x_count = 1 << i;
		y_count = count >> i;
	}

	pr_debug("%s x_count:0x%x, y_count:0x%x\n", __func__,
			x_count, y_count);

	if (sport->dma_tx_desc) {
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);
	}

	sport->dma_tx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->tx_desc_bytes = fragcount * sizeof(struct dmasg);
	if (!sport->dma_tx_desc) {
		pr_err("Failed to allocate memory for tx desc\n");
		return -ENOMEM;
	}

	sport->tx_buf = buf;
	sport->tx_fragsize = fragsize;
	sport->tx_frags = fragcount;
	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) |
		(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_tx_desc, buf, fragcount, fragsize,
			cfg | DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_tx_dma);
/* Set up the dummy dma descriptor ring, which circles over a small
 * dummy buffer and generates no interrupts.
 */
static int sport_config_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);
	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy rx desc\n");
		return -ENOMEM;
	}
	sport->dummy_rx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf;
	config = DMAFLOW_LARGE | NDSIZE_9 | compute_wdsize(sport->wdsize)
		 | WNR | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count / sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc + 1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;

	return 0;
}
static int sport_config_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);

	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy tx desc\n");
		return -ENOMEM;
	}
	sport->dummy_tx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf +
			sport->dummy_count;
	config = DMAFLOW_LARGE | NDSIZE_9 |
		 compute_wdsize(sport->wdsize) | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count / sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc + 1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;

	return 0;
}
unsigned long sport_curr_offset_rx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_rx_chan);

	return (unsigned char *)curr - sport->rx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_rx);
unsigned long sport_curr_offset_tx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_tx_chan);

	return (unsigned char *)curr - sport->tx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_tx);
void sport_incfrag(struct sport_device *sport, int *frag, int tx)
{
	++(*frag);
	if (tx == 1 && *frag == sport->tx_frags)
		*frag = 0;

	if (tx == 0 && *frag == sport->rx_frags)
		*frag = 0;
}
EXPORT_SYMBOL(sport_incfrag);
void sport_decfrag(struct sport_device *sport, int *frag, int tx)
{
	--(*frag);
	if (tx == 1 && *frag == 0)
		*frag = sport->tx_frags;

	if (tx == 0 && *frag == 0)
		*frag = sport->rx_frags;
}
EXPORT_SYMBOL(sport_decfrag);
static int sport_check_status(struct sport_device *sport,
		unsigned int *sport_stat,
		unsigned int *rx_stat,
		unsigned int *tx_stat)
{
	int status = 0;

	if (sport_stat) {
		SSYNC();
		status = sport->regs->stat;
		if (status & (TOVF | TUVF | ROVF | RUVF))
			sport->regs->stat = (status &
					(TOVF | TUVF | ROVF | RUVF));
		SSYNC();
		*sport_stat = status;
	}

	if (rx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_rx_chan);
		if (status & (DMA_DONE | DMA_ERR))
			clear_dma_irqstat(sport->dma_rx_chan);
		SSYNC();
		*rx_stat = status;
	}

	if (tx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		if (status & (DMA_DONE | DMA_ERR))
			clear_dma_irqstat(sport->dma_tx_chan);
		SSYNC();
		*tx_stat = status;
	}

	return 0;
}
int sport_dump_stat(struct sport_device *sport, char *buf, size_t len)
{
	int ret;

	ret = snprintf(buf, len,
			"sts: 0x%04x\n"
			"rx dma %d sts: 0x%04x tx dma %d sts: 0x%04x\n",
			sport->regs->stat,
			sport->dma_rx_chan,
			get_dma_curr_irqstat(sport->dma_rx_chan),
			sport->dma_tx_chan,
			get_dma_curr_irqstat(sport->dma_tx_chan));
	buf += ret;
	len -= ret;

	ret += snprintf(buf, len,
			"curr_rx_desc:0x%p, curr_tx_desc:0x%p\n"
			"dma_rx_desc:0x%p, dma_tx_desc:0x%p\n"
			"dummy_rx_desc:0x%p, dummy_tx_desc:0x%p\n",
			sport->curr_rx_desc, sport->curr_tx_desc,
			sport->dma_rx_desc, sport->dma_tx_desc,
			sport->dummy_rx_desc, sport->dummy_tx_desc);

	return ret;
}
static irqreturn_t rx_handler(int irq, void *dev_id)
{
	unsigned int rx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, &rx_stat, NULL);
	if (!(rx_stat & DMA_DONE))
		pr_err("rx dma is already stopped\n");

	if (sport->rx_callback) {
		sport->rx_callback(sport->rx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t tx_handler(int irq, void *dev_id)
{
	unsigned int tx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, NULL, &tx_stat);
	if (!(tx_stat & DMA_DONE)) {
		pr_err("tx dma is already stopped\n");
		return IRQ_HANDLED;
	}
	if (sport->tx_callback) {
		sport->tx_callback(sport->tx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t err_handler(int irq, void *dev_id)
{
	unsigned int status = 0;
	struct sport_device *sport = dev_id;

	pr_debug("%s\n", __func__);
	if (sport_check_status(sport, &status, NULL, NULL)) {
		pr_err("error checking status ??");
		return IRQ_NONE;
	}

	if (status & (TOVF | TUVF | ROVF | RUVF)) {
		pr_info("sport status error:%s%s%s%s\n",
				status & TOVF ? " TOVF" : "",
				status & TUVF ? " TUVF" : "",
				status & ROVF ? " ROVF" : "",
				status & RUVF ? " RUVF" : "");
		if (status & TOVF || status & TUVF) {
			disable_dma(sport->dma_tx_chan);
			if (sport->tx_run)
				sport_tx_dma_start(sport, 0);
			else
				sport_tx_dma_start(sport, 1);
			enable_dma(sport->dma_tx_chan);
		} else {
			disable_dma(sport->dma_rx_chan);
			if (sport->rx_run)
				sport_rx_dma_start(sport, 0);
			else
				sport_rx_dma_start(sport, 1);
			enable_dma(sport->dma_rx_chan);
		}
	}
	status = sport->regs->stat;
	if (status & (TOVF | TUVF | ROVF | RUVF))
		sport->regs->stat = (status & (TOVF | TUVF | ROVF | RUVF));
	SSYNC();

	if (sport->err_callback)
		sport->err_callback(sport->err_data);

	return IRQ_HANDLED;
}
int sport_set_rx_callback(struct sport_device *sport,
		void (*rx_callback)(void *), void *rx_data)
{
	BUG_ON(rx_callback == NULL);
	sport->rx_callback = rx_callback;
	sport->rx_data = rx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_rx_callback);
int sport_set_tx_callback(struct sport_device *sport,
		void (*tx_callback)(void *), void *tx_data)
{
	BUG_ON(tx_callback == NULL);
	sport->tx_callback = tx_callback;
	sport->tx_data = tx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_tx_callback);
int sport_set_err_callback(struct sport_device *sport,
		void (*err_callback)(void *), void *err_data)
{
	BUG_ON(err_callback == NULL);
	sport->err_callback = err_callback;
	sport->err_data = err_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_err_callback);
struct sport_device *sport_init(struct sport_param *param, unsigned wdsize,
		unsigned dummy_count, void *private_data)
{
	int ret;
	struct sport_device *sport;

	pr_debug("%s enter\n", __func__);
	BUG_ON(param == NULL);
	BUG_ON(wdsize == 0 || dummy_count == 0);
	sport = kmalloc(sizeof(struct sport_device), GFP_KERNEL);
	if (!sport) {
		pr_err("Failed to allocate for sport device\n");
		return NULL;
	}

	memset(sport, 0, sizeof(struct sport_device));
	sport->dma_rx_chan = param->dma_rx_chan;
	sport->dma_tx_chan = param->dma_tx_chan;
	sport->err_irq = param->err_irq;
	sport->regs = param->regs;
	sport->private_data = private_data;

	if (request_dma(sport->dma_rx_chan, "SPORT RX Data") == -EBUSY) {
		pr_err("Failed to request RX dma %d\n", sport->dma_rx_chan);
		goto __init_err1;
	}
	if (set_dma_callback(sport->dma_rx_chan, rx_handler, sport) != 0) {
		pr_err("Failed to request RX irq %d\n", sport->dma_rx_chan);
		goto __init_err2;
	}

	if (request_dma(sport->dma_tx_chan, "SPORT TX Data") == -EBUSY) {
		pr_err("Failed to request TX dma %d\n", sport->dma_tx_chan);
		goto __init_err2;
	}

	if (set_dma_callback(sport->dma_tx_chan, tx_handler, sport) != 0) {
		pr_err("Failed to request TX irq %d\n", sport->dma_tx_chan);
		goto __init_err3;
	}

	if (request_irq(sport->err_irq, err_handler, IRQF_SHARED, "SPORT err",
			sport) < 0) {
		pr_err("Failed to request err irq:%d\n", sport->err_irq);
		goto __init_err3;
	}

	pr_err("dma rx:%d tx:%d, err irq:%d, regs:%p\n",
			sport->dma_rx_chan, sport->dma_tx_chan,
			sport->err_irq, sport->regs);

	sport->wdsize = wdsize;
	sport->dummy_count = dummy_count;

	if (L1_DATA_A_LENGTH)
		sport->dummy_buf = l1_data_sram_zalloc(dummy_count * 2);
	else
		sport->dummy_buf = kzalloc(dummy_count * 2, GFP_KERNEL);
	if (sport->dummy_buf == NULL) {
		pr_err("Failed to allocate dummy buffer\n");
		goto __error;
	}

	ret = sport_config_rx_dummy(sport);
	if (ret) {
		pr_err("Failed to config rx dummy ring\n");
		goto __error;
	}
	ret = sport_config_tx_dummy(sport);
	if (ret) {
		pr_err("Failed to config tx dummy ring\n");
		goto __error;
	}

	return sport;
__error:
	free_irq(sport->err_irq, sport);
__init_err3:
	free_dma(sport->dma_tx_chan);
__init_err2:
	free_dma(sport->dma_rx_chan);
__init_err1:
	kfree(sport);
	return NULL;
}
EXPORT_SYMBOL(sport_init);
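
/*
 * Bring-up sketch, assuming a board-supplied sport_param and a caller
 * callback; the buffer geometry below is hypothetical.  Callbacks and
 * descriptor rings are set up after sport_init() and before starting
 * either direction.
 */
static __maybe_unused struct sport_device *example_sport_open(
		struct sport_param *param, void (*cb)(void *),
		void *cb_data, void *buf, int frags, size_t fragsize)
{
	struct sport_device *sport;

	/* 4-byte words, 16-byte dummy buffers (illustrative sizes) */
	sport = sport_init(param, 4, 16, NULL);
	if (!sport)
		return NULL;
	sport_set_rx_callback(sport, cb, cb_data);
	sport_set_tx_callback(sport, cb, cb_data);
	if (sport_config_rx_dma(sport, buf, frags, fragsize) ||
	    sport_config_tx_dma(sport, buf, frags, fragsize)) {
		sport_done(sport);
		return NULL;
	}
	return sport;
}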
void sport_done(struct sport_device *sport)
{
	if (sport == NULL)
		return;

	sport_stop(sport);
	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
			sport->dma_rx_desc, 0);
	if (sport->dma_tx_desc)
		dma_free_coherent(NULL, sport->tx_desc_bytes,
			sport->dma_tx_desc, 0);

#if L1_DATA_A_LENGTH != 0
	l1_data_sram_free(sport->dummy_rx_desc);
	l1_data_sram_free(sport->dummy_tx_desc);
	l1_data_sram_free(sport->dummy_buf);
#else
	dma_free_coherent(NULL, 2 * sizeof(struct dmasg),
		sport->dummy_rx_desc, 0);
	dma_free_coherent(NULL, 2 * sizeof(struct dmasg),
		sport->dummy_tx_desc, 0);
	kfree(sport->dummy_buf);
#endif
	free_dma(sport->dma_rx_chan);
	free_dma(sport->dma_tx_chan);
	free_irq(sport->err_irq, sport);

	kfree(sport);
	sport = NULL;
}
EXPORT_SYMBOL(sport_done);
/* This function is only used to transfer a few bytes via PIO when dma
 * descriptors are not set up: the sport controller must be configured
 * but not yet enabled.  Multichannel mode cannot work with this pio
 * mode.
 */
/* Used by ac97 to write and read codec registers */
int sport_send_and_recv(struct sport_device *sport, u8 *out_data,
		u8 *in_data, int len)
{
	unsigned short dma_config;
	unsigned short status;
	unsigned long flags;
	unsigned long wait = 0;

	pr_debug("%s enter, out_data:%p, in_data:%p len:%d\n",
			__func__, out_data, in_data, len);
	pr_debug("tcr1:0x%04x, tcr2:0x%04x, tclkdiv:0x%04x, tfsdiv:0x%04x\n"
			"mcmc1:0x%04x, mcmc2:0x%04x\n",
			sport->regs->tcr1, sport->regs->tcr2,
			sport->regs->tclkdiv, sport->regs->tfsdiv,
			sport->regs->mcmc1, sport->regs->mcmc2);
	flush_dcache_range((unsigned)out_data, (unsigned)(out_data + len));

	/* Enable tx dma */
	dma_config = (RESTART | WDSIZE_16 | DI_EN);
	set_dma_start_addr(sport->dma_tx_chan, (unsigned long)out_data);
	set_dma_x_count(sport->dma_tx_chan, len / 2);
	set_dma_x_modify(sport->dma_tx_chan, 2);
	set_dma_config(sport->dma_tx_chan, dma_config);
	enable_dma(sport->dma_tx_chan);

	if (in_data != NULL) {
		invalidate_dcache_range((unsigned)in_data,
				(unsigned)(in_data + len));
		/* Enable rx dma */
		dma_config = (RESTART | WDSIZE_16 | WNR | DI_EN);
		set_dma_start_addr(sport->dma_rx_chan, (unsigned long)in_data);
		set_dma_x_count(sport->dma_rx_chan, len / 2);
		set_dma_x_modify(sport->dma_rx_chan, 2);
		set_dma_config(sport->dma_rx_chan, dma_config);
		enable_dma(sport->dma_rx_chan);
	}

	local_irq_save(flags);
	sport->regs->tcr1 |= TSPEN;
	sport->regs->rcr1 |= RSPEN;
	SSYNC();

	status = get_dma_curr_irqstat(sport->dma_tx_chan);
	while (status & DMA_RUN) {
		udelay(10);
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		pr_debug("DMA status:0x%04x\n", status);
		if (wait++ > 100)
			goto __over;
	}
	status = sport->regs->stat;
	wait = 0;

	while (!(status & TXHRE)) {
		pr_debug("sport status:0x%04x\n", status);
		udelay(10);
		status = *(unsigned short *)&sport->regs->stat;
		if (wait++ > 1000)
			break;
	}
	/* Wait for the last byte to be sent out */
	udelay(20);
	pr_debug("sport status:0x%04x\n", status);

__over:
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();
	disable_dma(sport->dma_tx_chan);
	/* Clear the status */
	clear_dma_irqstat(sport->dma_tx_chan);
	if (in_data != NULL) {
		disable_dma(sport->dma_rx_chan);
		clear_dma_irqstat(sport->dma_rx_chan);
	}
	SSYNC();
	local_irq_restore(flags);

	return 0;
}
EXPORT_SYMBOL(sport_send_and_recv);
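
/*
 * Usage sketch of the pio path, modelled on the ac97 register access
 * mentioned above.  The 16-byte command/response frames are hypothetical;
 * a real ac97 driver fills in proper tag/address/data slots.
 */
static __maybe_unused int example_codec_access(struct sport_device *sport)
{
	u8 cmd[16] = { 0 };	/* outgoing command frame */
	u8 resp[16];		/* codec response frame */

	/* SPORT must be configured but not yet enabled at this point */
	return sport_send_and_recv(sport, cmd, resp, sizeof(cmd));
}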
MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("SPORT driver for ADI Blackfin");
MODULE_LICENSE("GPL");