/*
 * Author: Roy Huang <roy.huang@analog.com>
 *
 * Created: Tue Sep 21 10:52:42 CEST 2004
 * Description: Blackfin SPORT Driver
 *
 * Copyright 2004-2007 Analog Devices Inc.
 *
 * Bugs: Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio.h>
#include <linux/bug.h>
#include <asm/portmux.h>
#include <asm/dma.h>
#include <asm/blackfin.h>
#include <asm/cacheflush.h>

#include "bf5xx-sport.h"
/* delay between frame sync pulse and first data bit in multichannel mode */
#define FRAME_DELAY	(1 << 12)
/* note: multichannel mode works in units of 8 channels;
 * tdm_count is the number of channels, NOT the number of 8-channel units */
int sport_set_multichannel(struct sport_device *sport,
		int tdm_count, u32 mask, int packed)
{
	pr_debug("%s tdm_count=%d mask:0x%08x packed=%d\n", __func__,
			tdm_count, mask, packed);

	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	if (tdm_count & 0x7)
		return -EINVAL;

	if (tdm_count > 32)
		return -EINVAL; /* Only support less than 32 channels now */

	if (tdm_count) {
		sport->regs->mcmc1 = ((tdm_count >> 3) - 1) << 12;
		sport->regs->mcmc2 = FRAME_DELAY | MCMEN |
				(packed ? (MCDTXPE | MCDRXPE) : 0);

		sport->regs->mtcs0 = mask;
		sport->regs->mrcs0 = mask;
		sport->regs->mtcs1 = 0;
		sport->regs->mrcs1 = 0;
		sport->regs->mtcs2 = 0;
		sport->regs->mrcs2 = 0;
		sport->regs->mtcs3 = 0;
		sport->regs->mrcs3 = 0;
	} else {
		sport->regs->mcmc1 = 0;
		sport->regs->mcmc2 = 0;

		sport->regs->mtcs0 = 0;
		sport->regs->mrcs0 = 0;
	}

	sport->regs->mtcs1 = 0; sport->regs->mtcs2 = 0; sport->regs->mtcs3 = 0;
	sport->regs->mrcs1 = 0; sport->regs->mrcs2 = 0; sport->regs->mrcs3 = 0;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_set_multichannel);
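
/*
 * Illustrative use only (the values are hypothetical, not mandated by
 * this driver): enable an 8-slot TDM frame with all eight slots active
 * and packed transfers, assuming the SPORT is currently disabled:
 *
 *	ret = sport_set_multichannel(sport, 8, 0xff, 1);
 *
 * Passing tdm_count == 0 switches multichannel operation off again.
 */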
int sport_config_rx(struct sport_device *sport, unsigned int rcr1,
		unsigned int rcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->rcr1 = rcr1;
	sport->regs->rcr2 = rcr2;
	sport->regs->rclkdiv = clkdiv;
	sport->regs->rfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_rx);
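
/*
 * A minimal sketch of a receiver setup, assuming the SPORT is stopped;
 * the bit values below are placeholders rather than settings this driver
 * prescribes. rcr1/rcr2 take raw SPORTx_RCR1/RCR2 bit patterns, clkdiv
 * and fsdiv the raw serial clock and frame sync dividers:
 *
 *	ret = sport_config_rx(sport, RFSR, 0xf, 0, 0);
 */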
int sport_config_tx(struct sport_device *sport, unsigned int tcr1,
		unsigned int tcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->tcr1 = tcr1;
	sport->regs->tcr2 = tcr2;
	sport->regs->tclkdiv = clkdiv;
	sport->regs->tfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_tx);
static void setup_desc(struct dmasg *desc, void *buf, int fragcount,
		size_t fragsize, unsigned int cfg,
		unsigned int x_count, unsigned int ycount, size_t wdsize)
{
	int i;

	for (i = 0; i < fragcount; ++i) {
		desc[i].next_desc_addr = &(desc[i + 1]);
		desc[i].start_addr = (unsigned long)buf + i*fragsize;
		desc[i].cfg = cfg;
		desc[i].x_count = x_count;
		desc[i].x_modify = wdsize;
		desc[i].y_count = ycount;
		desc[i].y_modify = wdsize;
	}

	/* make circular */
	desc[fragcount-1].next_desc_addr = desc;

	pr_debug("setup desc: desc0=%p, next0=%p, desc1=%p,"
		"next1=%p\nx_count=%x,y_count=%x,addr=0x%lx,cfg=0x%x\n",
		desc, desc[0].next_desc_addr,
		desc+1, desc[1].next_desc_addr,
		desc[0].x_count, desc[0].y_count,
		desc[0].start_addr, desc[0].cfg);
}
static int sport_start(struct sport_device *sport)
{
	enable_dma(sport->dma_rx_chan);
	enable_dma(sport->dma_tx_chan);
	sport->regs->rcr1 |= RSPEN;
	sport->regs->tcr1 |= TSPEN;
	SSYNC();

	return 0;
}
static int sport_stop(struct sport_device *sport)
{
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();

	disable_dma(sport->dma_rx_chan);
	disable_dma(sport->dma_tx_chan);
	return 0;
}
static inline int sport_hook_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_rx_desc == NULL);
	BUG_ON(sport->curr_rx_desc == sport->dummy_rx_desc);

	/* Maybe the dummy buffer descriptor ring is damaged */
	sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc + 1;

	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_rx_chan);
	/* Copy the descriptor which will be damaged to backup */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_rx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
		continue;
	sport->curr_rx_desc = sport->dummy_rx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
static inline int sport_rx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc;
		sport->curr_rx_desc = sport->dummy_rx_desc;
	} else
		sport->curr_rx_desc = sport->dma_rx_desc;

	set_dma_next_desc_addr(sport->dma_rx_chan, sport->curr_rx_desc);
	set_dma_x_count(sport->dma_rx_chan, 0);
	set_dma_x_modify(sport->dma_rx_chan, 0);
	set_dma_config(sport->dma_rx_chan, (DMAFLOW_LARGE | NDSIZE_9 |
			WDSIZE_32 | WNR));
	set_dma_curr_addr(sport->dma_rx_chan, sport->curr_rx_desc->start_addr);
	SSYNC();

	return 0;
}
static inline int sport_tx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc;
		sport->curr_tx_desc = sport->dummy_tx_desc;
	} else
		sport->curr_tx_desc = sport->dma_tx_desc;

	set_dma_next_desc_addr(sport->dma_tx_chan, sport->curr_tx_desc);
	set_dma_x_count(sport->dma_tx_chan, 0);
	set_dma_x_modify(sport->dma_tx_chan, 0);
	set_dma_config(sport->dma_tx_chan,
			(DMAFLOW_LARGE | NDSIZE_9 | WDSIZE_32));
	set_dma_curr_addr(sport->dma_tx_chan, sport->curr_tx_desc->start_addr);
	SSYNC();

	return 0;
}
int sport_rx_start(struct sport_device *sport)
{
	unsigned long flags;
	pr_debug("%s enter\n", __func__);
	if (sport->rx_run)
		return -EBUSY;
	if (sport->tx_run) {
		/* tx is running, rx is not running */
		BUG_ON(sport->dma_rx_desc == NULL);
		BUG_ON(sport->curr_rx_desc != sport->dummy_rx_desc);
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
				sizeof(struct dmasg)) != sport->dummy_rx_desc)
			continue;
		sport->dummy_rx_desc->next_desc_addr = sport->dma_rx_desc;
		local_irq_restore(flags);
		sport->curr_rx_desc = sport->dma_rx_desc;
	} else {
		sport_tx_dma_start(sport, 1);
		sport_rx_dma_start(sport, 0);
		sport_start(sport);
	}

	sport->rx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_rx_start);
int sport_rx_stop(struct sport_device *sport)
{
	pr_debug("%s enter\n", __func__);

	if (!sport->rx_run)
		return 0;
	if (sport->tx_run) {
		/* TX dma is still running, hook the dummy buffer */
		sport_hook_rx_dummy(sport);
	} else {
		/* Both rx and tx dma will be stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->rx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_rx_stop);
static inline int sport_hook_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_tx_desc == NULL);
	BUG_ON(sport->curr_tx_desc == sport->dummy_tx_desc);

	sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc + 1;

	/* Shorten the time on last normal descriptor */
	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_tx_chan);
	/* Store the descriptor which will be damaged */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_tx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
		continue;
	sport->curr_tx_desc = sport->dummy_tx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
int sport_tx_start(struct sport_device *sport)
{
	unsigned long flags;
	pr_debug("%s: tx_run:%d, rx_run:%d\n", __func__,
			sport->tx_run, sport->rx_run);
	if (sport->tx_run)
		return -EBUSY;
	if (sport->rx_run) {
		BUG_ON(sport->dma_tx_desc == NULL);
		BUG_ON(sport->curr_tx_desc != sport->dummy_tx_desc);
		/* Hook the normal buffer descriptor */
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
				sizeof(struct dmasg)) != sport->dummy_tx_desc)
			continue;
		sport->dummy_tx_desc->next_desc_addr = sport->dma_tx_desc;
		local_irq_restore(flags);
		sport->curr_tx_desc = sport->dma_tx_desc;
	} else {
		sport_tx_dma_start(sport, 0);
		/* Let rx dma run the dummy buffer */
		sport_rx_dma_start(sport, 1);
		sport_start(sport);
	}

	sport->tx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_tx_start);
int sport_tx_stop(struct sport_device *sport)
{
	if (!sport->tx_run)
		return 0;
	if (sport->rx_run) {
		/* RX is still running, hook the dummy buffer */
		sport_hook_tx_dummy(sport);
	} else {
		/* Both rx and tx dma stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->tx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_tx_stop);
static inline int compute_wdsize(size_t wdsize)
{
	/* map the word size in bytes onto the DMA WDSIZE config bits */
	switch (wdsize) {
	case 1:
		return WDSIZE_8;
	case 2:
		return WDSIZE_16;
	case 4:
	default:
		return WDSIZE_32;
	}
}
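
/*
 * Example: for a stream of 2-byte (16-bit) words, compute_wdsize(2)
 * yields WDSIZE_16, which is OR'ed into the DMA config word by the
 * sport_config_*_dma() helpers below.
 */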
int sport_config_rx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, frag:%d, fragsize:0x%lx\n", __func__,
			buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments larger than 64k words we use 2d dma:
	 * split the word count into a product of two factors,
	 * each of them less than 64k. */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s(x_count:0x%x, y_count:0x%x)\n", __func__,
			x_count, y_count);

	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
				sport->dma_rx_desc, 0);

	/* Allocate a new descriptor ring as current one. */
	sport->dma_rx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->rx_desc_bytes = fragcount * sizeof(struct dmasg);

	if (!sport->dma_rx_desc) {
		pr_err("Failed to allocate memory for rx desc\n");
		return -ENOMEM;
	}

	sport->rx_buf = buf;
	sport->rx_fragsize = fragsize;
	sport->rx_frags = fragcount;

	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | WNR |
			(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_rx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_rx_dma);
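
/*
 * Worked example of the 2D split above (numbers illustrative): a fragment
 * of 0x18000 words overflows the 16-bit DMA count, so the loop factors it
 * as x_count = 0x8000 (i = 15, since 0x18000 & 0x7fff == 0) and
 * y_count = 0x18000 >> 15 = 3, i.e. three rows of 32768 words each.
 */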
int sport_config_tx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, fragcount:%d, fragsize:0x%lx\n",
			__func__, buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* For fragments larger than 64k words we use 2d dma:
	 * split the word count into a product of two factors,
	 * each of them less than 64k. */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s x_count:0x%x, y_count:0x%x\n", __func__,
			x_count, y_count);

	if (sport->dma_tx_desc) {
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);
	}

	sport->dma_tx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->tx_desc_bytes = fragcount * sizeof(struct dmasg);
	if (!sport->dma_tx_desc) {
		pr_err("Failed to allocate memory for tx desc\n");
		return -ENOMEM;
	}

	sport->tx_buf = buf;
	sport->tx_fragsize = fragsize;
	sport->tx_frags = fragcount;
	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) |
			(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_tx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_tx_dma);
/* Set up the dummy dma descriptor ring, which doesn't generate
 * interrupts (the channel's x_modify is set to 0 when it runs) */
static int sport_config_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);
	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy rx desc\n");
		return -ENOMEM;
	}
	sport->dummy_rx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf;
	config = DMAFLOW_LARGE | NDSIZE_9 | compute_wdsize(sport->wdsize)
			| WNR | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
static int sport_config_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);

	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy tx desc\n");
		return -ENOMEM;
	}
	sport->dummy_tx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf +
			sport->dummy_count;
	config = DMAFLOW_LARGE | NDSIZE_9 |
			compute_wdsize(sport->wdsize) | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
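
/*
 * Both dummy rings end up as two identical descriptors chained head to
 * tail, so the DMA engine can circle through them indefinitely without
 * raising interrupts (DI_EN is not set in their config):
 *
 *	desc[0] -> desc[1] -> desc[0] -> ...
 */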
unsigned long sport_curr_offset_rx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_rx_chan);

	return (unsigned char *)curr - sport->rx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_rx);

unsigned long sport_curr_offset_tx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_tx_chan);

	return (unsigned char *)curr - sport->tx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_tx);
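
/*
 * A caller can derive which fragment is currently being transferred from
 * the byte offset; a sketch, assuming fragsize is the value previously
 * passed to sport_config_tx_dma():
 *
 *	unsigned long frag = sport_curr_offset_tx(sport) / sport->tx_fragsize;
 */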
void sport_incfrag(struct sport_device *sport, int *frag, int tx)
{
	++(*frag);
	if (tx == 1 && *frag == sport->tx_frags)
		*frag = 0;

	if (tx == 0 && *frag == sport->rx_frags)
		*frag = 0;
}
EXPORT_SYMBOL(sport_incfrag);

void sport_decfrag(struct sport_device *sport, int *frag, int tx)
{
	--(*frag);
	if (tx == 1 && *frag == 0)
		*frag = sport->tx_frags;

	if (tx == 0 && *frag == 0)
		*frag = sport->rx_frags;
}
EXPORT_SYMBOL(sport_decfrag);
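
/*
 * Wrap-around example (illustrative): with sport->tx_frags == 4 and
 * *frag == 3, sport_incfrag(sport, &frag, 1) first advances the index
 * to 4 and then wraps it back to 0, keeping it inside the ring.
 */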
static int sport_check_status(struct sport_device *sport,
		unsigned int *sport_stat,
		unsigned int *rx_stat,
		unsigned int *tx_stat)
{
	int status = 0;

	if (sport_stat) {
		SSYNC();
		status = sport->regs->stat;
		if (status & (TOVF|TUVF|ROVF|RUVF))
			sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
		SSYNC();
		*sport_stat = status;
	}

	if (rx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_rx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_rx_chan);
		SSYNC();
		*rx_stat = status;
	}

	if (tx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_tx_chan);
		SSYNC();
		*tx_stat = status;
	}

	return 0;
}
int sport_dump_stat(struct sport_device *sport, char *buf, size_t len)
{
	int ret;

	ret = snprintf(buf, len,
			"sts: 0x%04x\n"
			"rx dma %d sts: 0x%04x tx dma %d sts: 0x%04x\n",
			sport->regs->stat,
			sport->dma_rx_chan,
			get_dma_curr_irqstat(sport->dma_rx_chan),
			sport->dma_tx_chan,
			get_dma_curr_irqstat(sport->dma_tx_chan));
	buf += ret;
	len -= ret;

	ret += snprintf(buf, len,
			"curr_rx_desc:0x%p, curr_tx_desc:0x%p\n"
			"dma_rx_desc:0x%p, dma_tx_desc:0x%p\n"
			"dummy_rx_desc:0x%p, dummy_tx_desc:0x%p\n",
			sport->curr_rx_desc, sport->curr_tx_desc,
			sport->dma_rx_desc, sport->dma_tx_desc,
			sport->dummy_rx_desc, sport->dummy_tx_desc);

	return ret;
}
static irqreturn_t rx_handler(int irq, void *dev_id)
{
	unsigned int rx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, &rx_stat, NULL);
	if (!(rx_stat & DMA_DONE))
		pr_err("rx dma is already stopped\n");

	if (sport->rx_callback) {
		sport->rx_callback(sport->rx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t tx_handler(int irq, void *dev_id)
{
	unsigned int tx_stat;
	struct sport_device *sport = dev_id;
	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, NULL, &tx_stat);
	if (!(tx_stat & DMA_DONE)) {
		pr_err("tx dma is already stopped\n");
		return IRQ_HANDLED;
	}
	if (sport->tx_callback) {
		sport->tx_callback(sport->tx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t err_handler(int irq, void *dev_id)
{
	unsigned int status = 0;
	struct sport_device *sport = dev_id;

	pr_debug("%s\n", __func__);
	if (sport_check_status(sport, &status, NULL, NULL)) {
		pr_err("error checking status\n");
		return IRQ_NONE;
	}

	if (status & (TOVF|TUVF|ROVF|RUVF)) {
		pr_info("sport status error:%s%s%s%s\n",
				status & TOVF ? " TOVF" : "",
				status & TUVF ? " TUVF" : "",
				status & ROVF ? " ROVF" : "",
				status & RUVF ? " RUVF" : "");
		if (status & TOVF || status & TUVF) {
			disable_dma(sport->dma_tx_chan);
			if (sport->tx_run)
				sport_tx_dma_start(sport, 0);
			else
				sport_tx_dma_start(sport, 1);
			enable_dma(sport->dma_tx_chan);
		} else {
			disable_dma(sport->dma_rx_chan);
			if (sport->rx_run)
				sport_rx_dma_start(sport, 0);
			else
				sport_rx_dma_start(sport, 1);
			enable_dma(sport->dma_rx_chan);
		}
	}
	status = sport->regs->stat;
	if (status & (TOVF|TUVF|ROVF|RUVF))
		sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
	SSYNC();

	if (sport->err_callback)
		sport->err_callback(sport->err_data);

	return IRQ_HANDLED;
}
int sport_set_rx_callback(struct sport_device *sport,
		void (*rx_callback)(void *), void *rx_data)
{
	BUG_ON(rx_callback == NULL);
	sport->rx_callback = rx_callback;
	sport->rx_data = rx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_rx_callback);

int sport_set_tx_callback(struct sport_device *sport,
		void (*tx_callback)(void *), void *tx_data)
{
	BUG_ON(tx_callback == NULL);
	sport->tx_callback = tx_callback;
	sport->tx_data = tx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_tx_callback);

int sport_set_err_callback(struct sport_device *sport,
		void (*err_callback)(void *), void *err_data)
{
	BUG_ON(err_callback == NULL);
	sport->err_callback = err_callback;
	sport->err_data = err_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_err_callback);
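
/*
 * Typical registration from a client driver (handler names hypothetical);
 * the callbacks run in interrupt context and therefore must not sleep:
 *
 *	sport_set_rx_callback(sport, my_rx_done, substream);
 *	sport_set_tx_callback(sport, my_tx_done, substream);
 *	sport_set_err_callback(sport, my_err, substream);
 */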
static int sport_config_pdev(struct platform_device *pdev, struct sport_param *param)
{
	/* Extract settings from platform data */
	struct device *dev = &pdev->dev;
	struct bfin_snd_platform_data *pdata = dev->platform_data;
	struct resource *res;

	param->num = pdev->id;

	if (!pdata) {
		dev_err(dev, "no platform_data\n");
		return -ENODEV;
	}
	param->pin_req = pdata->pin_req;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "no MEM resource\n");
		return -ENODEV;
	}
	param->regs = (struct sport_register *)res->start;

	/* first RX, then TX */
	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (!res) {
		dev_err(dev, "no rx DMA resource\n");
		return -ENODEV;
	}
	param->dma_rx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_DMA, 1);
	if (!res) {
		dev_err(dev, "no tx DMA resource\n");
		return -ENODEV;
	}
	param->dma_tx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(dev, "no irq resource\n");
		return -ENODEV;
	}
	param->err_irq = res->start;

	return 0;
}
struct sport_device *sport_init(struct platform_device *pdev,
		unsigned int wdsize, unsigned int dummy_count, size_t priv_size)
{
	struct device *dev = &pdev->dev;
	struct sport_param param;
	struct sport_device *sport;
	int ret;

	dev_dbg(dev, "%s enter\n", __func__);

	param.wdsize = wdsize;
	param.dummy_count = dummy_count;
	BUG_ON(param.wdsize == 0 || param.dummy_count == 0);

	ret = sport_config_pdev(pdev, &param);
	if (ret)
		return NULL;

	if (peripheral_request_list(param.pin_req, "soc-audio")) {
		dev_err(dev, "requesting Peripherals failed\n");
		return NULL;
	}

	sport = kzalloc(sizeof(*sport), GFP_KERNEL);
	if (!sport) {
		dev_err(dev, "failed to allocate for sport device\n");
		goto err_alloc;
	}

	sport->num = param.num;
	sport->dma_rx_chan = param.dma_rx_chan;
	sport->dma_tx_chan = param.dma_tx_chan;
	sport->err_irq = param.err_irq;
	sport->regs = param.regs;
	sport->pin_req = param.pin_req;

	if (request_dma(sport->dma_rx_chan, "SPORT RX Data") == -EBUSY) {
		dev_err(dev, "failed to request RX dma %d\n", sport->dma_rx_chan);
		goto err_rx_dma;
	}
	if (set_dma_callback(sport->dma_rx_chan, rx_handler, sport) != 0) {
		dev_err(dev, "failed to request RX irq %d\n", sport->dma_rx_chan);
		goto err_rx_irq;
	}

	if (request_dma(sport->dma_tx_chan, "SPORT TX Data") == -EBUSY) {
		dev_err(dev, "failed to request TX dma %d\n", sport->dma_tx_chan);
		goto err_rx_irq;
	}

	if (set_dma_callback(sport->dma_tx_chan, tx_handler, sport) != 0) {
		dev_err(dev, "failed to request TX irq %d\n", sport->dma_tx_chan);
		goto err_tx_irq;
	}

	if (request_irq(sport->err_irq, err_handler, IRQF_SHARED, "SPORT err",
			sport) < 0) {
		dev_err(dev, "failed to request err irq %d\n", sport->err_irq);
		goto err_tx_irq;
	}

	dev_info(dev, "dma rx:%d tx:%d, err irq:%d, regs:%p\n",
			sport->dma_rx_chan, sport->dma_tx_chan,
			sport->err_irq, sport->regs);

	sport->wdsize = param.wdsize;
	sport->dummy_count = param.dummy_count;

	sport->private_data = kzalloc(priv_size, GFP_KERNEL);
	if (!sport->private_data) {
		dev_err(dev, "could not alloc priv data %zu bytes\n", priv_size);
		goto err_priv_data;
	}

	if (L1_DATA_A_LENGTH)
		sport->dummy_buf = l1_data_sram_zalloc(param.dummy_count * 2);
	else
		sport->dummy_buf = kzalloc(param.dummy_count * 2, GFP_KERNEL);
	if (sport->dummy_buf == NULL) {
		dev_err(dev, "failed to allocate dummy buffer\n");
		goto err_dummy_buf;
	}

	ret = sport_config_rx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config rx dummy ring\n");
		goto err_rx_dummy;
	}
	ret = sport_config_tx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config tx dummy ring\n");
		goto err_tx_dummy;
	}

	platform_set_drvdata(pdev, sport);

	return sport;
err_tx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_rx_desc);
	else
		dma_free_coherent(NULL, 2*sizeof(struct dmasg),
				sport->dummy_rx_desc, 0);
err_rx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_buf);
	else
		kfree(sport->dummy_buf);
err_dummy_buf:
	kfree(sport->private_data);
err_priv_data:
	free_irq(sport->err_irq, sport);
err_tx_irq:
	free_dma(sport->dma_tx_chan);
err_rx_irq:
	free_dma(sport->dma_rx_chan);
err_rx_dma:
	kfree(sport);
err_alloc:
	peripheral_free_list(param.pin_req);
	return NULL;
}
EXPORT_SYMBOL(sport_init);
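
/*
 * Sketch of a probe-time call (the private struct is hypothetical):
 * 4-byte words and a dummy area of 1024 bytes per direction:
 *
 *	sport = sport_init(pdev, 4, 1024, sizeof(struct my_priv));
 *	if (!sport)
 *		return -ENODEV;
 */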
void sport_done(struct sport_device *sport)
{
	if (sport == NULL)
		return;

	sport_stop(sport);
	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
				sport->dma_rx_desc, 0);
	if (sport->dma_tx_desc)
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);

#if L1_DATA_A_LENGTH != 0
	l1_data_sram_free(sport->dummy_rx_desc);
	l1_data_sram_free(sport->dummy_tx_desc);
	l1_data_sram_free(sport->dummy_buf);
#else
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
			sport->dummy_rx_desc, 0);
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
			sport->dummy_tx_desc, 0);
	kfree(sport->dummy_buf);
#endif
	free_dma(sport->dma_rx_chan);
	free_dma(sport->dma_tx_chan);
	free_irq(sport->err_irq, sport);

	kfree(sport->private_data);
	peripheral_free_list(sport->pin_req);
	kfree(sport);
}
EXPORT_SYMBOL(sport_done);
/* It is only used to send several bytes when dma is not enabled;
 * the sport controller is configured but not enabled.
 * Multichannel mode does not work with PIO mode. */
/* Used by ac97 to write and read codec register */
int sport_send_and_recv(struct sport_device *sport, u8 *out_data,
		u8 *in_data, int len)
{
	unsigned short dma_config;
	unsigned short status;
	unsigned long flags;
	unsigned long wait = 0;

	pr_debug("%s enter, out_data:%p, in_data:%p len:%d\n",
			__func__, out_data, in_data, len);
	pr_debug("tcr1:0x%04x, tcr2:0x%04x, tclkdiv:0x%04x, tfsdiv:0x%04x\n"
			"mcmc1:0x%04x, mcmc2:0x%04x\n",
			sport->regs->tcr1, sport->regs->tcr2,
			sport->regs->tclkdiv, sport->regs->tfsdiv,
			sport->regs->mcmc1, sport->regs->mcmc2);
	flush_dcache_range((unsigned)out_data, (unsigned)(out_data + len));

	/* Enable tx dma */
	dma_config = (RESTART | WDSIZE_16 | DI_EN);
	set_dma_start_addr(sport->dma_tx_chan, (unsigned long)out_data);
	set_dma_x_count(sport->dma_tx_chan, len/2);
	set_dma_x_modify(sport->dma_tx_chan, 2);
	set_dma_config(sport->dma_tx_chan, dma_config);
	enable_dma(sport->dma_tx_chan);

	if (in_data != NULL) {
		invalidate_dcache_range((unsigned)in_data,
				(unsigned)(in_data + len));
		/* Enable rx dma */
		dma_config = (RESTART | WDSIZE_16 | WNR | DI_EN);
		set_dma_start_addr(sport->dma_rx_chan, (unsigned long)in_data);
		set_dma_x_count(sport->dma_rx_chan, len/2);
		set_dma_x_modify(sport->dma_rx_chan, 2);
		set_dma_config(sport->dma_rx_chan, dma_config);
		enable_dma(sport->dma_rx_chan);
	}

	local_irq_save(flags);
	sport->regs->tcr1 |= TSPEN;
	sport->regs->rcr1 |= RSPEN;
	SSYNC();

	status = get_dma_curr_irqstat(sport->dma_tx_chan);
	while (status & DMA_RUN) {
		udelay(1);
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		pr_debug("DMA status:0x%04x\n", status);
		if (wait++ > 100)
			goto __over;
	}
	status = sport->regs->stat;
	wait = 0;

	while (!(status & TXHRE)) {
		pr_debug("sport status:0x%04x\n", status);
		udelay(1);
		status = *(unsigned short *)&sport->regs->stat;
		if (wait++ > 1000)
			goto __over;
	}
	/* Wait for the last byte sent out */
	udelay(20);
	pr_debug("sport status:0x%04x\n", status);

__over:
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();
	disable_dma(sport->dma_tx_chan);
	/* Clear the status */
	clear_dma_irqstat(sport->dma_tx_chan);
	if (in_data != NULL) {
		disable_dma(sport->dma_rx_chan);
		clear_dma_irqstat(sport->dma_rx_chan);
	}
	SSYNC();
	local_irq_restore(flags);

	return 0;
}
EXPORT_SYMBOL(sport_send_and_recv);
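
/*
 * PIO sketch (sizes illustrative), e.g. an AC97 command/response exchange
 * before DMA descriptor rings exist: send a 16-byte frame and capture the
 * 16-byte reply; pass in_data == NULL for a write-only transaction:
 *
 *	sport_send_and_recv(sport, cmd_frame, resp_frame, 16);
 */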
MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("SPORT driver for ADI Blackfin");
MODULE_LICENSE("GPL");