/*
 * Author: Roy Huang <roy.huang@analog.com>
 *
 * Created: Tue Sep 21 10:52:42 CEST 2004
 *
 * Blackfin SPORT Driver
 *
 * Copyright 2004-2007 Analog Devices Inc.
 *
 * Bugs: Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio.h>
#include <linux/bug.h>
#include <linux/module.h>
#include <asm/portmux.h>
#include <asm/dma.h>
#include <asm/blackfin.h>
#include <asm/cacheflush.h>

#include "bf5xx-sport.h"
/* delay between frame sync pulse and first data bit in multichannel mode */
#define FRAME_DELAY (1<<12)
/* note: multichannel is in units of 8 channels,
 * tdm_count is # channels, NOT # channels / 8 ! */
int sport_set_multichannel(struct sport_device *sport,
			   int tdm_count, u32 tx_mask, u32 rx_mask, int packed)
{
	pr_debug("%s tdm_count=%d tx_mask:0x%08x rx_mask:0x%08x packed=%d\n",
		 __func__, tdm_count, tx_mask, rx_mask, packed);

	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	if (tdm_count & 0x7)
		return -EINVAL;

	if (tdm_count > 32)
		return -EINVAL; /* Only support up to 32 channels for now */

	if (tdm_count) {
		sport->regs->mcmc1 = ((tdm_count>>3)-1) << 12;
		sport->regs->mcmc2 = FRAME_DELAY | MCMEN |
				(packed ? (MCDTXPE|MCDRXPE) : 0);

		sport->regs->mtcs0 = tx_mask;
		sport->regs->mrcs0 = rx_mask;
		sport->regs->mtcs1 = 0;
		sport->regs->mrcs1 = 0;
		sport->regs->mtcs2 = 0;
		sport->regs->mrcs2 = 0;
		sport->regs->mtcs3 = 0;
		sport->regs->mrcs3 = 0;
	} else {
		sport->regs->mcmc1 = 0;
		sport->regs->mcmc2 = 0;

		sport->regs->mtcs0 = 0;
		sport->regs->mrcs0 = 0;
	}

	sport->regs->mtcs1 = 0; sport->regs->mtcs2 = 0; sport->regs->mtcs3 = 0;
	sport->regs->mrcs1 = 0; sport->regs->mrcs2 = 0; sport->regs->mrcs3 = 0;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_set_multichannel);
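/*
 * Usage sketch (illustrative only, not taken from an in-tree caller): a
 * 16-slot TDM bus with slots 0 and 1 active in both directions, unpacked,
 * would be set up before the SPORT is enabled with:
 *
 *	sport_set_multichannel(sport, 16, 0x3, 0x3, 0);
 *
 * tdm_count must be a multiple of 8 and at most 32; passing tdm_count = 0
 * disables multichannel mode again.
 */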
int sport_config_rx(struct sport_device *sport, unsigned int rcr1,
		    unsigned int rcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->rcr1 = rcr1;
	sport->regs->rcr2 = rcr2;
	sport->regs->rclkdiv = clkdiv;
	sport->regs->rfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_rx);
int sport_config_tx(struct sport_device *sport, unsigned int tcr1,
		    unsigned int tcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->tcr1 = tcr1;
	sport->regs->tcr2 = tcr2;
	sport->regs->tclkdiv = clkdiv;
	sport->regs->tfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_tx);
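/*
 * Note on the divisors (a sketch based on the Blackfin SPORT clocking
 * model; the numbers below are made up, not taken from this driver):
 * the serial bit clock is SCLK / (2 * (clkdiv + 1)), and a frame sync
 * fires every (fsdiv + 1) bit clocks.  With SCLK = 133 MHz, clkdiv = 51
 * gives roughly a 1.28 MHz bit clock, and fsdiv = 255 then produces a
 * 256-bit frame (about a 5 kHz frame rate).
 */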
static void setup_desc(struct dmasg *desc, void *buf, int fragcount,
		size_t fragsize, unsigned int cfg,
		unsigned int x_count, unsigned int ycount, size_t wdsize)
{
	int i;

	for (i = 0; i < fragcount; ++i) {
		desc[i].next_desc_addr = &(desc[i + 1]);
		desc[i].start_addr = (unsigned long)buf + i*fragsize;
		desc[i].cfg = cfg;
		desc[i].x_count = x_count;
		desc[i].x_modify = wdsize;
		desc[i].y_count = ycount;
		desc[i].y_modify = wdsize;
	}

	/* make circular */
	desc[fragcount-1].next_desc_addr = desc;

	pr_debug("setup desc: desc0=%p, next0=%p, desc1=%p,"
		 "next1=%p\nx_count=%x,y_count=%x,addr=0x%lx,cfg=0x%x\n",
		 desc, desc[0].next_desc_addr,
		 desc+1, desc[1].next_desc_addr,
		 desc[0].x_count, desc[0].y_count,
		 desc[0].start_addr, desc[0].cfg);
}
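/*
 * The result is a circular chain of "large mode" descriptors, one per
 * fragment, e.g. for fragcount = 3:
 *
 *	desc[0] -> desc[1] -> desc[2] --+
 *	   ^----------------------------+
 *
 * so the controller cycles through the audio buffer without CPU help;
 * the callers pass DI_EN in cfg, so an interrupt fires as each fragment
 * completes.
 */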
static int sport_start(struct sport_device *sport)
{
	enable_dma(sport->dma_rx_chan);
	enable_dma(sport->dma_tx_chan);
	sport->regs->rcr1 |= RSPEN;
	sport->regs->tcr1 |= TSPEN;
	SSYNC();

	return 0;
}
static int sport_stop(struct sport_device *sport)
{
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();

	disable_dma(sport->dma_rx_chan);
	disable_dma(sport->dma_tx_chan);
	return 0;
}
static inline int sport_hook_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	if (WARN_ON(!sport->dummy_rx_desc) ||
	    WARN_ON(sport->curr_rx_desc == sport->dummy_rx_desc))
		return -EINVAL;

	/* Maybe the dummy buffer descriptor ring is damaged */
	sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc + 1;

	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_rx_chan);
	/* Copy the descriptor which will be damaged to backup */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_rx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is actually hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
		continue;
	sport->curr_rx_desc = sport->dummy_rx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
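/*
 * In short: the in-flight descriptor is redirected at the dummy ring and
 * its x_count is cut to dummy_count / 2 so the hand-off happens quickly;
 * the busy-wait then confirms the controller has really fetched the dummy
 * descriptor before the clobbered one is restored from temp_desc.
 */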
static inline int sport_rx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc;
		sport->curr_rx_desc = sport->dummy_rx_desc;
	} else
		sport->curr_rx_desc = sport->dma_rx_desc;

	set_dma_next_desc_addr(sport->dma_rx_chan, sport->curr_rx_desc);
	set_dma_x_count(sport->dma_rx_chan, 0);
	set_dma_x_modify(sport->dma_rx_chan, 0);
	set_dma_config(sport->dma_rx_chan, (DMAFLOW_LARGE | NDSIZE_9 |
				WDSIZE_32 | WNR));
	set_dma_curr_addr(sport->dma_rx_chan, sport->curr_rx_desc->start_addr);
	SSYNC();

	return 0;
}
static inline int sport_tx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc;
		sport->curr_tx_desc = sport->dummy_tx_desc;
	} else
		sport->curr_tx_desc = sport->dma_tx_desc;

	set_dma_next_desc_addr(sport->dma_tx_chan, sport->curr_tx_desc);
	set_dma_x_count(sport->dma_tx_chan, 0);
	set_dma_x_modify(sport->dma_tx_chan, 0);
	set_dma_config(sport->dma_tx_chan,
			(DMAFLOW_LARGE | NDSIZE_9 | WDSIZE_32));
	set_dma_curr_addr(sport->dma_tx_chan, sport->curr_tx_desc->start_addr);
	SSYNC();

	return 0;
}
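/*
 * Note: DMAFLOW_LARGE | NDSIZE_9 selects descriptor-list DMA where each
 * descriptor fetch pulls in nine 16-bit words, which matches the layout
 * of struct dmasg filled in by setup_desc() above.
 */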
int sport_rx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s enter\n", __func__);

	if (sport->rx_run)
		return -EBUSY;

	if (sport->tx_run) {
		/* tx is running, rx is not running */
		if (WARN_ON(!sport->dma_rx_desc) ||
		    WARN_ON(sport->curr_rx_desc != sport->dummy_rx_desc))
			return -EINVAL;
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
			continue;
		sport->dummy_rx_desc->next_desc_addr = sport->dma_rx_desc;
		local_irq_restore(flags);
		sport->curr_rx_desc = sport->dma_rx_desc;
	} else {
		sport_tx_dma_start(sport, 1);
		sport_rx_dma_start(sport, 0);
		sport_start(sport);
	}

	sport->rx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_rx_start);
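/*
 * Design note: the driver never starts one direction alone; TX and RX DMA
 * are kept running in lock-step, with the idle direction pointed at its
 * dummy descriptor ring (sport_tx_dma_start(sport, 1) above) so it can be
 * switched to a real buffer later without stopping the hardware.
 */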
int sport_rx_stop(struct sport_device *sport)
{
	pr_debug("%s enter\n", __func__);

	if (!sport->rx_run)
		return 0;

	if (sport->tx_run) {
		/* TX dma is still running, hook the dummy buffer */
		sport_hook_rx_dummy(sport);
	} else {
		/* Both rx and tx dma will be stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->rx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_rx_stop);
static inline int sport_hook_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	if (WARN_ON(!sport->dummy_tx_desc) ||
	    WARN_ON(sport->curr_tx_desc == sport->dummy_tx_desc))
		return -EINVAL;

	sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc + 1;

	/* Shorten the time on last normal descriptor */
	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_tx_chan);
	/* Store the descriptor which will be damaged */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_tx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is actually hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
		continue;
	sport->curr_tx_desc = sport->dummy_tx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}
int sport_tx_start(struct sport_device *sport)
{
	unsigned long flags;

	pr_debug("%s: tx_run:%d, rx_run:%d\n", __func__,
			sport->tx_run, sport->rx_run);

	if (sport->tx_run)
		return -EBUSY;

	if (sport->rx_run) {
		if (WARN_ON(!sport->dma_tx_desc) ||
		    WARN_ON(sport->curr_tx_desc != sport->dummy_tx_desc))
			return -EINVAL;
		/* Hook the normal buffer descriptor */
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
			continue;
		sport->dummy_tx_desc->next_desc_addr = sport->dma_tx_desc;
		local_irq_restore(flags);
		sport->curr_tx_desc = sport->dma_tx_desc;
	} else {
		sport_tx_dma_start(sport, 0);
		/* Let rx dma run the dummy buffer */
		sport_rx_dma_start(sport, 1);
		sport_start(sport);
	}

	sport->tx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_tx_start);
int sport_tx_stop(struct sport_device *sport)
{
	if (!sport->tx_run)
		return 0;

	if (sport->rx_run) {
		/* RX is still running, hook the dummy buffer */
		sport_hook_tx_dummy(sport);
	} else {
		/* Both rx and tx dma stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->tx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_tx_stop);
static inline int compute_wdsize(size_t wdsize)
{
	/* map a word size in bytes onto the DMA WDSIZE config bits */
	switch (wdsize) {
	case 1:
		return WDSIZE_8;
	case 2:
		return WDSIZE_16;
	case 4:
	default:
		return WDSIZE_32;
	}
}
int sport_config_rx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, frag:%d, fragsize:0x%lx\n", __func__,
			buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* for fragments larger than 64k words we use 2d dma: split the
	 * fragment word count into a product of two factors, each of
	 * them less than 64k */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s(x_count:0x%x, y_count:0x%x)\n", __func__,
			x_count, y_count);

	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
					sport->dma_rx_desc, 0);

	/* Allocate a new descriptor ring as current one. */
	sport->dma_rx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->rx_desc_bytes = fragcount * sizeof(struct dmasg);

	if (!sport->dma_rx_desc) {
		pr_err("Failed to allocate memory for rx desc\n");
		return -ENOMEM;
	}

	sport->rx_buf = buf;
	sport->rx_fragsize = fragsize;
	sport->rx_frags = fragcount;

	cfg     = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | WNR |
		  (DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_rx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_rx_dma);
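/*
 * Worked example of the 2D split above (hypothetical numbers): a fragment
 * of 0x30000 words exceeds the 16-bit x_count limit, so the loop picks
 * x_count = 0x10000 with y_count = 3, i.e. three rows of 64K words.
 * setup_desc() sets both x_modify and y_modify to wdsize, so the 2D
 * transfer still walks the buffer contiguously.
 */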
int sport_config_tx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, fragcount:%d, fragsize:0x%lx\n",
			__func__, buf, fragcount, fragsize);

	x_count = fragsize/sport->wdsize;
	y_count = 0;

	/* for fragments larger than 64k words we use 2d dma: split the
	 * fragment word count into a product of two factors, each of
	 * them less than 64k */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s x_count:0x%x, y_count:0x%x\n", __func__,
			x_count, y_count);

	if (sport->dma_tx_desc) {
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);
	}

	sport->dma_tx_desc = dma_alloc_coherent(NULL,
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->tx_desc_bytes = fragcount * sizeof(struct dmasg);
	if (!sport->dma_tx_desc) {
		pr_err("Failed to allocate memory for tx desc\n");
		return -ENOMEM;
	}

	sport->tx_buf = buf;
	sport->tx_fragsize = fragsize;
	sport->tx_frags = fragcount;
	cfg     = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) |
		  (DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_tx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_tx_dma);
/* Set up a dummy dma descriptor ring, which never raises an interrupt
 * (its config omits DI_EN) */
static int sport_config_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned config;

	pr_debug("%s entered\n", __func__);
	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy rx desc\n");
		return -ENOMEM;
	}
	sport->dummy_rx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf;
	config = DMAFLOW_LARGE | NDSIZE_9 | compute_wdsize(sport->wdsize)
		 | WNR | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
static int sport_config_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);

	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		memset(desc, 0, 2 * sizeof(*desc));
	}
	if (!desc) {
		pr_err("Failed to allocate memory for dummy tx desc\n");
		return -ENOMEM;
	}
	sport->dummy_tx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf +
		sport->dummy_count;
	config = DMAFLOW_LARGE | NDSIZE_9 |
		 compute_wdsize(sport->wdsize) | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}
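/*
 * Both dummy rings are two identical descriptors chained head-to-tail,
 * endlessly replaying halves of dummy_buf; having two entries rather than
 * one self-pointing descriptor appears to be what lets sport_hook_*_dummy()
 * redirect one entry while the other is still in flight.
 */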
unsigned long sport_curr_offset_rx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_rx_chan);

	return (unsigned char *)curr - sport->rx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_rx);
unsigned long sport_curr_offset_tx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_tx_chan);

	return (unsigned char *)curr - sport->tx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_tx);
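/*
 * These byte offsets are what a PCM pointer() callback needs; a sketch of
 * the intended use (the bf5xx PCM drivers are the actual consumers):
 *
 *	offt = sport_curr_offset_tx(sport);
 *	return bytes_to_frames(substream->runtime, offt);
 */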
void sport_incfrag(struct sport_device *sport, int *frag, int tx)
{
	++(*frag);
	if (tx == 1 && *frag == sport->tx_frags)
		*frag = 0;

	if (tx == 0 && *frag == sport->rx_frags)
		*frag = 0;
}
EXPORT_SYMBOL(sport_incfrag);
void sport_decfrag(struct sport_device *sport, int *frag, int tx)
{
	--(*frag);
	if (tx == 1 && *frag == 0)
		*frag = sport->tx_frags;

	if (tx == 0 && *frag == 0)
		*frag = sport->rx_frags;
}
EXPORT_SYMBOL(sport_decfrag);
static int sport_check_status(struct sport_device *sport,
		unsigned int *sport_stat,
		unsigned int *rx_stat,
		unsigned int *tx_stat)
{
	int status = 0;

	if (sport_stat) {
		SSYNC();
		status = sport->regs->stat;
		if (status & (TOVF|TUVF|ROVF|RUVF))
			sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
		SSYNC();
		*sport_stat = status;
	}

	if (rx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_rx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_rx_chan);
		SSYNC();
		*rx_stat = status;
	}

	if (tx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_tx_chan);
		SSYNC();
		*tx_stat = status;
	}

	return 0;
}
int sport_dump_stat(struct sport_device *sport, char *buf, size_t len)
{
	int ret;

	ret = snprintf(buf, len,
			"sts: 0x%04x\n"
			"rx dma %d sts: 0x%04x tx dma %d sts: 0x%04x\n",
			sport->regs->stat,
			sport->dma_rx_chan,
			get_dma_curr_irqstat(sport->dma_rx_chan),
			sport->dma_tx_chan,
			get_dma_curr_irqstat(sport->dma_tx_chan));
	buf += ret;
	len -= ret;

	ret += snprintf(buf, len,
			"curr_rx_desc:0x%p, curr_tx_desc:0x%p\n"
			"dma_rx_desc:0x%p, dma_tx_desc:0x%p\n"
			"dummy_rx_desc:0x%p, dummy_tx_desc:0x%p\n",
			sport->curr_rx_desc, sport->curr_tx_desc,
			sport->dma_rx_desc, sport->dma_tx_desc,
			sport->dummy_rx_desc, sport->dummy_tx_desc);

	return ret;
}
static irqreturn_t rx_handler(int irq, void *dev_id)
{
	unsigned int rx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, &rx_stat, NULL);
	if (!(rx_stat & DMA_DONE))
		pr_err("rx dma is already stopped\n");

	if (sport->rx_callback) {
		sport->rx_callback(sport->rx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t tx_handler(int irq, void *dev_id)
{
	unsigned int tx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, NULL, &tx_stat);
	if (!(tx_stat & DMA_DONE)) {
		pr_err("tx dma is already stopped\n");
		return IRQ_HANDLED;
	}
	if (sport->tx_callback) {
		sport->tx_callback(sport->tx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static irqreturn_t err_handler(int irq, void *dev_id)
{
	unsigned int status = 0;
	struct sport_device *sport = dev_id;

	pr_debug("%s\n", __func__);
	if (sport_check_status(sport, &status, NULL, NULL)) {
		pr_err("error checking status ??");
		return IRQ_NONE;
	}

	if (status & (TOVF|TUVF|ROVF|RUVF)) {
		pr_info("sport status error:%s%s%s%s\n",
				status & TOVF ? " TOVF" : "",
				status & TUVF ? " TUVF" : "",
				status & ROVF ? " ROVF" : "",
				status & RUVF ? " RUVF" : "");
		if (status & TOVF || status & TUVF) {
			disable_dma(sport->dma_tx_chan);
			if (sport->tx_run)
				sport_tx_dma_start(sport, 0);
			else
				sport_tx_dma_start(sport, 1);
			enable_dma(sport->dma_tx_chan);
		} else {
			disable_dma(sport->dma_rx_chan);
			if (sport->rx_run)
				sport_rx_dma_start(sport, 0);
			else
				sport_rx_dma_start(sport, 1);
			enable_dma(sport->dma_rx_chan);
		}
	}
	status = sport->regs->stat;
	if (status & (TOVF|TUVF|ROVF|RUVF))
		sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
	SSYNC();

	if (sport->err_callback)
		sport->err_callback(sport->err_data);

	return IRQ_HANDLED;
}
int sport_set_rx_callback(struct sport_device *sport,
		void (*rx_callback)(void *), void *rx_data)
{
	if (WARN_ON(!rx_callback))
		return -EINVAL;
	sport->rx_callback = rx_callback;
	sport->rx_data = rx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_rx_callback);

int sport_set_tx_callback(struct sport_device *sport,
		void (*tx_callback)(void *), void *tx_data)
{
	if (WARN_ON(!tx_callback))
		return -EINVAL;
	sport->tx_callback = tx_callback;
	sport->tx_data = tx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_tx_callback);

int sport_set_err_callback(struct sport_device *sport,
		void (*err_callback)(void *), void *err_data)
{
	if (WARN_ON(!err_callback))
		return -EINVAL;
	sport->err_callback = err_callback;
	sport->err_data = err_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_err_callback);
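/*
 * Registration sketch (the handler names here are hypothetical): a PCM
 * driver wires up its period-elapsed handlers once after sport_init():
 *
 *	sport_set_rx_callback(sport, rx_period_done, substream);
 *	sport_set_tx_callback(sport, tx_period_done, substream);
 *
 * where each handler typically calls snd_pcm_period_elapsed() on the
 * substream handed back as the data pointer.
 */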
static int sport_config_pdev(struct platform_device *pdev, struct sport_param *param)
{
	/* Extract settings from platform data */
	struct device *dev = &pdev->dev;
	struct bfin_snd_platform_data *pdata = dev->platform_data;
	struct resource *res;

	param->num = pdev->id;

	if (!pdata) {
		dev_err(dev, "no platform_data\n");
		return -ENODEV;
	}
	param->pin_req = pdata->pin_req;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "no MEM resource\n");
		return -ENODEV;
	}
	param->regs = (struct sport_register *)res->start;

	/* first RX, then TX */
	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (!res) {
		dev_err(dev, "no rx DMA resource\n");
		return -ENODEV;
	}
	param->dma_rx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_DMA, 1);
	if (!res) {
		dev_err(dev, "no tx DMA resource\n");
		return -ENODEV;
	}
	param->dma_tx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(dev, "no irq resource\n");
		return -ENODEV;
	}
	param->err_irq = res->start;

	return 0;
}
struct sport_device *sport_init(struct platform_device *pdev,
	unsigned int wdsize, unsigned int dummy_count, size_t priv_size)
{
	struct device *dev = &pdev->dev;
	struct sport_param param;
	struct sport_device *sport;
	int ret;

	dev_dbg(dev, "%s enter\n", __func__);

	param.wdsize = wdsize;
	param.dummy_count = dummy_count;
	if (WARN_ON(param.wdsize == 0 || param.dummy_count == 0))
		return NULL;

	ret = sport_config_pdev(pdev, &param);
	if (ret)
		return NULL;

	if (peripheral_request_list(param.pin_req, "soc-audio")) {
		dev_err(dev, "requesting Peripherals failed\n");
		return NULL;
	}

	sport = kzalloc(sizeof(*sport), GFP_KERNEL);
	if (!sport) {
		dev_err(dev, "failed to allocate for sport device\n");
		goto err_kzalloc;
	}

	sport->num = param.num;
	sport->dma_rx_chan = param.dma_rx_chan;
	sport->dma_tx_chan = param.dma_tx_chan;
	sport->err_irq = param.err_irq;
	sport->regs = param.regs;
	sport->pin_req = param.pin_req;

	if (request_dma(sport->dma_rx_chan, "SPORT RX Data") == -EBUSY) {
		dev_err(dev, "failed to request RX dma %d\n", sport->dma_rx_chan);
		goto err_rx_dma;
	}
	if (set_dma_callback(sport->dma_rx_chan, rx_handler, sport) != 0) {
		dev_err(dev, "failed to request RX irq %d\n", sport->dma_rx_chan);
		goto err_tx_dma;
	}

	if (request_dma(sport->dma_tx_chan, "SPORT TX Data") == -EBUSY) {
		dev_err(dev, "failed to request TX dma %d\n", sport->dma_tx_chan);
		goto err_tx_dma;
	}

	if (set_dma_callback(sport->dma_tx_chan, tx_handler, sport) != 0) {
		dev_err(dev, "failed to request TX irq %d\n", sport->dma_tx_chan);
		goto err_err_irq;
	}

	if (request_irq(sport->err_irq, err_handler, IRQF_SHARED, "SPORT err",
			sport) < 0) {
		dev_err(dev, "failed to request err irq %d\n", sport->err_irq);
		goto err_err_irq;
	}

	dev_info(dev, "dma rx:%d tx:%d, err irq:%d, regs:%p\n",
		 sport->dma_rx_chan, sport->dma_tx_chan,
		 sport->err_irq, sport->regs);

	sport->wdsize = param.wdsize;
	sport->dummy_count = param.dummy_count;

	sport->private_data = kzalloc(priv_size, GFP_KERNEL);
	if (!sport->private_data) {
		dev_err(dev, "could not alloc priv data %zu bytes\n", priv_size);
		goto err_priv_data;
	}

	if (L1_DATA_A_LENGTH)
		sport->dummy_buf = l1_data_sram_zalloc(param.dummy_count * 2);
	else
		sport->dummy_buf = kzalloc(param.dummy_count * 2, GFP_KERNEL);
	if (sport->dummy_buf == NULL) {
		dev_err(dev, "failed to allocate dummy buffer\n");
		goto err_dummy_buf;
	}

	ret = sport_config_rx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config rx dummy ring\n");
		goto err_rx_dummy;
	}
	ret = sport_config_tx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config tx dummy ring\n");
		goto err_tx_dummy;
	}

	platform_set_drvdata(pdev, sport);

	return sport;
err_tx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_rx_desc);
	else
		dma_free_coherent(NULL, 2*sizeof(struct dmasg),
				sport->dummy_rx_desc, 0);
err_rx_dummy:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_buf);
	else
		kfree(sport->dummy_buf);
err_dummy_buf:
	kfree(sport->private_data);
err_priv_data:
	free_irq(sport->err_irq, sport);
err_err_irq:
	free_dma(sport->dma_tx_chan);
err_tx_dma:
	free_dma(sport->dma_rx_chan);
err_rx_dma:
	kfree(sport);
err_kzalloc:
	peripheral_free_list(param.pin_req);
	return NULL;
}
EXPORT_SYMBOL(sport_init);
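/*
 * Lifecycle sketch (the wdsize/dummy_count values and the private struct
 * are illustrative, not prescribed by this file): a platform driver's
 * probe() would do
 *
 *	sport = sport_init(pdev, 4, 4 * sizeof(u16), sizeof(struct my_priv));
 *	if (!sport)
 *		return -ENODEV;
 *
 * then hang DMA buffers and callbacks off it, and undo everything with
 * sport_done(sport) in remove().
 */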
void sport_done(struct sport_device *sport)
{
	if (sport == NULL)
		return;

	sport_stop(sport);
	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
			sport->dma_rx_desc, 0);
	if (sport->dma_tx_desc)
		dma_free_coherent(NULL, sport->tx_desc_bytes,
			sport->dma_tx_desc, 0);

#if L1_DATA_A_LENGTH != 0
	l1_data_sram_free(sport->dummy_rx_desc);
	l1_data_sram_free(sport->dummy_tx_desc);
	l1_data_sram_free(sport->dummy_buf);
#else
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
		sport->dummy_rx_desc, 0);
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
		sport->dummy_tx_desc, 0);
	kfree(sport->dummy_buf);
#endif
	free_dma(sport->dma_rx_chan);
	free_dma(sport->dma_tx_chan);
	free_irq(sport->err_irq, sport);

	kfree(sport->private_data);
	peripheral_free_list(sport->pin_req);
	kfree(sport);
}
EXPORT_SYMBOL(sport_done);
/*
 * Used by the ac97 driver to write and read codec registers in PIO mode,
 * i.e. to transfer just a few bytes while descriptor DMA is not running;
 * the SPORT controller must already be configured but not yet enabled.
 * Multichannel mode does not work with PIO mode.
 */
int sport_send_and_recv(struct sport_device *sport, u8 *out_data,
		u8 *in_data, int len)
{
	unsigned short dma_config;
	unsigned short status;
	unsigned long flags;
	unsigned long wait = 0;

	pr_debug("%s enter, out_data:%p, in_data:%p len:%d\n",
			__func__, out_data, in_data, len);
	pr_debug("tcr1:0x%04x, tcr2:0x%04x, tclkdiv:0x%04x, tfsdiv:0x%04x\n"
			"mcmc1:0x%04x, mcmc2:0x%04x\n",
			sport->regs->tcr1, sport->regs->tcr2,
			sport->regs->tclkdiv, sport->regs->tfsdiv,
			sport->regs->mcmc1, sport->regs->mcmc2);
	flush_dcache_range((unsigned)out_data, (unsigned)(out_data + len));

	/* Configure the tx dma channel and start it */
	dma_config = (RESTART | WDSIZE_16 | DI_EN);
	set_dma_start_addr(sport->dma_tx_chan, (unsigned long)out_data);
	set_dma_x_count(sport->dma_tx_chan, len/2);
	set_dma_x_modify(sport->dma_tx_chan, 2);
	set_dma_config(sport->dma_tx_chan, dma_config);
	enable_dma(sport->dma_tx_chan);

	if (in_data != NULL) {
		invalidate_dcache_range((unsigned)in_data,
				(unsigned)(in_data + len));
		/* Configure the rx dma channel and start it */
		dma_config = (RESTART | WDSIZE_16 | WNR | DI_EN);
		set_dma_start_addr(sport->dma_rx_chan, (unsigned long)in_data);
		set_dma_x_count(sport->dma_rx_chan, len/2);
		set_dma_x_modify(sport->dma_rx_chan, 2);
		set_dma_config(sport->dma_rx_chan, dma_config);
		enable_dma(sport->dma_rx_chan);
	}

	local_irq_save(flags);
	sport->regs->tcr1 |= TSPEN;
	sport->regs->rcr1 |= RSPEN;
	SSYNC();

	status = get_dma_curr_irqstat(sport->dma_tx_chan);
	while (status & DMA_RUN) {
		udelay(1);
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		pr_debug("DMA status:0x%04x\n", status);
		if (wait++ > 100)
			goto __over;
	}
	status = sport->regs->stat;
	wait = 0;

	while (!(status & TXHRE)) {
		pr_debug("sport status:0x%04x\n", status);
		udelay(1);
		status = *(unsigned short *)&sport->regs->stat;
		if (wait++ > 1000)
			goto __over;
	}
	/* Wait for the last byte to be sent out */
	udelay(20);
	pr_debug("sport status:0x%04x\n", status);

__over:
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();
	disable_dma(sport->dma_tx_chan);
	/* Clear the status */
	clear_dma_irqstat(sport->dma_tx_chan);
	if (in_data != NULL) {
		disable_dma(sport->dma_rx_chan);
		clear_dma_irqstat(sport->dma_rx_chan);
	}
	SSYNC();
	local_irq_restore(flags);

	return 0;
}
EXPORT_SYMBOL(sport_send_and_recv);
MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("SPORT driver for ADI Blackfin");
MODULE_LICENSE("GPL");