/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>
/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *	Set the Buffer Clear bit in the Configuration register of specific DMA
 *	channel. This will stop the descriptor based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
	dma_ch[channel].regs->cfg |= RESTART;
	SSYNC();
	dma_ch[channel].regs->cfg &= ~RESTART;
	SSYNC();
}
static int __init blackfin_dma_init(void)
{
	int i;

	printk(KERN_INFO "Blackfin DMA Controller\n");

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
		dma_ch[i].chan_status = DMA_CHANNEL_FREE;
		dma_ch[i].regs = base_addr[i];
		mutex_init(&(dma_ch[i].dmalock));
	}
	/* Mark MEMDMA Channel 0 as requested since we're using it internally */
	dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
	dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
	bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
			 | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
	return 0;
}

arch_initcall(blackfin_dma_init);
/*------------------------------------------------------------------------------
 *	Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{

	pr_debug("request_dma() : BEGIN \n");
	mutex_lock(&(dma_ch[channel].dmalock));

	if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
	    || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
		mutex_unlock(&(dma_ch[channel].dmalock));
		pr_debug("DMA CHANNEL IN USE \n");
		return -EBUSY;
	} else {
		dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
		pr_debug("DMA CHANNEL IS ALLOCATED \n");
	}

	mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
	if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
		if (strncmp(device_id, "BFIN_UART", 9) == 0)
			dma_ch[channel].regs->peripheral_map |=
				(channel - CH_UART2_RX + 0xC);
		else
			dma_ch[channel].regs->peripheral_map |=
				(channel - CH_UART2_RX + 0x6);
	}
#endif
	dma_ch[channel].device_id = device_id;
	dma_ch[channel].irq_callback = NULL;

	/* This is to be enabled by putting a restriction -
	 * you have to request DMA, before doing any operations on
	 * descriptor/channel
	 */
	pr_debug("request_dma() : END \n");
	return channel;
}
EXPORT_SYMBOL(request_dma);
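
/*
 * Usage sketch (illustrative only, not part of the original file): a driver
 * pairs request_dma()/free_dma() around its transfer setup.  The channel
 * number below is only an example; CH_UART2_RX is used because this file
 * already refers to it, and "BFIN_UART" is the device_id string checked
 * above.
 *
 *	if (request_dma(CH_UART2_RX, "BFIN_UART") < 0)
 *		return -EBUSY;
 *	...program the channel and run transfers...
 *	free_dma(CH_UART2_RX);
 */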
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
	int ret_irq = 0;

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	if (callback != NULL) {
		int ret_val;
		ret_irq = channel2irq(channel);

		dma_ch[channel].data = data;

		ret_val =
		    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
				dma_ch[channel].device_id, data);
		if (ret_val) {
			printk(KERN_NOTICE
			       "Request irq in DMA engine failed.\n");
			return -EPERM;
		}
		dma_ch[channel].irq_callback = callback;
	}
	return 0;
}
EXPORT_SYMBOL(set_dma_callback);
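
/*
 * Callback sketch (illustrative only, not part of the original file),
 * assuming dma_interrupt_t has the usual IRQ-handler shape
 * (int irq, void *dev_id) returning irqreturn_t, which is what the
 * request_irq() cast above suggests.  The handler typically acknowledges
 * completion with clear_dma_irqstat() before returning.
 * "example_dma_handler" and "example_dev" are hypothetical names.
 *
 *	static irqreturn_t example_dma_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_UART2_RX);
 *		return IRQ_HANDLED;
 *	}
 *
 *	set_dma_callback(CH_UART2_RX, example_dma_handler, example_dev);
 */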
void free_dma(unsigned int channel)
{
	int ret_irq;

	pr_debug("freedma() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	/* Halt the DMA */
	disable_dma(channel);
	clear_dma_buffer(channel);

	if (dma_ch[channel].irq_callback != NULL) {
		ret_irq = channel2irq(channel);
		free_irq(ret_irq, dma_ch[channel].data);
	}

	/* Clear the DMA Variable in the Channel */
	mutex_lock(&(dma_ch[channel].dmalock));
	dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
	mutex_unlock(&(dma_ch[channel].dmalock));

	pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);
void dma_enable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_enable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_disable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
	if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
		return 0;
	} else {
		return 1;
	}
}
EXPORT_SYMBOL(dma_channel_active);
/*------------------------------------------------------------------------------
 *	stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
	pr_debug("stop_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clean the enable bit */
	SSYNC();
	dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
	/* Needs to be enabled Later */
	pr_debug("stop_dma() : END \n");
	return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
	pr_debug("enable_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;

	dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
	SSYNC();
	pr_debug("enable_dma() : END \n");
	return;
}
EXPORT_SYMBOL(enable_dma);
/*------------------------------------------------------------------------------
 *	Set the Start Address register for the specific DMA channel
 *	This function can be used for register based DMA,
 *	to setup the start address
 *	addr:	Starting address of the DMA Data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_start_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->start_addr = addr;
	SSYNC();
	pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_next_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->next_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_count = x_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_count = y_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_modify = x_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_modify = y_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg = config;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_config);
unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	unsigned short config;

	config =
	    ((direction << 1) | (width << 2) | (dma_mode << 4) |
	     (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
	return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
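
/*
 * Register-mode sketch (illustrative only, not part of the original file).
 * set_bfin_dma_config() simply packs its arguments at the bit positions
 * shown above (direction at bit 1, width at bits 2-3, dma_mode at bit 4,
 * syncmode at bit 5, intr_mode at bits 6-7, flow_mode at bits 12-14);
 * bit 0 (DMAEN) stays clear and is set later by enable_dma().  The raw
 * field values below stand in for the usual <asm/dma.h> macros, which are
 * not spelled out in this file; "channel", "buf" and "nwords" are
 * hypothetical.
 *
 *	unsigned short cfg = set_bfin_dma_config(1, 0, 0, 0, 1, 0);
 *
 *	set_dma_config(channel, cfg);
 *	set_dma_start_addr(channel, (unsigned long)buf);
 *	set_dma_x_count(channel, nwords);
 *	set_dma_x_modify(channel, 2);
 *	enable_dma(channel);
 *
 *	while (!(get_dma_curr_irqstat(channel) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(channel);
 *	disable_dma(channel);
 */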
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

	dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

	SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_addr_ptr = addr;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *	Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *	Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));
	dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *	Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *	Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned,  0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
	unsigned long flags;

	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	local_irq_restore(flags);

	return dest;
}
void *dma_memcpy(void *dest, const void *src, size_t size)
{
	size_t bulk;
	size_t rest;
	void *addr;

	bulk = (size >> 16) << 16;
	rest = size - bulk;
	if (bulk)
		__dma_memcpy(dest, src, bulk);
	addr = __dma_memcpy(dest+bulk, src+bulk, rest);
	return addr;
}
EXPORT_SYMBOL(dma_memcpy);
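
/*
 * Note on the split above (added comment): __dma_memcpy() switches to a 2D
 * transfer (1024 bytes per row, Y_COUNT = size >> 10 rows) once size exceeds
 * 64K, which assumes size is a multiple of 1024.  dma_memcpy() guarantees
 * that by carving off the 64K-aligned bulk first and then copying the
 * sub-64K remainder with a plain 1D run; e.g. size = 0x12345 gives
 * bulk = 0x10000 and rest = 0x2345.
 */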
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
	void *addr;
	addr = dma_memcpy(dest, src, size);
	return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);
void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
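
/*
 * Usage sketch (illustrative only, not part of the original file):
 * dma_outsb() and the 16/32-bit variants below mirror outsb()-style string
 * I/O.  len elements are streamed from buf to a fixed destination address
 * (destination X_MODIFY is 0, so every element is written to the same
 * MMR/FIFO address).  "EXAMPLE_FIFO_ADDR" is a hypothetical register
 * address.
 *
 *	unsigned char tx[64];
 *	...fill tx...
 *	dma_outsb(EXAMPLE_FIFO_ADDR, tx, sizeof(tx));
 */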
void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len);

	local_irq_save(flags);
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);
void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(short));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);
void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(short));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);
void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(long));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);
void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(long));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);