/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/mutex.h>	/* mutex_init()/mutex_lock() used below */
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>
/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of specific DMA
 *       channel. This will stop the descriptor based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
	dma_ch[channel].regs->cfg |= RESTART;
	SSYNC();
	dma_ch[channel].regs->cfg &= ~RESTART;
	SSYNC();
}

static int __init blackfin_dma_init(void)
{
	int i;

	printk(KERN_INFO "Blackfin DMA Controller\n");

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
		dma_ch[i].chan_status = DMA_CHANNEL_FREE;
		dma_ch[i].regs = base_addr[i];
		mutex_init(&(dma_ch[i].dmalock));
	}
	/* Mark MEMDMA Channel 0 as requested since we're using it internally */
	dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
	dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
	bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
			 | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
	return 0;
}

arch_initcall(blackfin_dma_init);
/*------------------------------------------------------------------------------
 *	Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{

	pr_debug("request_dma() : BEGIN \n");
	mutex_lock(&(dma_ch[channel].dmalock));

	if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
	    || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
		mutex_unlock(&(dma_ch[channel].dmalock));
		pr_debug("DMA CHANNEL IN USE \n");
		return -EBUSY;
	} else {
		dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
		pr_debug("DMA CHANNEL IS ALLOCATED \n");
	}

	mutex_unlock(&(dma_ch[channel].dmalock));

	dma_ch[channel].device_id = device_id;
	dma_ch[channel].irq_callback = NULL;

	/* This is to be enabled by putting a restriction -
	 * you have to request DMA, before doing any operations on
	 * descriptor/channel
	 */
	pr_debug("request_dma() : END \n");
	return channel;
}
EXPORT_SYMBOL(request_dma);
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
	int ret_irq = 0;

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	if (callback != NULL) {
		int ret_val;
		ret_irq = channel2irq(channel);

		dma_ch[channel].data = data;

		ret_val =
		    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
				dma_ch[channel].device_id, data);
		if (ret_val) {
			printk(KERN_NOTICE
			       "Request irq in DMA engine failed.\n");
			return -EPERM;
		}
		dma_ch[channel].irq_callback = callback;
	}
	return 0;
}
EXPORT_SYMBOL(set_dma_callback);
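/*
 * A minimal usage sketch, not part of this file: a driver claims a channel,
 * installs a completion handler, and later gives the channel back.  The
 * channel number CH_EXAMPLE, the handler name and the dev pointer are
 * placeholders, and the handler signature assumes dma_interrupt_t is the
 * usual irqreturn_t (*)(int irq, void *dev_id) handler type.
 *
 *	static irqreturn_t example_dma_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_EXAMPLE);
 *		return IRQ_HANDLED;
 *	}
 *
 *	if (request_dma(CH_EXAMPLE, "example") < 0)
 *		return -EBUSY;
 *	set_dma_callback(CH_EXAMPLE, example_dma_handler, dev);
 *	...
 *	free_dma(CH_EXAMPLE);
 */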
void free_dma(unsigned int channel)
{
	int ret_irq;

	pr_debug("freedma() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	/* Halt the DMA */
	disable_dma(channel);
	clear_dma_buffer(channel);

	if (dma_ch[channel].irq_callback != NULL) {
		ret_irq = channel2irq(channel);
		free_irq(ret_irq, dma_ch[channel].data);
	}

	/* Clear the DMA Variable in the Channel */
	mutex_lock(&(dma_ch[channel].dmalock));
	dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
	mutex_unlock(&(dma_ch[channel].dmalock));

	pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_enable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_disable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
	if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
		return 0;
	} else {
		return 1;
	}
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *	stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
	pr_debug("stop_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clean the enable bit */
	SSYNC();
	dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
	/* Needs to be enabled Later */
	pr_debug("stop_dma() : END \n");
	return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
	pr_debug("enable_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;

	dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
	SSYNC();
	pr_debug("enable_dma() : END \n");
	return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *	Set the Start Address register for the specific DMA channel
 *	This function can be used for register based DMA,
 *	to setup the start address
 *	addr:	Starting address of the DMA Data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_start_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->start_addr = addr;
	SSYNC();
	pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);
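/*
 * A minimal register-mode (non-descriptor) sketch, not part of this file:
 * receive len bytes from a peripheral into buf on a channel that was already
 * claimed with request_dma().  buf, len and the flag choice (WNR | DI_EN |
 * WDSIZE_8, i.e. write to memory, interrupt on completion, 8-bit transfers)
 * are illustrative only; enable_dma() sets DMAEN itself.
 *
 *	set_dma_start_addr(channel, (unsigned long)buf);
 *	set_dma_x_count(channel, len);
 *	set_dma_x_modify(channel, 1);
 *	set_dma_config(channel, WNR | DI_EN | WDSIZE_8);
 *	enable_dma(channel);
 */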
void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_next_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->next_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_count = x_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_count = y_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_modify = x_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_modify = y_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg = config;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	unsigned short config;

	config =
	    ((direction << 1) | (width << 2) | (dma_mode << 4) |
	     (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
	return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
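/*
 * Bit layout produced by set_bfin_dma_config(), read directly off the shifts
 * above (bit 0, DMAEN, is left for enable_dma() to set):
 *
 *	bit      1	direction
 *	bits   3:2	width
 *	bit      4	dma_mode (2D)
 *	bit      5	syncmode
 *	bits   7:6	intr_mode
 *	bits 15:12	flow_mode
 *
 * Worked example (illustrative values only): direction = 1, width = 1,
 * intr_mode = 1 and everything else 0 gives
 * (1 << 1) | (1 << 2) | (1 << 6) = 0x0046.
 */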
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

	dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

	SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
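/*
 * A minimal descriptor-mode sketch, not part of this file: the caller builds
 * an array of struct dmasg descriptors (layout as defined in <asm/dma.h>) and
 * hands it to the channel.  set_dma_sg() packs the element count into bits
 * 11:8 of the config register and points next_desc_ptr at the array;
 * enable_dma() then starts the channel fetching descriptors from it.
 *
 *	struct dmasg desc[2];	(filled in by the driver beforehand)
 *
 *	set_dma_sg(channel, desc, 2);
 *	enable_dma(channel);
 */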
void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_addr_ptr = addr;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *	Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *	Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));
	dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *	Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *	Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned,  0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
	unsigned long flags;

	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	local_irq_restore(flags);

	return dest;
}
void *dma_memcpy(void *dest, const void *src, size_t size)
{
	size_t bulk;
	size_t rest;
	void *addr;

	bulk = (size >> 16) << 16;
	rest = size - bulk;
	if (bulk)
		__dma_memcpy(dest, src, bulk);
	addr = __dma_memcpy(dest+bulk, src+bulk, rest);
	return addr;
}
EXPORT_SYMBOL(dma_memcpy);
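/*
 * Worked example of the split above (illustrative numbers only):
 *
 *	dma_memcpy(dst, src, 0x23456);
 *
 * gives bulk = 0x20000 and rest = 0x3456.  The bulk part is a multiple of
 * 64K, so __dma_memcpy() runs it as a 2D transfer of (0x20000 >> 10) = 128
 * rows of 1K bytes each; the remaining 0x3456 bytes then go through the 1D
 * path in a second transfer.
 */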
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
	void *addr;
	addr = dma_memcpy(dest, src, size);
	return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsb);
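/*
 * A minimal usage sketch, not part of this file: stream a byte buffer out to
 * a fixed device data register and read a reply back.  DEV_FIFO_ADDR, tx_buf,
 * rx_buf and the lengths are placeholders for whatever the real driver uses;
 * the destination/source modify value of 0 keeps the device-side address
 * fixed while the memory side steps through the buffer.
 *
 *	dma_outsb(DEV_FIFO_ADDR, tx_buf, tx_len);
 *	dma_insb(DEV_FIFO_ADDR, rx_buf, rx_len);
 */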
void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len);

	local_irq_save(flags);
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(short));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(short));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(long));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(long));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insl);