/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE
/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
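
/*
 * Note added for clarity (not in the original source): each dma_ch[] slot
 * tracks one hardware channel.  Judging from the accesses in this file,
 * struct dma_channel (from <asm/dma.h>) carries at least the channel's MMR
 * block pointer (->regs), its allocation state (->chan_status), a mutex
 * (->dmalock), the owning driver name (->device_id), the registered
 * interrupt handler (->irq_callback) and its cookie (->data).
 */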
/*------------------------------------------------------------------------------
 *	Set the Buffer Clear bit in the Configuration register of specific DMA
 *	channel. This will stop the descriptor based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}
static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                               | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);
/*------------------------------------------------------------------------------
 *	Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                if (strncmp(device_id, "BFIN_UART", 9) == 0)
                        dma_ch[channel].regs->peripheral_map |=
                                (channel - CH_UART2_RX + 0xC);
                else
                        dma_ch[channel].regs->peripheral_map |=
                                (channel - CH_UART2_RX + 0x6);
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* This is to be enabled by putting a restriction -
         * you have to request DMA, before doing any operations on
         * descriptor/channel
         */
        pr_debug("request_dma() : END \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
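
/*
 * Illustrative usage sketch, added for clarity; it is not part of the
 * original file.  A peripheral driver would typically claim a channel and
 * install a handler roughly as below.  The channel number, device name,
 * buffer, sizes and "config" value are hypothetical (config would usually
 * be built with set_bfin_dma_config(), defined further down), and the
 * handler prototype is assumed to match dma_interrupt_t from <asm/dma.h>:
 *
 *	static irqreturn_t my_dma_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_SPORT0_RX);
 *		return IRQ_HANDLED;
 *	}
 *
 *	if (request_dma(CH_SPORT0_RX, "MY_DRIVER") < 0)
 *		return -EBUSY;
 *	set_dma_callback(CH_SPORT0_RX, my_dma_handler, my_dev);
 *	set_dma_start_addr(CH_SPORT0_RX, (unsigned long)buf);
 *	set_dma_x_count(CH_SPORT0_RX, nbytes);
 *	set_dma_x_modify(CH_SPORT0_RX, 1);
 *	set_dma_config(CH_SPORT0_RX, config);
 *	enable_dma(CH_SPORT0_RX);
 *	...
 *	disable_dma(CH_SPORT0_RX);
 *	free_dma(CH_SPORT0_RX);
 */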
void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);
void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
                return 0;
        } else {
                return 1;
        }
}
EXPORT_SYMBOL(dma_channel_active);
/*------------------------------------------------------------------------------
 *	Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clean the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled Later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);
/*------------------------------------------------------------------------------
 *	Set the Start Address register for the specific DMA channel.
 *	This function can be used for register based DMA
 *	to set up the start address.
 *	addr: starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);
void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
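
/*
 * Worked example, added for illustration only: with direction = 1,
 * width = 2, dma_mode = 0, intr_mode = 1, flow_mode = 0 and syncmode = 0,
 * the helper above returns
 *
 *	(1 << 1) | (2 << 2) | (0 << 4) | (1 << 6) | (0 << 12) | (0 << 5)
 *	    = 0x0002 | 0x0008 | 0x0040
 *	    = 0x004A
 *
 * The argument values are placeholders chosen only to show the packing; the
 * field encodings themselves come from <asm/dma.h>.  The DMAEN bit is not
 * part of what this helper builds (the shifts start at bit 1); enable_dma()
 * sets it separately when the channel is started.
 */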
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);
/*------------------------------------------------------------------------------
 *	Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *	Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *	Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *	Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
                 && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;	/* 1 - address decrease, 0 - address increase */
        int flag_align;	/* 1 - address aligned,  0 - address unaligned */
        int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)	/* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}
void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
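
/*
 * Example of the split above (added for illustration): for size = 0x12345,
 * bulk = (0x12345 >> 16) << 16 = 0x10000 and rest = 0x2345, so the copy is
 * issued as one 64K transfer followed by one short transfer.  A size below
 * 64K gives bulk = 0 and goes straight to the second __dma_memcpy() call.
 */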
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);
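
/*
 * Note on the string I/O helpers below (comment added for clarity): each one
 * programs MDMA stream 0 directly.  The memory-side pointer advances by the
 * element size (X_MODIFY of 1, 2 or 4), while the peripheral-side pointer
 * uses an X_MODIFY of 0 so every element is written to, or read from, the
 * same address.  The transfer runs to completion with interrupts disabled,
 * busy-waiting on DMA_DONE.
 */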
void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);
void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);
void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);