/*
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>

#include <asm/system.h>
#include <asm/io.h>
#include <asm/dma.h>
#include <asm/ppc4xx_dma.h>

ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];

int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}

void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}

void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}

void
ppc4xx_enable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
	unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
				       DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
				       DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
				       DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};

	if (p_dma_ch->in_use) {
		printk("enable_dma: channel %d in use\n", dmanr);
		return;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("enable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (p_dma_ch->mode == DMA_MODE_READ) {
		/* peripheral to memory */
		ppc4xx_set_src_addr(dmanr, 0);
		ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
	} else if (p_dma_ch->mode == DMA_MODE_WRITE) {
		/* memory to peripheral */
		ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
		ppc4xx_set_dst_addr(dmanr, 0);
	}

	/* for other xfer modes, the addresses are already set */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~(DMA_TM_MASK | DMA_TD);	/* clear all mode bits */
	if (p_dma_ch->mode == DMA_MODE_MM) {
		/* software initiated memory to memory */
		control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
	}

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/*
	 * Clear the CS, TS, RI bits for the channel from DMASR.  This
	 * has been observed to happen correctly only after the mode and
	 * ETD/DCE bits in DMACRx are set above.  Must do this before
	 * enabling the channel.
	 */
	mtdcr(DCRN_DMASR, status_bits[dmanr]);

	/*
	 * For device-paced transfers, Terminal Count Enable apparently
	 * must be on, and this must be turned on after the mode, etc.
	 * bits are cleared above (at least on Redwood-6).
	 */
	if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
	    (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
		control |= DMA_TCE_ENABLE;

	/*
	 * Now enable the channel.
	 */
	control |= (p_dma_ch->mode | DMA_CE_ENABLE);

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 1;
}

void
ppc4xx_disable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (!p_dma_ch->in_use) {
		printk("disable_dma: channel %d not in use\n", dmanr);
		return;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("disable_dma: bad channel: %d\n", dmanr);
		return;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CE_ENABLE;
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 0;
}

/*
 * Sets the dma mode for single DMA transfers only.
 * For scatter/gather transfers, the mode is passed to the
 * alloc_dma_handle() function as one of the parameters.
 *
 * The mode is simply saved and used later.  This allows
 * the driver to call set_dma_mode() and set_dma_addr() in
 * any order.
 *
 * Valid mode values are:
 *
 * DMA_MODE_READ          peripheral to memory
 * DMA_MODE_WRITE         memory to peripheral
 * DMA_MODE_MM            memory to memory
 * DMA_MODE_MM_DEVATSRC   device-paced memory to memory, device at src
 * DMA_MODE_MM_DEVATDST   device-paced memory to memory, device at dst
 */
int
ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dma_mode: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->mode = mode;

	return DMA_STATUS_GOOD;
}

/*
 * Sets the DMA Count register.  Note that 'count' is in bytes.
 * However, the DMA Count register counts the number of "transfers",
 * where each transfer is equal to the bus width.  Thus, count
 * MUST be a multiple of the bus width.
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: set_dma_count count 0x%x bus width %d\n",
			       count, p_dma_ch->pwidth);
	}
#endif

	count = count >> p_dma_ch->shift;

	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
}

/*
 * Returns the number of bytes left to be transferred.
 * After a DMA transfer, this should return zero.
 * Reading this while a DMA transfer is still in progress will return
 * unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));

	return (count << p_dma_ch->shift);
}

/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

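/*
 * Illustrative sketch (not part of the original file): a typical
 * peripheral-to-memory transfer driven through the calls above.  The
 * channel number, buffer address and byte count are hypothetical, and
 * the channel is assumed to have been configured beforehand with
 * ppc4xx_init_dma_channel() from platform init code.
 */
#if 0
static void example_peripheral_to_memory(phys_addr_t buf_phys, unsigned int nbytes)
{
	unsigned int chan = 0;				/* hypothetical channel number */

	ppc4xx_set_dma_mode(chan, DMA_MODE_READ);	/* peripheral to memory */
	ppc4xx_set_dma_addr(chan, buf_phys);		/* memory side of the transfer */
	ppc4xx_set_dma_count(chan, nbytes);		/* bytes; must be a multiple of the bus width */
	ppc4xx_enable_dma(chan);			/* program the registers and start */

	/* ... wait for completion (interrupt or polling) ... */

	ppc4xx_disable_dma(chan);
	if (ppc4xx_get_dma_residue(chan) != 0)
		printk("example: transfer did not complete\n");
}
#endif
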
/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1))
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3))
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7))
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			       src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}

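/*
 * Illustrative sketch (not part of the original file): a software-initiated
 * memory-to-memory copy using set_dma_addr2().  Channel number and physical
 * addresses are hypothetical.
 */
#if 0
static void example_memory_to_memory(phys_addr_t src, phys_addr_t dst, unsigned int nbytes)
{
	unsigned int chan = 1;				/* hypothetical channel number */

	ppc4xx_set_dma_mode(chan, DMA_MODE_MM);		/* software initiated mem-to-mem */
	ppc4xx_set_dma_addr2(chan, src, dst);		/* both sides of the transfer */
	ppc4xx_set_dma_count(chan, nbytes);
	ppc4xx_enable_dma(chan);
}
#endif
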
/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled, if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 1;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled, if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 0;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;			/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init.  Then, over-write the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number.  It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}

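/*
 * Illustrative sketch (not part of the original file): how platform init
 * code might configure a channel through ppc4xx_init_dma_channel().  The
 * channel number and field values are hypothetical and must match the
 * actual board wiring; only fields referenced elsewhere in this file are
 * filled in here.
 */
#if 0
static int __init example_platform_dma_init(void)
{
	ppc_dma_ch_t init = {
		.polarity	= 0,		/* DMAReq/DMAAck polarity (board specific) */
		.pwidth		= PW_32,	/* 32-bit peripheral on this channel */
		.int_enable	= 0,		/* enable the interrupt later if needed */
	};

	return ppc4xx_init_dma_channel(0, &init);
}
#endif
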
/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is setup by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel.  This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * The function returns 0 on error.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	return (GET_DMA_PW(control));
}

/*
 * Clears the channel status bits
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}

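/*
 * Illustrative sketch (not part of the original file): busy-polling channel 0
 * for completion via the global status register and then clearing its status
 * bits.  The use of DMA_CS0 as the completion indication is an assumption
 * based on the status_bits handling in ppc4xx_enable_dma(); a real driver
 * would normally use the channel interrupt and a timeout instead.
 */
#if 0
static void example_poll_channel0(void)
{
	while (!(ppc4xx_get_dma_status() & DMA_CS0))
		;	/* busy-wait; no timeout handling in this sketch */

	ppc4xx_clr_dma_status(0);	/* clear the CS/TS/error bits for the channel */
}
#endif
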
#ifdef CONFIG_PPC4xx_EDMA
/*
 * Enables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_enable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Disables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_disable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Sets the burst size (number of peripheral widths) for the channel
 * (BSIZ bits in the control/count register).  Must be one of:
 *   DMA_CTC_BSIZ_2
 *   DMA_CTC_BSIZ_4
 *   DMA_CTC_BSIZ_8
 *   DMA_CTC_BSIZ_16
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BSIZ_MSK;
	ctc |= (bsize & DMA_CTC_BSIZ_MSK);
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

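/*
 * Illustrative sketch (not part of the original file): enabling bursts on a
 * channel.  Per the notes above, for scatter/gather transfers this must
 * happen before ppc4xx_alloc_dma_handle() is called.  The channel number
 * and burst size are hypothetical.
 */
#if 0
static void example_configure_burst(unsigned int chan)
{
	ppc4xx_enable_burst(chan);
	ppc4xx_set_burst_size(chan, DMA_CTC_BSIZ_16);	/* 16 peripheral widths per burst */
}
#endif
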
EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */

EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);