/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Author: Vitaly Wool
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>

#include <asm/system.h>
#include <asm/hardware.h>
#include <asm/dma.h>
#include <asm/dma-mapping.h>
#include <asm/io.h>
#include <asm/mach/dma.h>
#include <asm/arch/clock.h>
static struct dma_channel {
	char *name;
	void (*irq_handler) (int, int, void *);
	void *data;
	struct pnx4008_dma_ll *ll;
	u32 ll_dma;
	void *target_addr;
	int target_id;
} dma_channels[MAX_DMA_CHANNELS];
static struct ll_pool {
	void *vaddr;
	void *cur;
	dma_addr_t dma_addr;
	int count;
} ll_pool;

static DEFINE_SPINLOCK(ll_lock);
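/*
 * Pull one linked-list entry off the free list kept in ll_pool.  Each unused
 * entry stores the pointer to the next free one; the bus address of the
 * returned entry is derived from its offset into the coherent block and
 * passed back through *ll_dma.  Returns NULL once only four or fewer
 * entries are left in the pool.
 */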
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
{
	struct pnx4008_dma_ll *ll = NULL;
	unsigned long flags;

	spin_lock_irqsave(&ll_lock, flags);
	if (ll_pool.count > 4) { /* can give one more */
		ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
		*ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
		*(void **)ll_pool.cur = **(void ***)ll_pool.cur;
		memset(ll, 0, sizeof(*ll));
		ll_pool.count--;
	}
	spin_unlock_irqrestore(&ll_lock, flags);

	return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);
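/*
 * Return a single entry to the free list.  An entry that does not belong to
 * the pool triggers a BUG(), and a buffer attached via DMA_BUFFER_ALLOCATED
 * is released through the entry's free() callback first.
 */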
void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
{
	unsigned long flags;

	if (ll) {
		if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
			printk(KERN_ERR
			       "Trying to free entry not allocated by DMA\n");
			BUG();
		}

		if (ll->flags & DMA_BUFFER_ALLOCATED)
			ll->free(ll->alloc_data);

		spin_lock_irqsave(&ll_lock, flags);
		*(long *)ll = *(long *)ll_pool.cur;
		*(long *)ll_pool.cur = (long)ll;
		ll_pool.count++;
		spin_unlock_irqrestore(&ll_lock, flags);
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);
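/*
 * Walk a whole linked-list chain and hand every entry back to the pool.
 */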
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
{
	struct pnx4008_dma_ll *ptr;
	u32 dma;

	while (ll) {
		dma = ll->next_dma;
		ptr = ll->next;
		pnx4008_free_ll_entry(ll, ll_dma);

		ll_dma = dma;
		ll = ptr;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);
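/*
 * The DMA clock and the controller enable bit are reference counted: the
 * first requested channel switches them on, the last freed channel switches
 * them off again.
 */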
static int dma_channels_requested = 0;

static inline void dma_increment_usage(void)
{
	if (!dma_channels_requested++) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 1);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 1);
	}
}

static inline void dma_decrement_usage(void)
{
	if (!--dma_channels_requested) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 0);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 0);
	}
}
static DEFINE_SPINLOCK(dma_lock);

static inline void pnx4008_dma_lock(void)
{
	spin_lock_irq(&dma_lock);
}

static inline void pnx4008_dma_unlock(void)
{
	spin_unlock_irq(&dma_lock);
}

#define VALID_CHANNEL(c)	(((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
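/*
 * Reserve a DMA channel.  Passing ch == -1 picks the highest-numbered free
 * channel; otherwise the specific channel is requested.  Returns the channel
 * number on success or a negative errno.
 */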
int pnx4008_request_channel(char *name, int ch,
			    void (*irq_handler) (int, int, void *), void *data)
{
	int i, found = 0;

	/* basic sanity checks */
	if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
		return -EINVAL;

	pnx4008_dma_lock();

	/* try grabbing a DMA channel with the requested priority */
	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		if (!dma_channels[i].name && (ch == -1 || ch == i)) {
			found = 1;
			break;
		}
	}

	if (found) {
		dma_increment_usage();
		dma_channels[i].name = name;
		dma_channels[i].irq_handler = irq_handler;
		dma_channels[i].data = data;
		dma_channels[i].ll = NULL;
		dma_channels[i].ll_dma = 0;
	} else {
		printk(KERN_WARNING "No more available DMA channels for %s\n",
		       name);
		i = -ENODEV;
	}

	pnx4008_dma_unlock();
	return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);
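/*
 * Release a previously requested channel: free any linked-list chain still
 * attached to it and drop the clock/controller reference.
 */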
void pnx4008_free_channel(int ch)
{
	if (!dma_channels[ch].name) {
		printk(KERN_CRIT
		       "%s: trying to free channel %d which is already freed\n",
		       __FUNCTION__, ch);
		return;
	}

	pnx4008_dma_lock();
	pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
	dma_channels[ch].ll = NULL;
	dma_decrement_usage();

	dma_channels[ch].name = NULL;
	pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);
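/*
 * Update the global DMAC configuration register: ahb_m1_be and ahb_m2_be
 * select the endianness of the two AHB masters (bits 1 and 2), enable
 * controls the controller enable (bit 0).  Passing -1 leaves the
 * corresponding field unchanged.
 */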
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
	unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

	switch (ahb_m1_be) {
	case 0:
		dma_cfg &= ~(1 << 1);
		break;
	case 1:
		dma_cfg |= (1 << 1);
		break;
	default:
		break;
	}

	switch (ahb_m2_be) {
	case 0:
		dma_cfg &= ~(1 << 2);
		break;
	case 1:
		dma_cfg |= (1 << 2);
		break;
	default:
		break;
	}

	switch (enable) {
	case 0:
		dma_cfg &= ~(1 << 0);
		break;
	case 1:
		dma_cfg |= (1 << 0);
		break;
	default:
		break;
	}

	pnx4008_dma_lock();
	__raw_writel(dma_cfg, DMAC_CONFIG);
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);
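/*
 * Translate a pnx4008_dma_ch_ctrl description into the raw channel control
 * word: interrupt mask, cacheable/bufferable/privileged attributes,
 * increment flags, AHB master selection, transfer widths, burst sizes and
 * transfer size.
 */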
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
			     unsigned long *ctrl)
{
	int i = 0, dbsize, sbsize, err = 0;

	if (!ctrl || !ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	*ctrl = 0;

	switch (ch_ctrl->tc_mask) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 31);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->cacheable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 30);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->bufferable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 29);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->priv_mode) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 28);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->di) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 27);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->si) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 26);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->dest_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 25);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->src_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 24);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->dwidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 21);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (1 << 21);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (2 << 21);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->swidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 18);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (1 << 18);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (2 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	dbsize = ch_ctrl->dbsize;
	while (!(dbsize & 1)) {
		i++;
		dbsize >>= 1;
	}
	if (ch_ctrl->dbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 15);
	*ctrl |= (i << 15);
	i = 0;	/* restart the shift count for the source burst size */
	sbsize = ch_ctrl->sbsize;
	while (!(sbsize & 1)) {
		i++;
		sbsize >>= 1;
	}
	if (ch_ctrl->sbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 12);
	*ctrl |= (i << 12);
	if (ch_ctrl->tr_size > 0x7ff) {
		err = -E2BIG;
		goto out;
	}
	*ctrl &= ~0x7ff;
	*ctrl |= ch_ctrl->tr_size & 0x7ff;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);
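/*
 * Inverse of pnx4008_dma_pack_control(): decode a raw channel control word
 * back into a pnx4008_dma_ch_ctrl structure.
 */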
int pnx4008_dma_parse_control(unsigned long ctrl,
			      struct pnx4008_dma_ch_ctrl * ch_ctrl)
{
	int err = 0;

	if (!ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	ch_ctrl->tr_size = ctrl & 0x7ff;
	ctrl >>= 12;

	ch_ctrl->sbsize = 1 << (ctrl & 7);
	if (ch_ctrl->sbsize > 1)
		ch_ctrl->sbsize <<= 1;
	ctrl >>= 3;

	ch_ctrl->dbsize = 1 << (ctrl & 7);
	if (ch_ctrl->dbsize > 1)
		ch_ctrl->dbsize <<= 1;
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->swidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->swidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->swidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->dwidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->dwidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->dwidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	ch_ctrl->src_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->dest_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->si = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->di = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->priv_mode = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->bufferable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->cacheable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->tc_mask = ctrl & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);
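/*
 * Translate a pnx4008_dma_ch_config description into the raw channel
 * configuration word: halt/active/lock bits, interrupt enables, flow control
 * mode and the source/destination peripheral numbers.
 */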
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
			    unsigned long *cfg)
{
	int err = 0;

	if (!cfg || !ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	*cfg = 0;

	switch (ch_cfg->halt) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->active) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 17);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->lock) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 16);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->itc) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 15);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->ie) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 14);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->flow_cntrl) {
	case FC_MEM2MEM_DMA:
		*cfg &= ~(7 << 11);
		break;
	case FC_MEM2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (1 << 11);
		break;
	case FC_PER2MEM_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (2 << 11);
		break;
	case FC_PER2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (3 << 11);
		break;
	case FC_PER2PER_DPER:
		*cfg &= ~(7 << 11);
		*cfg |= (4 << 11);
		break;
	case FC_MEM2PER_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (5 << 11);
		break;
	case FC_PER2MEM_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (6 << 11);
		break;
	case FC_PER2PER_SPER:
		*cfg |= (7 << 11);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	*cfg &= ~(0x1f << 6);
	*cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

	*cfg &= ~(0x1f << 1);
	*cfg |= ((ch_cfg->src_per & 0x1f) << 1);

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);
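/*
 * Inverse of pnx4008_dma_pack_config(): decode a raw channel configuration
 * word back into a pnx4008_dma_ch_config structure.
 */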
int pnx4008_dma_parse_config(unsigned long cfg,
			     struct pnx4008_dma_ch_config * ch_cfg)
{
	int err = 0;

	if (!ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	cfg >>= 1;

	ch_cfg->src_per = cfg & 0x1f;
	cfg >>= 5;

	ch_cfg->dest_per = cfg & 0x1f;
	cfg >>= 5;

	switch (cfg & 7) {
	case 0:
		ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
		break;
	case 1:
		ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
		break;
	case 2:
		ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
		break;
	case 3:
		ch_cfg->flow_cntrl = FC_PER2PER_DMA;
		break;
	case 4:
		ch_cfg->flow_cntrl = FC_PER2PER_DPER;
		break;
	case 5:
		ch_cfg->flow_cntrl = FC_MEM2PER_PER;
		break;
	case 6:
		ch_cfg->flow_cntrl = FC_PER2MEM_PER;
		break;
	case 7:
		ch_cfg->flow_cntrl = FC_PER2PER_SPER;
	}
	cfg >>= 3;

	ch_cfg->ie = cfg & 1;
	cfg >>= 1;

	ch_cfg->itc = cfg & 1;
	cfg >>= 1;

	ch_cfg->lock = cfg & 1;
	cfg >>= 1;

	ch_cfg->active = cfg & 1;
	cfg >>= 1;

	ch_cfg->halt = cfg & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);
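/*
 * The controller can move at most 0x7ff transfer units per descriptor.  If
 * the head entry in config describes a larger transfer, split it into
 * roughly even pieces and chain the extra pieces as linked-list entries
 * behind it, adjusting the source/destination addresses according to the
 * increment flags.
 */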
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
				  struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		config->ch_ctrl &= ~0x7ff;
		config->ch_ctrl |= new_len;
		if (!config->is_ll) {
			config->is_ll = 1;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = config->ll;
			unsigned long ll_dma_old = config->ll_dma;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		config->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);
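/*
 * Same splitting as pnx4008_dma_split_head_entry(), but applied to an entry
 * that already lives inside a linked list.
 */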
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
				struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		cur_ll->ch_ctrl &= ~0x7ff;
		cur_ll->ch_ctrl |= new_len;
		if (!cur_ll->next) {
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = cur_ll->next;
			unsigned long ll_dma_old = cur_ll->next_dma;
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}

			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		cur_ll->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);
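/*
 * Program the per-channel registers (source/destination address, linked-list
 * pointer, control and configuration words) from a pnx4008_dma_config.
 */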
int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	__raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
	__raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

	if (config->is_ll)
		__raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
	else
		__raw_writel(0, DMAC_Cx_LLI(ch));

	__raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
	__raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);
int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
		return -EINVAL;

	pnx4008_dma_lock();
	config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

	config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
	config->is_ll = config->ll_dma ? 1 : 0;

	config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
	config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);
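/*
 * Channel enable/disable/status helpers: set, clear or read bit 0 of the
 * per-channel configuration register.
 */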
int pnx4008_dma_ch_enable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg |= 1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);
int pnx4008_dma_ch_disable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg &= ~1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);
int pnx4008_dma_ch_enabled(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);
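/*
 * Single interrupt handler for all channels: dispatch terminal-count and
 * error causes to the handler registered for each active channel, then
 * acknowledge the interrupt bits.
 */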
static irqreturn_t dma_irq_handler(int irq, void *dev_id)
{
	int i;
	unsigned long dint = __raw_readl(DMAC_INT_STAT);
	unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
	unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
	unsigned long i_bit;

	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		i_bit = 1 << i;
		if (dint & i_bit) {
			struct dma_channel *channel = &dma_channels[i];

			if (channel->name && channel->irq_handler) {
				int cause = 0;

				if (eint & i_bit)
					cause |= DMA_ERR_INT;
				if (tcint & i_bit)
					cause |= DMA_TC_INT;
				channel->irq_handler(i, cause, channel->data);
			} else {
				/*
				 * IRQ for an unregistered DMA channel
				 */
				printk(KERN_WARNING
				       "spurious IRQ for DMA channel %d\n", i);
			}
			if (tcint & i_bit)
				__raw_writel(i_bit, DMAC_INT_TC_CLEAR);
			if (eint & i_bit)
				__raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
		}
	}

	return IRQ_HANDLED;
}
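/*
 * Boot-time setup: claim the DMA interrupt, carve a 16 KiB coherent block
 * into the linked-list entry pool (each free entry holds the pointer to the
 * next one) and enable the controller.
 */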
static int __init pnx4008_dma_init(void)
{
	int ret, i;

	ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
	if (ret) {
		printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
		goto out;
	}

	ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
	ll_pool.cur = ll_pool.vaddr =
	    dma_alloc_coherent(NULL,
			       ll_pool.count * sizeof(struct pnx4008_dma_ll),
			       &ll_pool.dma_addr, GFP_KERNEL);

	if (!ll_pool.vaddr) {
		ret = -ENOMEM;
		free_irq(DMA_INT, NULL);
		goto out;
	}

	for (i = 0; i < ll_pool.count - 1; i++) {
		void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
		*addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
	}
	*(long *)(ll_pool.vaddr +
		  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
	    (long)ll_pool.vaddr;

	__raw_writel(1, DMAC_CONFIG);

out:
	return ret;
}

arch_initcall(pnx4008_dma_init);