/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>
#include <linux/io.h>

#include <asm/system.h>
#include <mach/hardware.h>
#include <mach/dma.h>
#include <asm/dma-mapping.h>
#include <mach/clock.h>
static struct dma_channel {
	char *name;
	void (*irq_handler) (int, int, void *);
	void *data;
	struct pnx4008_dma_ll *ll;
	u32 ll_dma;
} dma_channels[MAX_DMA_CHANNELS];
static struct ll_pool {
	void *vaddr;
	void *cur;
	dma_addr_t dma_addr;
	int count;
} ll_pool;
static DEFINE_SPINLOCK(ll_lock);
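
/*
 * Linked-list (LLI) entry pool: one coherent buffer is carved into
 * pnx4008_dma_ll entries, and free entries are chained through their
 * first word.  pnx4008_alloc_ll_entry() pops an entry off that chain
 * (keeping a small reserve) and pnx4008_free_ll_entry() pushes it back,
 * both under ll_lock.
 */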
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
{
	struct pnx4008_dma_ll *ll = NULL;
	unsigned long flags;

	spin_lock_irqsave(&ll_lock, flags);
	if (ll_pool.count > 4) {	/* can give one more */
		ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
		*ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
		*(void **)ll_pool.cur = **(void ***)ll_pool.cur;
		memset(ll, 0, sizeof(*ll));
		ll_pool.count--;
	}
	spin_unlock_irqrestore(&ll_lock, flags);

	return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);
void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
{
	unsigned long flags;

	if (ll) {
		if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
			printk(KERN_ERR
			       "Trying to free entry not allocated by DMA\n");
			return;
		}

		if (ll->flags & DMA_BUFFER_ALLOCATED)
			ll->free(ll->alloc_data);

		spin_lock_irqsave(&ll_lock, flags);
		*(long *)ll = *(long *)ll_pool.cur;
		*(long *)ll_pool.cur = (long)ll;
		ll_pool.count++;
		spin_unlock_irqrestore(&ll_lock, flags);
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
{
	struct pnx4008_dma_ll *ptr;
	u32 dma;

	while (ll) {
		dma = ll->next_dma;
		ptr = ll->next;
		pnx4008_free_ll_entry(ll, ll_dma);

		ll_dma = dma;
		ll = ptr;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);
static int dma_channels_requested = 0;
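
/*
 * The "dma_ck" clock and the DMA controller itself are only kept running
 * while at least one channel is requested: the first request enables them
 * and the last free shuts them down again via pnx4008_config_dma().
 */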
static inline void dma_increment_usage(void)
{
	if (!dma_channels_requested++) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 1);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 1);
	}
}
static inline void dma_decrement_usage(void)
{
	if (!--dma_channels_requested) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 0);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 0);
	}
}
static DEFINE_SPINLOCK(dma_lock);
static inline void pnx4008_dma_lock(void)
{
	spin_lock_irq(&dma_lock);
}
static inline void pnx4008_dma_unlock(void)
{
	spin_unlock_irq(&dma_lock);
}
#define VALID_CHANNEL(c)	(((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
int pnx4008_request_channel(char *name, int ch,
			    void (*irq_handler) (int, int, void *), void *data)
{
	int i, found = 0;

	/* basic sanity checks */
	if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
		return -EINVAL;

	pnx4008_dma_lock();

	/* try grabbing a DMA channel with the requested priority */
	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		if (!dma_channels[i].name && (ch == -1 || ch == i)) {
			found = 1;
			break;
		}
	}

	if (found) {
		dma_increment_usage();
		dma_channels[i].name = name;
		dma_channels[i].irq_handler = irq_handler;
		dma_channels[i].data = data;
		dma_channels[i].ll = NULL;
		dma_channels[i].ll_dma = 0;
	} else {
		printk(KERN_WARNING "No more available DMA channels for %s\n",
		       name);
		i = -ENODEV;
	}

	pnx4008_dma_unlock();

	return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);
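
/*
 * Typical client usage (an illustrative sketch only; example_isr and the
 * local variables are hypothetical, not part of this file): request a
 * channel, pack the control/config words, program and enable the channel,
 * then disable and free it when the transfer is done.
 *
 *	static void example_isr(int ch, int cause, void *data)
 *	{
 *		if (cause & DMA_TC_INT)
 *			complete(data);
 *	}
 *
 *	ch = pnx4008_request_channel("example", -1, example_isr, &done);
 *	if (ch >= 0) {
 *		pnx4008_dma_pack_control(&ch_ctrl, &config.ch_ctrl);
 *		pnx4008_dma_pack_config(&ch_cfg, &config.ch_cfg);
 *		pnx4008_config_channel(ch, &config);
 *		pnx4008_dma_ch_enable(ch);
 *		...
 *		pnx4008_dma_ch_disable(ch);
 *		pnx4008_free_channel(ch);
 *	}
 */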
void pnx4008_free_channel(int ch)
{
	if (!dma_channels[ch].name) {
		printk(KERN_CRIT
		       "%s: trying to free channel %d which is already freed\n",
		       __func__, ch);
		return;
	}

	pnx4008_dma_lock();
	pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
	dma_channels[ch].ll = NULL;
	dma_decrement_usage();

	dma_channels[ch].name = NULL;
	pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);
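
/*
 * pnx4008_config_dma() updates the global DMAC_CONFIG register: bit 0
 * enables the controller and, per the ahb_m1_be/ahb_m2_be parameters,
 * bits 1 and 2 select big-endian operation on the two AHB masters.
 * Passing -1 for an argument leaves the corresponding setting unchanged.
 */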
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
	unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

	switch (ahb_m1_be) {
	case 0:
		dma_cfg &= ~(1 << 1);
		break;
	case 1:
		dma_cfg |= (1 << 1);
		break;
	default:
		break;
	}

	switch (ahb_m2_be) {
	case 0:
		dma_cfg &= ~(1 << 2);
		break;
	case 1:
		dma_cfg |= (1 << 2);
		break;
	default:
		break;
	}

	switch (enable) {
	case 0:
		dma_cfg &= ~(1 << 0);
		break;
	case 1:
		dma_cfg |= (1 << 0);
		break;
	default:
		break;
	}

	pnx4008_dma_lock();
	__raw_writel(dma_cfg, DMAC_CONFIG);
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);
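
/*
 * pnx4008_dma_pack_control() validates the fields of a pnx4008_dma_ch_ctrl
 * and packs them into the raw channel CONTROL word (the transfer size
 * occupies the low 11 bits); pnx4008_dma_parse_control() performs the
 * inverse, shifting each field back out of a register value.
 */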
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
			     unsigned long *ctrl)
{
	int i = 0, dbsize, sbsize, err = 0;

	if (!ctrl || !ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	*ctrl = 0;

	switch (ch_ctrl->tc_mask) {
	/* ... */
	}

	switch (ch_ctrl->cacheable) {
	/* ... */
	}

	switch (ch_ctrl->bufferable) {
	/* ... */
	}

	switch (ch_ctrl->priv_mode) {
	/* ... */
	}

	switch (ch_ctrl->di) {
	/* ... */
	}

	switch (ch_ctrl->si) {
	/* ... */
	}

	switch (ch_ctrl->dest_ahb1) {
	/* ... */
	}

	switch (ch_ctrl->src_ahb1) {
	/* ... */
	}

	switch (ch_ctrl->dwidth) {
	/* ... */
	}

	switch (ch_ctrl->swidth) {
	/* ... */
	}

	dbsize = ch_ctrl->dbsize;
	while (!(dbsize & 1)) {
		dbsize >>= 1;
		i++;
	}
	if (ch_ctrl->dbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	}
	/* ... */

	i = 0;
	sbsize = ch_ctrl->sbsize;
	while (!(sbsize & 1)) {
		sbsize >>= 1;
		i++;
	}
	if (ch_ctrl->sbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	}
	/* ... */

	if (ch_ctrl->tr_size > 0x7ff) {
		err = -E2BIG;
		goto out;
	}
	*ctrl |= ch_ctrl->tr_size & 0x7ff;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);
int pnx4008_dma_parse_control(unsigned long ctrl,
			      struct pnx4008_dma_ch_ctrl * ch_ctrl)
{
	int err = 0;

	if (!ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	ch_ctrl->tr_size = ctrl & 0x7ff;
	ctrl >>= 12;

	ch_ctrl->sbsize = 1 << (ctrl & 7);
	if (ch_ctrl->sbsize > 1)
		ch_ctrl->sbsize <<= 1;
	ctrl >>= 3;

	ch_ctrl->dbsize = 1 << (ctrl & 7);
	if (ch_ctrl->dbsize > 1)
		ch_ctrl->dbsize <<= 1;
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->swidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->swidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->swidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->dwidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->dwidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->dwidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	ch_ctrl->src_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->dest_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->si = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->di = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->priv_mode = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->bufferable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->cacheable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->tc_mask = ctrl & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);
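
/*
 * The channel CONFIG word is handled the same way: pack_config encodes the
 * halt/active/lock/itc/ie flags, the flow-control mode and the source and
 * destination peripheral numbers (5-bit fields at bits 1 and 6), while
 * parse_config decodes an existing register value back into a
 * pnx4008_dma_ch_config.
 */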
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
			    unsigned long *cfg)
{
	int err = 0;

	if (!cfg || !ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	*cfg = 0;

	switch (ch_cfg->halt) {
	/* ... */
	}

	switch (ch_cfg->active) {
	/* ... */
	}

	switch (ch_cfg->lock) {
	/* ... */
	}

	switch (ch_cfg->itc) {
	/* ... */
	}

	switch (ch_cfg->ie) {
	/* ... */
	}

	switch (ch_cfg->flow_cntrl) {
	/* ... */
	case FC_PER2PER_DPER:
		/* ... */
	case FC_PER2PER_SPER:
		/* ... */
	}

	*cfg &= ~(0x1f << 6);
	*cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

	*cfg &= ~(0x1f << 1);
	*cfg |= ((ch_cfg->src_per & 0x1f) << 1);

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);
int pnx4008_dma_parse_config(unsigned long cfg,
			     struct pnx4008_dma_ch_config * ch_cfg)
{
	int err = 0;

	if (!ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	cfg >>= 1;

	ch_cfg->src_per = cfg & 0x1f;
	cfg >>= 5;

	ch_cfg->dest_per = cfg & 0x1f;
	cfg >>= 5;

	switch (cfg & 7) {
	case 0:
		ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
		break;
	case 1:
		ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
		break;
	case 2:
		ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
		break;
	case 3:
		ch_cfg->flow_cntrl = FC_PER2PER_DMA;
		break;
	case 4:
		ch_cfg->flow_cntrl = FC_PER2PER_DPER;
		break;
	case 5:
		ch_cfg->flow_cntrl = FC_MEM2PER_PER;
		break;
	case 6:
		ch_cfg->flow_cntrl = FC_PER2MEM_PER;
		break;
	case 7:
		ch_cfg->flow_cntrl = FC_PER2PER_SPER;
		break;
	}
	cfg >>= 3;

	ch_cfg->ie = cfg & 1;
	cfg >>= 1;

	ch_cfg->itc = cfg & 1;
	cfg >>= 1;

	ch_cfg->lock = cfg & 1;
	cfg >>= 1;

	ch_cfg->active = cfg & 1;
	cfg >>= 1;

	ch_cfg->halt = cfg & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);
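
/*
 * The CONTROL word can only encode an 11-bit transfer size (0x7ff), so
 * larger transfers are split into a chain of linked-list entries of
 * roughly equal length.  pnx4008_dma_split_head_entry() splits the head
 * descriptor of a pnx4008_dma_config and pnx4008_dma_split_ll_entry()
 * splits an individual pnx4008_dma_ll, advancing the source/destination
 * address for each chunk when the corresponding increment bit is set.
 */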
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
				  struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		config->ch_ctrl &= ~0x7ff;
		config->ch_ctrl |= new_len;
		if (!config->is_ll) {
			config->is_ll = 1;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr = config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = config->ll;
			unsigned long ll_dma_old = config->ll_dma;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr = config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		config->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
				struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		cur_ll->ch_ctrl &= ~0x7ff;
		cur_ll->ch_ctrl |= new_len;
		if (!cur_ll->next) {
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr = cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = cur_ll->next;
			unsigned long ll_dma_old = cur_ll->next_dma;
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr = cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		cur_ll->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);
int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	__raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
	__raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

	if (config->is_ll)
		__raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
	else
		__raw_writel(0, DMAC_Cx_LLI(ch));

	__raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
	__raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);
int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
		return -EINVAL;

	pnx4008_dma_lock();
	config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

	config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
	config->is_ll = config->ll_dma ? 1 : 0;

	config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
	config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);
int pnx4008_dma_ch_enable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg |= 1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);
int pnx4008_dma_ch_disable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg &= ~1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);
int pnx4008_dma_ch_enabled(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);
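
/*
 * Single DMA interrupt: read the combined, terminal-count and error status
 * registers, call the handler registered for each signalling channel with a
 * DMA_TC_INT/DMA_ERR_INT cause mask, and acknowledge the per-channel bits.
 */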
static irqreturn_t dma_irq_handler(int irq, void *dev_id)
{
	int i;
	unsigned long dint = __raw_readl(DMAC_INT_STAT);
	unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
	unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
	unsigned long i_bit;

	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		i_bit = 1 << i;
		if (dint & i_bit) {
			struct dma_channel *channel = &dma_channels[i];

			if (channel->name && channel->irq_handler) {
				int cause = 0;

				if (eint & i_bit)
					cause |= DMA_ERR_INT;
				if (tcint & i_bit)
					cause |= DMA_TC_INT;
				channel->irq_handler(i, cause, channel->data);
			} else {
				/*
				 * IRQ for an unregistered DMA channel
				 */
				printk(KERN_WARNING
				       "spurious IRQ for DMA channel %d\n", i);
			}
			if (tcint & i_bit)
				__raw_writel(i_bit, DMAC_INT_TC_CLEAR);
			if (eint & i_bit)
				__raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
		}
	}

	return IRQ_HANDLED;
}
static int __init pnx4008_dma_init(void)
{
	int ret, i;

	ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
	if (ret) {
		printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
		goto out;
	}

	ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
	ll_pool.cur = ll_pool.vaddr =
	    dma_alloc_coherent(NULL,
			       ll_pool.count * sizeof(struct pnx4008_dma_ll),
			       &ll_pool.dma_addr, GFP_KERNEL);

	if (!ll_pool.vaddr) {
		ret = -ENOMEM;
		free_irq(DMA_INT, NULL);
		goto out;
	}

	for (i = 0; i < ll_pool.count - 1; i++) {
		void **addr = ll_pool.vaddr +
		    i * sizeof(struct pnx4008_dma_ll);
		*addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
	}
	*(long *)(ll_pool.vaddr +
		  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
	    (long)ll_pool.vaddr;

	__raw_writel(1, DMAC_CONFIG);

out:
	return ret;
}

arch_initcall(pnx4008_dma_init);