/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>

#include <asm/system.h>
#include <asm/hardware.h>
#include <asm/dma-mapping.h>
#include <asm/mach/dma.h>
#include <asm/arch/clock.h>
33 static struct dma_channel
{
35 void (*irq_handler
) (int, int, void *);
37 struct pnx4008_dma_ll
*ll
;
41 } dma_channels
[MAX_DMA_CHANNELS
];
43 static struct ll_pool
{
/* Protects ll_pool's free list and count. */
static DEFINE_SPINLOCK(ll_lock);
52 struct pnx4008_dma_ll
*pnx4008_alloc_ll_entry(dma_addr_t
* ll_dma
)
54 struct pnx4008_dma_ll
*ll
= NULL
;
57 spin_lock_irqsave(&ll_lock
, flags
);
58 if (ll_pool
.count
> 4) { /* can give one more */
59 ll
= *(struct pnx4008_dma_ll
**) ll_pool
.cur
;
60 *ll_dma
= ll_pool
.dma_addr
+ ((void *)ll
- ll_pool
.vaddr
);
61 *(void **)ll_pool
.cur
= **(void ***)ll_pool
.cur
;
62 memset(ll
, 0, sizeof(*ll
));
65 spin_unlock_irqrestore(&ll_lock
, flags
);
70 EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry
);
72 void pnx4008_free_ll_entry(struct pnx4008_dma_ll
* ll
, dma_addr_t ll_dma
)
77 if ((unsigned long)((long)ll
- (long)ll_pool
.vaddr
) > 0x4000) {
78 printk(KERN_ERR
"Trying to free entry not allocated by DMA\n");
82 if (ll
->flags
& DMA_BUFFER_ALLOCATED
)
83 ll
->free(ll
->alloc_data
);
85 spin_lock_irqsave(&ll_lock
, flags
);
86 *(long *)ll
= *(long *)ll_pool
.cur
;
87 *(long *)ll_pool
.cur
= (long)ll
;
89 spin_unlock_irqrestore(&ll_lock
, flags
);
93 EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry
);
95 void pnx4008_free_ll(u32 ll_dma
, struct pnx4008_dma_ll
* ll
)
97 struct pnx4008_dma_ll
*ptr
;
103 pnx4008_free_ll_entry(ll
, ll_dma
);
110 EXPORT_SYMBOL_GPL(pnx4008_free_ll
);
/* Number of currently claimed channels; gates the DMA clock/block enable. */
static int dma_channels_requested;	/* statics are zeroed; no "= 0" needed */
114 static inline void dma_increment_usage(void)
116 if (!dma_channels_requested
++) {
117 struct clk
*clk
= clk_get(0, "dma_ck");
119 clk_set_rate(clk
, 1);
122 pnx4008_config_dma(-1, -1, 1);
125 static inline void dma_decrement_usage(void)
127 if (!--dma_channels_requested
) {
128 struct clk
*clk
= clk_get(0, "dma_ck");
130 clk_set_rate(clk
, 0);
133 pnx4008_config_dma(-1, -1, 0);
/* Serializes access to the shared DMAC registers and channel table. */
static DEFINE_SPINLOCK(dma_lock);
140 static inline void pnx4008_dma_lock(void)
142 spin_lock_irq(&dma_lock
);
145 static inline void pnx4008_dma_unlock(void)
147 spin_unlock_irq(&dma_lock
);
/* True iff c is a legal DMA channel index. */
#define VALID_CHANNEL(c) (((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
152 int pnx4008_request_channel(char *name
, int ch
,
153 void (*irq_handler
) (int, int, void *), void *data
)
157 /* basic sanity checks */
158 if (!name
|| (ch
!= -1 && !VALID_CHANNEL(ch
)))
163 /* try grabbing a DMA channel with the requested priority */
164 for (i
= MAX_DMA_CHANNELS
- 1; i
>= 0; i
--) {
165 if (!dma_channels
[i
].name
&& (ch
== -1 || ch
== i
)) {
172 dma_increment_usage();
173 dma_channels
[i
].name
= name
;
174 dma_channels
[i
].irq_handler
= irq_handler
;
175 dma_channels
[i
].data
= data
;
176 dma_channels
[i
].ll
= NULL
;
177 dma_channels
[i
].ll_dma
= 0;
179 printk(KERN_WARNING
"No more available DMA channels for %s\n",
184 pnx4008_dma_unlock();
188 EXPORT_SYMBOL_GPL(pnx4008_request_channel
);
190 void pnx4008_free_channel(int ch
)
192 if (!dma_channels
[ch
].name
) {
194 "%s: trying to free channel %d which is already freed\n",
195 <<<<<<< HEAD
:arch
/arm
/mach
-pnx4008
/dma
.c
199 >>>>>>> 264e3e889d86e552b4191d69bb60f4f3b383135a
:arch
/arm
/mach
-pnx4008
/dma
.c
204 pnx4008_free_ll(dma_channels
[ch
].ll_dma
, dma_channels
[ch
].ll
);
205 dma_channels
[ch
].ll
= NULL
;
206 dma_decrement_usage();
208 dma_channels
[ch
].name
= NULL
;
209 pnx4008_dma_unlock();
212 EXPORT_SYMBOL_GPL(pnx4008_free_channel
);
214 int pnx4008_config_dma(int ahb_m1_be
, int ahb_m2_be
, int enable
)
216 unsigned long dma_cfg
= __raw_readl(DMAC_CONFIG
);
220 dma_cfg
&= ~(1 << 1);
231 dma_cfg
&= ~(1 << 2);
242 dma_cfg
&= ~(1 << 0);
252 __raw_writel(dma_cfg
, DMAC_CONFIG
);
253 pnx4008_dma_unlock();
258 EXPORT_SYMBOL_GPL(pnx4008_config_dma
);
260 int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl
* ch_ctrl
,
263 int i
= 0, dbsize
, sbsize
, err
= 0;
265 if (!ctrl
|| !ch_ctrl
) {
272 switch (ch_ctrl
->tc_mask
) {
284 switch (ch_ctrl
->cacheable
) {
295 switch (ch_ctrl
->bufferable
) {
306 switch (ch_ctrl
->priv_mode
) {
317 switch (ch_ctrl
->di
) {
328 switch (ch_ctrl
->si
) {
339 switch (ch_ctrl
->dest_ahb1
) {
350 switch (ch_ctrl
->src_ahb1
) {
361 switch (ch_ctrl
->dwidth
) {
378 switch (ch_ctrl
->swidth
) {
395 dbsize
= ch_ctrl
->dbsize
;
396 while (!(dbsize
& 1)) {
400 if (ch_ctrl
->dbsize
!= 1 || i
> 8 || i
== 1) {
408 sbsize
= ch_ctrl
->sbsize
;
409 while (!(sbsize
& 1)) {
413 if (ch_ctrl
->sbsize
!= 1 || i
> 8 || i
== 1) {
421 if (ch_ctrl
->tr_size
> 0x7ff) {
426 *ctrl
|= ch_ctrl
->tr_size
& 0x7ff;
432 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control
);
434 int pnx4008_dma_parse_control(unsigned long ctrl
,
435 struct pnx4008_dma_ch_ctrl
* ch_ctrl
)
444 ch_ctrl
->tr_size
= ctrl
& 0x7ff;
447 ch_ctrl
->sbsize
= 1 << (ctrl
& 7);
448 if (ch_ctrl
->sbsize
> 1)
449 ch_ctrl
->sbsize
<<= 1;
452 ch_ctrl
->dbsize
= 1 << (ctrl
& 7);
453 if (ch_ctrl
->dbsize
> 1)
454 ch_ctrl
->dbsize
<<= 1;
459 ch_ctrl
->swidth
= WIDTH_BYTE
;
462 ch_ctrl
->swidth
= WIDTH_HWORD
;
465 ch_ctrl
->swidth
= WIDTH_WORD
;
475 ch_ctrl
->dwidth
= WIDTH_BYTE
;
478 ch_ctrl
->dwidth
= WIDTH_HWORD
;
481 ch_ctrl
->dwidth
= WIDTH_WORD
;
489 ch_ctrl
->src_ahb1
= ctrl
& 1;
492 ch_ctrl
->dest_ahb1
= ctrl
& 1;
495 ch_ctrl
->si
= ctrl
& 1;
498 ch_ctrl
->di
= ctrl
& 1;
501 ch_ctrl
->priv_mode
= ctrl
& 1;
504 ch_ctrl
->bufferable
= ctrl
& 1;
507 ch_ctrl
->cacheable
= ctrl
& 1;
510 ch_ctrl
->tc_mask
= ctrl
& 1;
516 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control
);
518 int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config
* ch_cfg
,
523 if (!cfg
|| !ch_cfg
) {
530 switch (ch_cfg
->halt
) {
541 switch (ch_cfg
->active
) {
552 switch (ch_cfg
->lock
) {
563 switch (ch_cfg
->itc
) {
574 switch (ch_cfg
->ie
) {
585 switch (ch_cfg
->flow_cntrl
) {
601 case FC_PER2PER_DPER
:
613 case FC_PER2PER_SPER
:
621 *cfg
&= ~(0x1f << 6);
622 *cfg
|= ((ch_cfg
->dest_per
& 0x1f) << 6);
624 *cfg
&= ~(0x1f << 1);
625 *cfg
|= ((ch_cfg
->src_per
& 0x1f) << 1);
631 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config
);
633 int pnx4008_dma_parse_config(unsigned long cfg
,
634 struct pnx4008_dma_ch_config
* ch_cfg
)
645 ch_cfg
->src_per
= cfg
& 0x1f;
648 ch_cfg
->dest_per
= cfg
& 0x1f;
653 ch_cfg
->flow_cntrl
= FC_MEM2MEM_DMA
;
656 ch_cfg
->flow_cntrl
= FC_MEM2PER_DMA
;
659 ch_cfg
->flow_cntrl
= FC_PER2MEM_DMA
;
662 ch_cfg
->flow_cntrl
= FC_PER2PER_DMA
;
665 ch_cfg
->flow_cntrl
= FC_PER2PER_DPER
;
668 ch_cfg
->flow_cntrl
= FC_MEM2PER_PER
;
671 ch_cfg
->flow_cntrl
= FC_PER2MEM_PER
;
674 ch_cfg
->flow_cntrl
= FC_PER2PER_SPER
;
678 ch_cfg
->ie
= cfg
& 1;
681 ch_cfg
->itc
= cfg
& 1;
684 ch_cfg
->lock
= cfg
& 1;
687 ch_cfg
->active
= cfg
& 1;
690 ch_cfg
->halt
= cfg
& 1;
696 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config
);
698 void pnx4008_dma_split_head_entry(struct pnx4008_dma_config
* config
,
699 struct pnx4008_dma_ch_ctrl
* ctrl
)
701 int new_len
= ctrl
->tr_size
, num_entries
= 0;
702 int old_len
= new_len
;
703 int src_width
, dest_width
, count
= 1;
705 switch (ctrl
->swidth
) {
719 switch (ctrl
->dwidth
) {
733 while (new_len
> 0x7FF) {
735 new_len
= (ctrl
->tr_size
+ num_entries
) / (num_entries
+ 1);
737 if (num_entries
!= 0) {
738 struct pnx4008_dma_ll
*ll
= NULL
;
739 config
->ch_ctrl
&= ~0x7ff;
740 config
->ch_ctrl
|= new_len
;
741 if (!config
->is_ll
) {
743 while (num_entries
) {
746 pnx4008_alloc_ll_entry(&config
->
751 pnx4008_alloc_ll_entry(&ll
->
759 src_width
* new_len
* count
;
761 ll
->src_addr
= config
->src_addr
;
765 dest_width
* new_len
* count
;
767 ll
->dest_addr
= config
->dest_addr
;
768 ll
->ch_ctrl
= config
->ch_ctrl
& 0x7fffffff;
775 struct pnx4008_dma_ll
*ll_old
= config
->ll
;
776 unsigned long ll_dma_old
= config
->ll_dma
;
777 while (num_entries
) {
780 pnx4008_alloc_ll_entry(&config
->
785 pnx4008_alloc_ll_entry(&ll
->
793 src_width
* new_len
* count
;
795 ll
->src_addr
= config
->src_addr
;
799 dest_width
* new_len
* count
;
801 ll
->dest_addr
= config
->dest_addr
;
802 ll
->ch_ctrl
= config
->ch_ctrl
& 0x7fffffff;
808 ll
->next_dma
= ll_dma_old
;
811 /* adjust last length/tc */
812 ll
->ch_ctrl
= config
->ch_ctrl
& (~0x7ff);
813 ll
->ch_ctrl
|= old_len
- new_len
* (count
- 1);
814 config
->ch_ctrl
&= 0x7fffffff;
818 EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry
);
820 void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll
* cur_ll
,
821 struct pnx4008_dma_ch_ctrl
* ctrl
)
823 int new_len
= ctrl
->tr_size
, num_entries
= 0;
824 int old_len
= new_len
;
825 int src_width
, dest_width
, count
= 1;
827 switch (ctrl
->swidth
) {
841 switch (ctrl
->dwidth
) {
855 while (new_len
> 0x7FF) {
857 new_len
= (ctrl
->tr_size
+ num_entries
) / (num_entries
+ 1);
859 if (num_entries
!= 0) {
860 struct pnx4008_dma_ll
*ll
= NULL
;
861 cur_ll
->ch_ctrl
&= ~0x7ff;
862 cur_ll
->ch_ctrl
|= new_len
;
864 while (num_entries
) {
867 pnx4008_alloc_ll_entry(&cur_ll
->
872 pnx4008_alloc_ll_entry(&ll
->
880 src_width
* new_len
* count
;
882 ll
->src_addr
= cur_ll
->src_addr
;
886 dest_width
* new_len
* count
;
888 ll
->dest_addr
= cur_ll
->dest_addr
;
889 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& 0x7fffffff;
896 struct pnx4008_dma_ll
*ll_old
= cur_ll
->next
;
897 unsigned long ll_dma_old
= cur_ll
->next_dma
;
898 while (num_entries
) {
901 pnx4008_alloc_ll_entry(&cur_ll
->
906 pnx4008_alloc_ll_entry(&ll
->
914 src_width
* new_len
* count
;
916 ll
->src_addr
= cur_ll
->src_addr
;
920 dest_width
* new_len
* count
;
922 ll
->dest_addr
= cur_ll
->dest_addr
;
923 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& 0x7fffffff;
930 ll
->next_dma
= ll_dma_old
;
933 /* adjust last length/tc */
934 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& (~0x7ff);
935 ll
->ch_ctrl
|= old_len
- new_len
* (count
- 1);
936 cur_ll
->ch_ctrl
&= 0x7fffffff;
940 EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry
);
942 int pnx4008_config_channel(int ch
, struct pnx4008_dma_config
* config
)
944 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
948 __raw_writel(config
->src_addr
, DMAC_Cx_SRC_ADDR(ch
));
949 __raw_writel(config
->dest_addr
, DMAC_Cx_DEST_ADDR(ch
));
952 __raw_writel(config
->ll_dma
, DMAC_Cx_LLI(ch
));
954 __raw_writel(0, DMAC_Cx_LLI(ch
));
956 __raw_writel(config
->ch_ctrl
, DMAC_Cx_CONTROL(ch
));
957 __raw_writel(config
->ch_cfg
, DMAC_Cx_CONFIG(ch
));
958 pnx4008_dma_unlock();
964 EXPORT_SYMBOL_GPL(pnx4008_config_channel
);
966 int pnx4008_channel_get_config(int ch
, struct pnx4008_dma_config
* config
)
968 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
|| !config
)
972 config
->ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
973 config
->ch_ctrl
= __raw_readl(DMAC_Cx_CONTROL(ch
));
975 config
->ll_dma
= __raw_readl(DMAC_Cx_LLI(ch
));
976 config
->is_ll
= config
->ll_dma
? 1 : 0;
978 config
->src_addr
= __raw_readl(DMAC_Cx_SRC_ADDR(ch
));
979 config
->dest_addr
= __raw_readl(DMAC_Cx_DEST_ADDR(ch
));
980 pnx4008_dma_unlock();
985 EXPORT_SYMBOL_GPL(pnx4008_channel_get_config
);
987 int pnx4008_dma_ch_enable(int ch
)
989 unsigned long ch_cfg
;
991 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
995 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
997 __raw_writel(ch_cfg
, DMAC_Cx_CONFIG(ch
));
998 pnx4008_dma_unlock();
1003 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable
);
1005 int pnx4008_dma_ch_disable(int ch
)
1007 unsigned long ch_cfg
;
1009 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
1013 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
1015 __raw_writel(ch_cfg
, DMAC_Cx_CONFIG(ch
));
1016 pnx4008_dma_unlock();
1021 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable
);
1023 int pnx4008_dma_ch_enabled(int ch
)
1025 unsigned long ch_cfg
;
1027 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
1031 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
1032 pnx4008_dma_unlock();
1037 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled
);
1039 static irqreturn_t
dma_irq_handler(int irq
, void *dev_id
)
1042 unsigned long dint
= __raw_readl(DMAC_INT_STAT
);
1043 unsigned long tcint
= __raw_readl(DMAC_INT_TC_STAT
);
1044 unsigned long eint
= __raw_readl(DMAC_INT_ERR_STAT
);
1045 unsigned long i_bit
;
1047 for (i
= MAX_DMA_CHANNELS
- 1; i
>= 0; i
--) {
1050 struct dma_channel
*channel
= &dma_channels
[i
];
1052 if (channel
->name
&& channel
->irq_handler
) {
1056 cause
|= DMA_ERR_INT
;
1058 cause
|= DMA_TC_INT
;
1059 channel
->irq_handler(i
, cause
, channel
->data
);
1062 * IRQ for an unregistered DMA channel
1065 "spurious IRQ for DMA channel %d\n", i
);
1068 __raw_writel(i_bit
, DMAC_INT_TC_CLEAR
);
1070 __raw_writel(i_bit
, DMAC_INT_ERR_CLEAR
);
1076 static int __init
pnx4008_dma_init(void)
1080 ret
= request_irq(DMA_INT
, dma_irq_handler
, 0, "DMA", NULL
);
1082 printk(KERN_CRIT
"Wow! Can't register IRQ for DMA\n");
1086 ll_pool
.count
= 0x4000 / sizeof(struct pnx4008_dma_ll
);
1087 ll_pool
.cur
= ll_pool
.vaddr
=
1088 dma_alloc_coherent(NULL
, ll_pool
.count
* sizeof(struct pnx4008_dma_ll
),
1089 &ll_pool
.dma_addr
, GFP_KERNEL
);
1091 if (!ll_pool
.vaddr
) {
1093 free_irq(DMA_INT
, NULL
);
1097 for (i
= 0; i
< ll_pool
.count
- 1; i
++) {
1098 void **addr
= ll_pool
.vaddr
+ i
* sizeof(struct pnx4008_dma_ll
);
1099 *addr
= (void *)addr
+ sizeof(struct pnx4008_dma_ll
);
1101 *(long *)(ll_pool
.vaddr
+
1102 (ll_pool
.count
- 1) * sizeof(struct pnx4008_dma_ll
)) =
1103 (long)ll_pool
.vaddr
;
1105 __raw_writel(1, DMAC_CONFIG
);
1110 arch_initcall(pnx4008_dma_init
);