/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
16 #include <linux/module.h>
17 #include <linux/init.h>
18 #include <linux/kernel.h>
19 #include <linux/interrupt.h>
20 #include <linux/errno.h>
21 #include <linux/err.h>
22 #include <linux/dma-mapping.h>
23 #include <linux/clk.h>
25 #include <asm/system.h>
26 #include <asm/hardware.h>
28 #include <asm/dma-mapping.h>
30 #include <asm/mach/dma.h>
31 #include <asm/arch/clock.h>
/*
 * Per-channel bookkeeping for the PNX4008 DMA controller.
 * NOTE(review): this extract is missing lines (gaps in the embedded
 * numbering) — other fields (e.g. name/data/ll_dma, referenced below)
 * are not visible here.
 */
33 static struct dma_channel
{
35 void (*irq_handler
) (int, int, void *);
37 struct pnx4008_dma_ll
*ll
;
41 } dma_channels
[MAX_DMA_CHANNELS
];
/*
 * Pool of linked-list (LL) descriptors kept in a DMA-coherent buffer.
 * NOTE(review): the struct body is missing from this extract; later code
 * accesses .vaddr, .cur, .dma_addr and .count members.
 */
43 static struct ll_pool
{
/* Spinlock protecting the LL descriptor free list. */
50 static DEFINE_SPINLOCK(ll_lock
);
/*
 * Pop one pnx4008_dma_ll descriptor off the free list and zero it.
 * *ll_dma receives the bus address of the entry (pool base dma_addr plus
 * the entry's offset inside the pool's virtual buffer).
 * Returns NULL when fewer than 5 entries remain (keeps a small reserve).
 * NOTE(review): lines are missing from this extract (flags declaration,
 * count decrement, return statement).
 */
52 struct pnx4008_dma_ll
*pnx4008_alloc_ll_entry(dma_addr_t
* ll_dma
)
54 struct pnx4008_dma_ll
*ll
= NULL
;
57 spin_lock_irqsave(&ll_lock
, flags
);
58 if (ll_pool
.count
> 4) { /* can give one more */
/* The free list is threaded through the entries themselves:
 * ll_pool.cur points at the head entry, whose first word is the
 * next free entry. */
59 ll
= *(struct pnx4008_dma_ll
**) ll_pool
.cur
;
60 *ll_dma
= ll_pool
.dma_addr
+ ((void *)ll
- ll_pool
.vaddr
);
61 *(void **)ll_pool
.cur
= **(void ***)ll_pool
.cur
;
62 memset(ll
, 0, sizeof(*ll
));
65 spin_unlock_irqrestore(&ll_lock
, flags
);
70 EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry
);
/*
 * Return one LL descriptor to the free list.
 * Rejects pointers outside the 16 KiB (0x4000) pool; releases an attached
 * data buffer first when DMA_BUFFER_ALLOCATED is set, then pushes the
 * entry back onto the intrusive free list under ll_lock.
 * NOTE(review): lines are missing from this extract (flags declaration,
 * early return, count increment, closing brace).
 */
72 void pnx4008_free_ll_entry(struct pnx4008_dma_ll
* ll
, dma_addr_t ll_dma
)
/* Sanity check: entry must lie within the pool's 0x4000-byte buffer. */
77 if ((unsigned long)((long)ll
- (long)ll_pool
.vaddr
) > 0x4000) {
78 printk(KERN_ERR
"Trying to free entry not allocated by DMA\n");
82 if (ll
->flags
& DMA_BUFFER_ALLOCATED
)
83 ll
->free(ll
->alloc_data
);
85 spin_lock_irqsave(&ll_lock
, flags
);
/* Push the entry on the head of the intrusive free list. */
86 *(long *)ll
= *(long *)ll_pool
.cur
;
87 *(long *)ll_pool
.cur
= (long)ll
;
89 spin_unlock_irqrestore(&ll_lock
, flags
);
93 EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry
);
/*
 * Free a whole chain of LL descriptors.
 * NOTE(review): the walking loop is missing from this extract — only the
 * per-entry pnx4008_free_ll_entry() call is visible; presumably 'ptr'
 * follows the ->next / ->next_dma links. Confirm against the full source.
 */
95 void pnx4008_free_ll(u32 ll_dma
, struct pnx4008_dma_ll
* ll
)
97 struct pnx4008_dma_ll
*ptr
;
103 pnx4008_free_ll_entry(ll
, ll_dma
);
110 EXPORT_SYMBOL_GPL(pnx4008_free_ll
);
/* Usage count of requested DMA channels; gates the dma_ck clock below. */
112 static int dma_channels_requested
= 0;
/*
 * On the first requested channel: enable the "dma_ck" clock and turn the
 * DMA controller on via pnx4008_config_dma(-1, -1, 1).
 * NOTE(review): clk error handling / clk_put lines are missing from this
 * extract (gap between embedded lines 119 and 122).
 */
114 static inline void dma_increment_usage(void)
116 if (!dma_channels_requested
++) {
117 struct clk
*clk
= clk_get(0, "dma_ck");
119 clk_set_rate(clk
, 1);
122 pnx4008_config_dma(-1, -1, 1);
/*
 * Counterpart of dma_increment_usage(): when the last channel is freed,
 * gate the "dma_ck" clock off and disable the controller.
 * NOTE(review): clk error handling / clk_put lines are missing here too.
 */
125 static inline void dma_decrement_usage(void)
127 if (!--dma_channels_requested
) {
128 struct clk
*clk
= clk_get(0, "dma_ck");
130 clk_set_rate(clk
, 0);
133 pnx4008_config_dma(-1, -1, 0);
/* Spinlock serialising access to DMA controller registers/channel table. */
138 static DEFINE_SPINLOCK(dma_lock
);
140 static inline void pnx4008_dma_lock(void)
142 spin_lock_irq(&dma_lock
);
145 static inline void pnx4008_dma_unlock(void)
147 spin_unlock_irq(&dma_lock
);
/* True when c is a usable channel index: 0 <= c < MAX_DMA_CHANNELS. */
150 #define VALID_CHANNEL(c) (((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
/*
 * Claim a DMA channel. ch == -1 means "any free channel"; otherwise only
 * the specific index is tried. Scans from the highest index downward and
 * records name/handler/data in dma_channels[]. Presumably returns the
 * channel index on success and a negative errno otherwise — the return
 * statements are missing from this extract.
 */
152 int pnx4008_request_channel(char *name
, int ch
,
153 void (*irq_handler
) (int, int, void *), void *data
)
157 /* basic sanity checks */
158 if (!name
|| (ch
!= -1 && !VALID_CHANNEL(ch
)))
163 /* try grabbing a DMA channel with the requested priority */
164 for (i
= MAX_DMA_CHANNELS
- 1; i
>= 0; i
--) {
/* Free slot is marked by a NULL .name; honour an explicit ch request. */
165 if (!dma_channels
[i
].name
&& (ch
== -1 || ch
== i
)) {
172 dma_increment_usage();
173 dma_channels
[i
].name
= name
;
174 dma_channels
[i
].irq_handler
= irq_handler
;
175 dma_channels
[i
].data
= data
;
176 dma_channels
[i
].ll
= NULL
;
177 dma_channels
[i
].ll_dma
= 0;
179 printk(KERN_WARNING
"No more available DMA channels for %s\n",
184 pnx4008_dma_unlock();
188 EXPORT_SYMBOL_GPL(pnx4008_request_channel
);
/*
 * Release a previously requested channel: free its LL chain, drop the
 * usage count (possibly gating the clock off) and mark the slot free by
 * clearing .name. Warns if the channel was not allocated.
 * NOTE(review): locking calls and early-return lines are missing from
 * this extract (only the unlock at embedded line 205 is visible).
 */
190 void pnx4008_free_channel(int ch
)
192 if (!dma_channels
[ch
].name
) {
194 "%s: trying to free channel %d which is already freed\n",
200 pnx4008_free_ll(dma_channels
[ch
].ll_dma
, dma_channels
[ch
].ll
);
201 dma_channels
[ch
].ll
= NULL
;
202 dma_decrement_usage();
204 dma_channels
[ch
].name
= NULL
;
205 pnx4008_dma_unlock();
208 EXPORT_SYMBOL_GPL(pnx4008_free_channel
);
/*
 * Read-modify-write the DMAC_CONFIG register: bit 1 = AHB master 1
 * endianness, bit 2 = AHB master 2 endianness, bit 0 = controller enable.
 * A -1 argument leaves the corresponding field untouched.
 * NOTE(review): the switch/case arms that set these bits are missing from
 * this extract — only the clearing masks are visible.
 */
210 int pnx4008_config_dma(int ahb_m1_be
, int ahb_m2_be
, int enable
)
212 unsigned long dma_cfg
= __raw_readl(DMAC_CONFIG
);
216 dma_cfg
&= ~(1 << 1);
227 dma_cfg
&= ~(1 << 2);
238 dma_cfg
&= ~(1 << 0);
248 __raw_writel(dma_cfg
, DMAC_CONFIG
);
249 pnx4008_dma_unlock();
254 EXPORT_SYMBOL_GPL(pnx4008_config_dma
);
/*
 * Pack a pnx4008_dma_ch_ctrl description into the hardware channel
 * CONTROL register word (*ctrl): TC interrupt mask, cache/buffer/priv
 * bits, address increments, AHB selects, widths, log2-encoded burst
 * sizes, and the 11-bit transfer size in bits 0..10.
 * NOTE(review): all switch-case bodies and the burst-size shift loops are
 * partially missing from this extract; only the selectors and final
 * tr_size packing are visible.
 */
256 int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl
* ch_ctrl
,
259 int i
= 0, dbsize
, sbsize
, err
= 0;
261 if (!ctrl
|| !ch_ctrl
) {
268 switch (ch_ctrl
->tc_mask
) {
280 switch (ch_ctrl
->cacheable
) {
291 switch (ch_ctrl
->bufferable
) {
302 switch (ch_ctrl
->priv_mode
) {
313 switch (ch_ctrl
->di
) {
324 switch (ch_ctrl
->si
) {
335 switch (ch_ctrl
->dest_ahb1
) {
346 switch (ch_ctrl
->src_ahb1
) {
357 switch (ch_ctrl
->dwidth
) {
374 switch (ch_ctrl
->swidth
) {
/* Destination burst size must be a power of two in 1..256, not 2. */
391 dbsize
= ch_ctrl
->dbsize
;
392 while (!(dbsize
& 1)) {
396 if (ch_ctrl
->dbsize
!= 1 || i
> 8 || i
== 1) {
/* Same validation for the source burst size. */
404 sbsize
= ch_ctrl
->sbsize
;
405 while (!(sbsize
& 1)) {
409 if (ch_ctrl
->sbsize
!= 1 || i
> 8 || i
== 1) {
/* Transfer size occupies 11 bits; larger values are rejected. */
417 if (ch_ctrl
->tr_size
> 0x7ff) {
422 *ctrl
|= ch_ctrl
->tr_size
& 0x7ff;
428 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control
);
/*
 * Inverse of pnx4008_dma_pack_control(): decode a CONTROL register word
 * into a pnx4008_dma_ch_ctrl struct. ctrl is shifted right between field
 * extractions (the shift statements fall in the gaps of this extract) so
 * each "& 0x7ff" / "& 7" / "& 1" picks off the next field. Burst sizes
 * decode from the 3-bit log encoding: 0 -> 1, n>0 -> 1 << (n + 1).
 */
430 int pnx4008_dma_parse_control(unsigned long ctrl
,
431 struct pnx4008_dma_ch_ctrl
* ch_ctrl
)
440 ch_ctrl
->tr_size
= ctrl
& 0x7ff;
443 ch_ctrl
->sbsize
= 1 << (ctrl
& 7);
444 if (ch_ctrl
->sbsize
> 1)
445 ch_ctrl
->sbsize
<<= 1;
448 ch_ctrl
->dbsize
= 1 << (ctrl
& 7);
449 if (ch_ctrl
->dbsize
> 1)
450 ch_ctrl
->dbsize
<<= 1;
/* Width fields: switch on a 3-bit code (cases live in the gaps). */
455 ch_ctrl
->swidth
= WIDTH_BYTE
;
458 ch_ctrl
->swidth
= WIDTH_HWORD
;
461 ch_ctrl
->swidth
= WIDTH_WORD
;
471 ch_ctrl
->dwidth
= WIDTH_BYTE
;
474 ch_ctrl
->dwidth
= WIDTH_HWORD
;
477 ch_ctrl
->dwidth
= WIDTH_WORD
;
/* Remaining single-bit flags, one per shift step. */
485 ch_ctrl
->src_ahb1
= ctrl
& 1;
488 ch_ctrl
->dest_ahb1
= ctrl
& 1;
491 ch_ctrl
->si
= ctrl
& 1;
494 ch_ctrl
->di
= ctrl
& 1;
497 ch_ctrl
->priv_mode
= ctrl
& 1;
500 ch_ctrl
->bufferable
= ctrl
& 1;
503 ch_ctrl
->cacheable
= ctrl
& 1;
506 ch_ctrl
->tc_mask
= ctrl
& 1;
512 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control
);
/*
 * Pack a pnx4008_dma_ch_config into the hardware channel CONFIG register
 * word (*cfg): halt/active/lock/itc/ie bits, the flow-control code, and
 * — for peripheral-involving flows — the 5-bit dest (bits 6..10) and
 * src (bits 1..5) peripheral numbers.
 * NOTE(review): switch-case bodies and several flow-control cases are
 * missing from this extract.
 */
514 int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config
* ch_cfg
,
519 if (!cfg
|| !ch_cfg
) {
526 switch (ch_cfg
->halt
) {
537 switch (ch_cfg
->active
) {
548 switch (ch_cfg
->lock
) {
559 switch (ch_cfg
->itc
) {
570 switch (ch_cfg
->ie
) {
581 switch (ch_cfg
->flow_cntrl
) {
597 case FC_PER2PER_DPER
:
609 case FC_PER2PER_SPER
:
/* Destination peripheral number: 5 bits at position 6. */
617 *cfg
&= ~(0x1f << 6);
618 *cfg
|= ((ch_cfg
->dest_per
& 0x1f) << 6);
/* Source peripheral number: 5 bits at position 1. */
620 *cfg
&= ~(0x1f << 1);
621 *cfg
|= ((ch_cfg
->src_per
& 0x1f) << 1);
627 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config
);
/*
 * Inverse of pnx4008_dma_pack_config(): decode a CONFIG register word.
 * cfg is shifted between field extractions (shifts fall in this
 * extract's gaps); the flow-control switch maps the 3-bit code onto the
 * FC_* enum, then ie/itc/lock/active/halt are read one bit at a time.
 */
629 int pnx4008_dma_parse_config(unsigned long cfg
,
630 struct pnx4008_dma_ch_config
* ch_cfg
)
641 ch_cfg
->src_per
= cfg
& 0x1f;
644 ch_cfg
->dest_per
= cfg
& 0x1f;
649 ch_cfg
->flow_cntrl
= FC_MEM2MEM_DMA
;
652 ch_cfg
->flow_cntrl
= FC_MEM2PER_DMA
;
655 ch_cfg
->flow_cntrl
= FC_PER2MEM_DMA
;
658 ch_cfg
->flow_cntrl
= FC_PER2PER_DMA
;
661 ch_cfg
->flow_cntrl
= FC_PER2PER_DPER
;
664 ch_cfg
->flow_cntrl
= FC_MEM2PER_PER
;
667 ch_cfg
->flow_cntrl
= FC_PER2MEM_PER
;
670 ch_cfg
->flow_cntrl
= FC_PER2PER_SPER
;
674 ch_cfg
->ie
= cfg
& 1;
677 ch_cfg
->itc
= cfg
& 1;
680 ch_cfg
->lock
= cfg
& 1;
683 ch_cfg
->active
= cfg
& 1;
686 ch_cfg
->halt
= cfg
& 1;
692 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config
);
/*
 * The hardware transfer-size field is only 11 bits (max 0x7FF units).
 * When the head transfer described by config/ctrl exceeds that, split it
 * into a chain of LL entries of new_len units each, advancing src/dest
 * addresses by width * new_len per entry (only when the corresponding
 * si/di increment is set — those conditionals fall in this extract's
 * gaps). The last entry carries the remainder and keeps the TC bit;
 * intermediate entries mask it off (ch_ctrl & 0x7fffffff).
 * NOTE(review): many lines are missing (width switch cases, allocation
 * failure handling, list stitching); treat this text as a fragment.
 */
694 void pnx4008_dma_split_head_entry(struct pnx4008_dma_config
* config
,
695 struct pnx4008_dma_ch_ctrl
* ctrl
)
697 int new_len
= ctrl
->tr_size
, num_entries
= 0;
698 int old_len
= new_len
;
699 int src_width
, dest_width
, count
= 1;
701 switch (ctrl
->swidth
) {
715 switch (ctrl
->dwidth
) {
/* Find the smallest entry count whose per-entry length fits 11 bits. */
729 while (new_len
> 0x7FF) {
731 new_len
= (ctrl
->tr_size
+ num_entries
) / (num_entries
+ 1);
733 if (num_entries
!= 0) {
734 struct pnx4008_dma_ll
*ll
= NULL
;
735 config
->ch_ctrl
&= ~0x7ff;
736 config
->ch_ctrl
|= new_len
;
/* Case 1: head entry had no LL chain yet — build a fresh one. */
737 if (!config
->is_ll
) {
739 while (num_entries
) {
742 pnx4008_alloc_ll_entry(&config
->
747 pnx4008_alloc_ll_entry(&ll
->
755 src_width
* new_len
* count
;
757 ll
->src_addr
= config
->src_addr
;
761 dest_width
* new_len
* count
;
763 ll
->dest_addr
= config
->dest_addr
;
764 ll
->ch_ctrl
= config
->ch_ctrl
& 0x7fffffff;
/* Case 2: existing chain — splice the new entries before it. */
771 struct pnx4008_dma_ll
*ll_old
= config
->ll
;
772 unsigned long ll_dma_old
= config
->ll_dma
;
773 while (num_entries
) {
776 pnx4008_alloc_ll_entry(&config
->
781 pnx4008_alloc_ll_entry(&ll
->
789 src_width
* new_len
* count
;
791 ll
->src_addr
= config
->src_addr
;
795 dest_width
* new_len
* count
;
797 ll
->dest_addr
= config
->dest_addr
;
798 ll
->ch_ctrl
= config
->ch_ctrl
& 0x7fffffff;
804 ll
->next_dma
= ll_dma_old
;
807 /* adjust last length/tc */
808 ll
->ch_ctrl
= config
->ch_ctrl
& (~0x7ff);
809 ll
->ch_ctrl
|= old_len
- new_len
* (count
- 1);
810 config
->ch_ctrl
&= 0x7fffffff;
814 EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry
);
/*
 * Same splitting logic as pnx4008_dma_split_head_entry(), but applied to
 * one entry (cur_ll) in the middle of an existing LL chain: oversized
 * transfers are broken into <= 0x7FF-unit entries inserted after cur_ll,
 * with the original successor (next/next_dma) re-attached at the end and
 * the TC bit kept only on the final entry.
 * NOTE(review): this extract is missing many lines (width switch cases,
 * allocation-failure handling, the si/di conditionals).
 */
816 void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll
* cur_ll
,
817 struct pnx4008_dma_ch_ctrl
* ctrl
)
819 int new_len
= ctrl
->tr_size
, num_entries
= 0;
820 int old_len
= new_len
;
821 int src_width
, dest_width
, count
= 1;
823 switch (ctrl
->swidth
) {
837 switch (ctrl
->dwidth
) {
851 while (new_len
> 0x7FF) {
853 new_len
= (ctrl
->tr_size
+ num_entries
) / (num_entries
+ 1);
855 if (num_entries
!= 0) {
856 struct pnx4008_dma_ll
*ll
= NULL
;
857 cur_ll
->ch_ctrl
&= ~0x7ff;
858 cur_ll
->ch_ctrl
|= new_len
;
860 while (num_entries
) {
863 pnx4008_alloc_ll_entry(&cur_ll
->
868 pnx4008_alloc_ll_entry(&ll
->
876 src_width
* new_len
* count
;
878 ll
->src_addr
= cur_ll
->src_addr
;
882 dest_width
* new_len
* count
;
884 ll
->dest_addr
= cur_ll
->dest_addr
;
885 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& 0x7fffffff;
/* Second branch: cur_ll already had a successor — preserve it. */
892 struct pnx4008_dma_ll
*ll_old
= cur_ll
->next
;
893 unsigned long ll_dma_old
= cur_ll
->next_dma
;
894 while (num_entries
) {
897 pnx4008_alloc_ll_entry(&cur_ll
->
902 pnx4008_alloc_ll_entry(&ll
->
910 src_width
* new_len
* count
;
912 ll
->src_addr
= cur_ll
->src_addr
;
916 dest_width
* new_len
* count
;
918 ll
->dest_addr
= cur_ll
->dest_addr
;
919 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& 0x7fffffff;
926 ll
->next_dma
= ll_dma_old
;
929 /* adjust last length/tc */
930 ll
->ch_ctrl
= cur_ll
->ch_ctrl
& (~0x7ff);
931 ll
->ch_ctrl
|= old_len
- new_len
* (count
- 1);
932 cur_ll
->ch_ctrl
&= 0x7fffffff;
936 EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry
);
/*
 * Program channel ch's hardware registers from a pnx4008_dma_config:
 * source/destination addresses, LLI pointer (0 when not a linked-list
 * transfer), CONTROL and CONFIG words. Requires the channel to be
 * requested (name set) and a valid index.
 * NOTE(review): the lock call, the is_ll conditional and the return
 * statements are missing from this extract.
 */
938 int pnx4008_config_channel(int ch
, struct pnx4008_dma_config
* config
)
940 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
944 __raw_writel(config
->src_addr
, DMAC_Cx_SRC_ADDR(ch
));
945 __raw_writel(config
->dest_addr
, DMAC_Cx_DEST_ADDR(ch
));
948 __raw_writel(config
->ll_dma
, DMAC_Cx_LLI(ch
));
950 __raw_writel(0, DMAC_Cx_LLI(ch
));
952 __raw_writel(config
->ch_ctrl
, DMAC_Cx_CONTROL(ch
));
953 __raw_writel(config
->ch_cfg
, DMAC_Cx_CONFIG(ch
));
954 pnx4008_dma_unlock();
960 EXPORT_SYMBOL_GPL(pnx4008_config_channel
);
/*
 * Read back channel ch's current hardware state into *config:
 * CONFIG, CONTROL, LLI (a non-zero LLI marks it as a linked-list
 * transfer), and the source/destination addresses.
 */
962 int pnx4008_channel_get_config(int ch
, struct pnx4008_dma_config
* config
)
964 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
|| !config
)
968 config
->ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
969 config
->ch_ctrl
= __raw_readl(DMAC_Cx_CONTROL(ch
));
971 config
->ll_dma
= __raw_readl(DMAC_Cx_LLI(ch
));
972 config
->is_ll
= config
->ll_dma
? 1 : 0;
974 config
->src_addr
= __raw_readl(DMAC_Cx_SRC_ADDR(ch
));
975 config
->dest_addr
= __raw_readl(DMAC_Cx_DEST_ADDR(ch
));
976 pnx4008_dma_unlock();
981 EXPORT_SYMBOL_GPL(pnx4008_channel_get_config
);
/*
 * Enable channel ch by read-modify-writing its CONFIG register.
 * NOTE(review): the line that sets the enable bit (between the readl at
 * embedded line 991 and the writel at 993) is missing from this extract.
 */
983 int pnx4008_dma_ch_enable(int ch
)
985 unsigned long ch_cfg
;
987 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
991 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
993 __raw_writel(ch_cfg
, DMAC_Cx_CONFIG(ch
));
994 pnx4008_dma_unlock();
999 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable
);
/*
 * Disable channel ch by read-modify-writing its CONFIG register.
 * NOTE(review): the line clearing the enable bit is missing from this
 * extract, mirroring pnx4008_dma_ch_enable() above.
 */
1001 int pnx4008_dma_ch_disable(int ch
)
1003 unsigned long ch_cfg
;
1005 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
1009 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
1011 __raw_writel(ch_cfg
, DMAC_Cx_CONFIG(ch
));
1012 pnx4008_dma_unlock();
1017 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable
);
/*
 * Query whether channel ch is currently enabled: reads CONFIG and
 * presumably returns its enable bit (the masking/return lines are
 * missing from this extract).
 */
1019 int pnx4008_dma_ch_enabled(int ch
)
1021 unsigned long ch_cfg
;
1023 if (!VALID_CHANNEL(ch
) || !dma_channels
[ch
].name
)
1027 ch_cfg
= __raw_readl(DMAC_Cx_CONFIG(ch
));
1028 pnx4008_dma_unlock();
1033 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled
);
/*
 * Shared interrupt handler for all DMA channels. Reads the combined
 * (dint), terminal-count (tcint) and error (eint) status registers, then
 * walks channels high-to-low: for each pending channel with a registered
 * handler it builds a cause bitmask (DMA_ERR_INT / DMA_TC_INT) and calls
 * the handler; otherwise it logs a spurious IRQ. Pending TC/error bits
 * are acknowledged via the CLEAR registers.
 * NOTE(review): the per-channel i_bit test, cause initialisation and the
 * IRQ_HANDLED return are in this extract's gaps.
 */
1035 static irqreturn_t
dma_irq_handler(int irq
, void *dev_id
)
1038 unsigned long dint
= __raw_readl(DMAC_INT_STAT
);
1039 unsigned long tcint
= __raw_readl(DMAC_INT_TC_STAT
);
1040 unsigned long eint
= __raw_readl(DMAC_INT_ERR_STAT
);
1041 unsigned long i_bit
;
1043 for (i
= MAX_DMA_CHANNELS
- 1; i
>= 0; i
--) {
1046 struct dma_channel
*channel
= &dma_channels
[i
];
1048 if (channel
->name
&& channel
->irq_handler
) {
1052 cause
|= DMA_ERR_INT
;
1054 cause
|= DMA_TC_INT
;
1055 channel
->irq_handler(i
, cause
, channel
->data
);
1058 * IRQ for an unregistered DMA channel
1061 "spurious IRQ for DMA channel %d\n", i
);
1064 __raw_writel(i_bit
, DMAC_INT_TC_CLEAR
);
1066 __raw_writel(i_bit
, DMAC_INT_ERR_CLEAR
);
/*
 * Driver init (arch_initcall): register the shared DMA IRQ, allocate a
 * 16 KiB DMA-coherent pool of LL descriptors (count = 0x4000 /
 * sizeof(struct pnx4008_dma_ll)), thread the intrusive free list through
 * the entries (each entry's first word points at the next; the last one
 * points back to the pool base), and enable the controller by writing 1
 * to DMAC_CONFIG.
 * NOTE(review): error-path returns and the trailing cleanup are in this
 * extract's gaps.
 */
1072 static int __init
pnx4008_dma_init(void)
1076 ret
= request_irq(DMA_INT
, dma_irq_handler
, 0, "DMA", NULL
);
1078 printk(KERN_CRIT
"Wow! Can't register IRQ for DMA\n");
1082 ll_pool
.count
= 0x4000 / sizeof(struct pnx4008_dma_ll
);
1083 ll_pool
.cur
= ll_pool
.vaddr
=
1084 dma_alloc_coherent(NULL
, ll_pool
.count
* sizeof(struct pnx4008_dma_ll
),
1085 &ll_pool
.dma_addr
, GFP_KERNEL
);
1087 if (!ll_pool
.vaddr
) {
1089 free_irq(DMA_INT
, NULL
);
/* Link every entry to its neighbour to form the free list. */
1093 for (i
= 0; i
< ll_pool
.count
- 1; i
++) {
1094 void **addr
= ll_pool
.vaddr
+ i
* sizeof(struct pnx4008_dma_ll
);
1095 *addr
= (void *)addr
+ sizeof(struct pnx4008_dma_ll
);
/* Terminate the list by pointing the last entry at the pool base. */
1097 *(long *)(ll_pool
.vaddr
+
1098 (ll_pool
.count
- 1) * sizeof(struct pnx4008_dma_ll
)) =
1099 (long)ll_pool
.vaddr
;
1101 __raw_writel(1, DMAC_CONFIG
);
1106 arch_initcall(pnx4008_dma_init
);