/* Wrapper for DMA channel allocator that updates DMA client muxing.
 * Copyright 2004-2007, Axis Communications AB
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/errno.h>

#include <asm/dma.h>
#include <arch/svinto.h>
/* Macro to access ETRAX 100 registers */
#define SETS(var, reg, field, val) \
	var = (var & ~IO_MASK_(reg##_, field##_)) | \
	      IO_STATE_(reg##_, field##_, _##val)
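
/*
 * Illustrative expansion: SETS(gens, R_GEN_CONFIG, dma6, serial0) masks
 * out the dma6 field of gens and ORs in the value that selects the
 * serial0 client, leaving all other mux fields untouched.
 */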
/* Bookkeeping: which channels are taken, and by which device_id. */
static char used_dma_channels[MAX_DMA_CHANNELS];
static const char *used_dma_channels_users[MAX_DMA_CHANNELS];
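
/*
 * Reserve DMA channel dmanr for device_id and reprogram the ETRAX 100
 * client mux (R_GEN_CONFIG) for the given owner. Returns 0 on success,
 * -EINVAL for an invalid channel/owner pairing and -EBUSY if the channel
 * is already taken. A minimal caller sketch (hypothetical driver code,
 * error handling elided):
 *
 *	static const char id[] = "ser0 tx";
 *
 *	if (cris_request_dma(SER0_TX_DMA_NBR, id,
 *			     DMA_VERBOSE_ON_ERROR, dma_ser0))
 *		return -EBUSY;
 *	...
 *	cris_free_dma(SER0_TX_DMA_NBR, id);
 *
 * Note that cris_free_dma() compares device_id by pointer, so the same
 * string pointer must be passed to both calls.
 */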
int cris_request_dma(unsigned int dmanr, const char *device_id,
		     unsigned options, enum dma_owner owner)
{
	unsigned long flags;
	unsigned long int gens;
	int fail = -EINVAL;
	if (dmanr >= MAX_DMA_CHANNELS) {
		printk(KERN_CRIT "cris_request_dma: invalid DMA channel %u\n",
		       dmanr);
		return -EINVAL;
	}
	local_irq_save(flags);
	if (used_dma_channels[dmanr]) {
		local_irq_restore(flags);
		if (options & DMA_VERBOSE_ON_ERROR) {
			printk(KERN_CRIT "Failed to request DMA %i for %s, "
			       "already allocated by %s\n",
			       dmanr, device_id, used_dma_channels_users[dmanr]);
		}
		if (options & DMA_PANIC_ON_ERROR) {
			panic("request_dma error!");
		}
		return -EBUSY;
	}
	gens = genconfig_shadow;
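
	/*
	 * Stage the new client muxing in gens first; it is committed to
	 * R_GEN_CONFIG only after the (owner, channel) pairing has been
	 * validated below.
	 */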
	switch (owner) {
	case dma_eth:
		if ((dmanr != NETWORK_TX_DMA_NBR) &&
		    (dmanr != NETWORK_RX_DMA_NBR)) {
			printk(KERN_CRIT "Invalid DMA channel for eth\n");
			goto bail;
		}
		break;
	case dma_ser0:
		if (dmanr == SER0_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma6, serial0);
		} else if (dmanr == SER0_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma7, serial0);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ser0\n");
			goto bail;
		}
		break;
	case dma_ser1:
		if (dmanr == SER1_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma8, serial1);
		} else if (dmanr == SER1_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma9, serial1);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ser1\n");
			goto bail;
		}
		break;
	case dma_ser2:
		if (dmanr == SER2_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma2, serial2);
		} else if (dmanr == SER2_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma3, serial2);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ser2\n");
			goto bail;
		}
		break;
	case dma_ser3:
		if (dmanr == SER3_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma4, serial3);
		} else if (dmanr == SER3_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma5, serial3);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ser3\n");
			goto bail;
		}
		break;
	case dma_ata:
		if (dmanr == ATA_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma2, ata);
		} else if (dmanr == ATA_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma3, ata);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ata\n");
			goto bail;
		}
		break;
	case dma_ext0:
		if (dmanr == EXTDMA0_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma4, extdma0);
		} else if (dmanr == EXTDMA0_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma5, extdma0);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ext0\n");
			goto bail;
		}
		break;
	case dma_ext1:
		if (dmanr == EXTDMA1_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma6, extdma1);
		} else if (dmanr == EXTDMA1_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma7, extdma1);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for ext1\n");
			goto bail;
		}
		break;
	case dma_int6:
		if (dmanr == MEM2MEM_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma7, intdma6);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for int6\n");
			goto bail;
		}
		break;
	case dma_int7:
		if (dmanr == MEM2MEM_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma6, intdma7);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for int7\n");
			goto bail;
		}
		break;
	case dma_usb:
		if (dmanr == USB_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma8, usb);
		} else if (dmanr == USB_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma9, usb);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for usb\n");
			goto bail;
		}
		break;
	case dma_scsi0:
		if (dmanr == SCSI0_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma2, scsi0);
		} else if (dmanr == SCSI0_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma3, scsi0);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for scsi0\n");
			goto bail;
		}
		break;
	case dma_scsi1:
		if (dmanr == SCSI1_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma4, scsi1);
		} else if (dmanr == SCSI1_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma5, scsi1);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for scsi1\n");
			goto bail;
		}
		break;
	case dma_par0:
		if (dmanr == PAR0_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma2, par0);
		} else if (dmanr == PAR0_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma3, par0);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for par0\n");
			goto bail;
		}
		break;
	case dma_par1:
		if (dmanr == PAR1_TX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma4, par1);
		} else if (dmanr == PAR1_RX_DMA_NBR) {
			SETS(gens, R_GEN_CONFIG, dma5, par1);
		} else {
			printk(KERN_CRIT "Invalid DMA channel for par1\n");
			goto bail;
		}
		break;
	default:
		printk(KERN_CRIT "Invalid DMA owner.\n");
		goto bail;
	}
	used_dma_channels[dmanr] = 1;
	used_dma_channels_users[dmanr] = device_id;
	{
		volatile int i;

		genconfig_shadow = gens;
		*R_GEN_CONFIG = genconfig_shadow;
		/* Wait 12 cycles before doing any DMA command */
		for (i = 6; i > 0; i--)
			nop();
	}
	fail = 0;
bail:
	local_irq_restore(flags);
	return fail;
}
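
/*
 * Release DMA channel dmanr. The channel is reset before it is marked
 * free so that no stale transfer survives into the next owner. device_id
 * is compared by pointer, so it must be the exact pointer that was passed
 * to cris_request_dma().
 */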
void cris_free_dma(unsigned int dmanr, const char *device_id)
{
	unsigned long flags;
	if (dmanr >= MAX_DMA_CHANNELS) {
		printk(KERN_CRIT "cris_free_dma: invalid DMA channel %u\n",
		       dmanr);
		return;
	}
	local_irq_save(flags);
	if (!used_dma_channels[dmanr]) {
		printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated\n",
		       dmanr);
	} else if (device_id != used_dma_channels_users[dmanr]) {
		printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated by device\n",
		       dmanr);
	} else {
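		/*
		 * Reset the channel before handing it back: write the reset
		 * command, then spin until the command field no longer reads
		 * back as reset.
		 */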
		switch (dmanr) {
		case 0:
			*R_DMA_CH0_CMD = IO_STATE(R_DMA_CH0_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH0_CMD, cmd, *R_DMA_CH0_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH0_CMD, cmd, reset));
			break;
		case 1:
			*R_DMA_CH1_CMD = IO_STATE(R_DMA_CH1_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH1_CMD, cmd, *R_DMA_CH1_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH1_CMD, cmd, reset));
			break;
		case 2:
			*R_DMA_CH2_CMD = IO_STATE(R_DMA_CH2_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH2_CMD, cmd, *R_DMA_CH2_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH2_CMD, cmd, reset));
			break;
		case 3:
			*R_DMA_CH3_CMD = IO_STATE(R_DMA_CH3_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH3_CMD, cmd, *R_DMA_CH3_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH3_CMD, cmd, reset));
			break;
		case 4:
			*R_DMA_CH4_CMD = IO_STATE(R_DMA_CH4_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH4_CMD, cmd, *R_DMA_CH4_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH4_CMD, cmd, reset));
			break;
		case 5:
			*R_DMA_CH5_CMD = IO_STATE(R_DMA_CH5_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH5_CMD, cmd, *R_DMA_CH5_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH5_CMD, cmd, reset));
			break;
		case 6:
			*R_DMA_CH6_CMD = IO_STATE(R_DMA_CH6_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH6_CMD, cmd, *R_DMA_CH6_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH6_CMD, cmd, reset));
			break;
		case 7:
			*R_DMA_CH7_CMD = IO_STATE(R_DMA_CH7_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH7_CMD, cmd, *R_DMA_CH7_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH7_CMD, cmd, reset));
			break;
		case 8:
			*R_DMA_CH8_CMD = IO_STATE(R_DMA_CH8_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH8_CMD, cmd, *R_DMA_CH8_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH8_CMD, cmd, reset));
			break;
		case 9:
			*R_DMA_CH9_CMD = IO_STATE(R_DMA_CH9_CMD, cmd, reset);
			while (IO_EXTRACT(R_DMA_CH9_CMD, cmd, *R_DMA_CH9_CMD) ==
			       IO_STATE_VALUE(R_DMA_CH9_CMD, cmd, reset));
			break;
		}
		used_dma_channels[dmanr] = 0;
	}
	local_irq_restore(flags);
}
EXPORT_SYMBOL(cris_request_dma);
EXPORT_SYMBOL(cris_free_dma);