/* Wrapper for DMA channel allocator that updates DMA client muxing.
 * Copyright 2004, Axis Communications AB
 * $Id: dma.c,v 1.1 2004/12/13 12:21:51 starvik Exp $
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/errno.h>

#include <asm/dma.h>
#include <asm/arch/svinto.h>

/* Macro to set one field of an ETRAX 100 register in a shadow variable */
#define SETS(var, reg, field, val) var = (var & ~IO_MASK_(reg##_, field##_)) | \
                                         IO_STATE_(reg##_, field##_, _##val)
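
/* For example, SETS(gens, R_GEN_CONFIG, dma6, serial0) expands to
 *
 *   gens = (gens & ~IO_MASK_(R_GEN_CONFIG_, dma6_)) |
 *          IO_STATE_(R_GEN_CONFIG_, dma6_, _serial0);
 *
 * i.e. it clears the dma6 field of gens and then sets it to the serial0
 * client value. */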

static char used_dma_channels[MAX_DMA_CHANNELS];
static const char * used_dma_channels_users[MAX_DMA_CHANNELS];
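
/* used_dma_channels flags a channel as taken; used_dma_channels_users
 * remembers the owner's device_id string for diagnostics.
 *
 * Typical use, as a sketch (the channel macro and name below are
 * illustrative; channel numbers come from asm/arch/svinto.h):
 *
 *   if (cris_request_dma(SER0_TX_DMA_NBR, "serial 0 dma tr",
 *                        DMA_VERBOSE_ON_ERROR, dma_ser0))
 *           return -EBUSY;
 *   ...
 *   cris_free_dma(SER0_TX_DMA_NBR, "serial 0 dma tr");
 *
 * cris_free_dma compares the device_id pointer, not the string contents,
 * so pass the same pointer to both calls. */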

int cris_request_dma(unsigned int dmanr, const char * device_id,
                     unsigned options, enum dma_owner owner)
{
        unsigned long flags;
        unsigned long int gens;
        int fail = -EINVAL;

        if (dmanr >= MAX_DMA_CHANNELS) {
                printk(KERN_CRIT "cris_request_dma: invalid DMA channel %u\n", dmanr);
                return -EINVAL;
        }

        local_irq_save(flags);
        if (used_dma_channels[dmanr]) {
                local_irq_restore(flags);
                if (options & DMA_VERBOSE_ON_ERROR) {
                        printk(KERN_CRIT
                               "Failed to request DMA %i for %s, already allocated by %s\n",
                               dmanr, device_id, used_dma_channels_users[dmanr]);
                }
                if (options & DMA_PANIC_ON_ERROR) {
                        panic("request_dma error!");
                }
                return -EBUSY;
        }

        gens = genconfig_shadow;

        switch (owner) {
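        /* The network interface has fixed DMA channels; the eth case below
         * only validates the channel number and changes no muxing. */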
        case dma_eth:
                if ((dmanr != NETWORK_TX_DMA_NBR) &&
                    (dmanr != NETWORK_RX_DMA_NBR)) {
                        printk(KERN_CRIT "Invalid DMA channel for eth\n");
                        goto bail;
                }
                break;
        case dma_ser0:
                if (dmanr == SER0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, serial0);
                } else if (dmanr == SER0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, serial0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser0\n");
                        goto bail;
                }
                break;
        case dma_ser1:
                if (dmanr == SER1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma8, serial1);
                } else if (dmanr == SER1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma9, serial1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser1\n");
                        goto bail;
                }
                break;
        case dma_ser2:
                if (dmanr == SER2_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, serial2);
                } else if (dmanr == SER2_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, serial2);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser2\n");
                        goto bail;
                }
                break;
        case dma_ser3:
                if (dmanr == SER3_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, serial3);
                } else if (dmanr == SER3_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, serial3);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser3\n");
                        goto bail;
                }
                break;
        case dma_ata:
                if (dmanr == ATA_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, ata);
                } else if (dmanr == ATA_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, ata);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ata\n");
                        goto bail;
                }
                break;
        case dma_ext0:
                if (dmanr == EXTDMA0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, extdma0);
                } else if (dmanr == EXTDMA0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, extdma0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ext0\n");
                        goto bail;
                }
                break;
        case dma_ext1:
                if (dmanr == EXTDMA1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, extdma1);
                } else if (dmanr == EXTDMA1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, extdma1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ext1\n");
                        goto bail;
                }
                break;
        case dma_int6:
                if (dmanr == MEM2MEM_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, intdma6);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for int6\n");
                        goto bail;
                }
                break;
        case dma_int7:
                if (dmanr == MEM2MEM_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, intdma7);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for int7\n");
                        goto bail;
                }
                break;
        case dma_usb:
                if (dmanr == USB_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma8, usb);
                } else if (dmanr == USB_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma9, usb);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for usb\n");
                        goto bail;
                }
                break;
        case dma_scsi0:
                if (dmanr == SCSI0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, scsi0);
                } else if (dmanr == SCSI0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, scsi0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for scsi0\n");
                        goto bail;
                }
                break;
        case dma_scsi1:
                if (dmanr == SCSI1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, scsi1);
                } else if (dmanr == SCSI1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, scsi1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for scsi1\n");
                        goto bail;
                }
                break;
        case dma_par0:
                if (dmanr == PAR0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, par0);
                } else if (dmanr == PAR0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, par0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for par0\n");
                        goto bail;
                }
                break;
        case dma_par1:
                if (dmanr == PAR1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, par1);
                } else if (dmanr == PAR1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, par1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for par1\n");
                        goto bail;
                }
                break;
        default:
                printk(KERN_CRIT "Invalid DMA owner.\n");
                goto bail;
        }

        used_dma_channels[dmanr] = 1;
        used_dma_channels_users[dmanr] = device_id;
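
        /* R_GEN_CONFIG is write-only, so the new mux setting is kept in the
         * global shadow copy genconfig_shadow and written out from there. */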
        {
                volatile int i;

                genconfig_shadow = gens;
                *R_GEN_CONFIG = genconfig_shadow;
                /* Wait 12 cycles before doing any DMA command */
                for (i = 6; i > 0; i--)
                        nop();
        }
        fail = 0;
bail:
        local_irq_restore(flags);
        return fail;
}
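
/* Release a channel previously claimed with cris_request_dma. The channel is
 * reset so it is idle before its next user, and device_id must be the same
 * pointer that was passed when the channel was requested. */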
void cris_free_dma(unsigned int dmanr, const char * device_id)
{
        unsigned long flags;

        if (dmanr >= MAX_DMA_CHANNELS) {
                printk(KERN_CRIT "cris_free_dma: invalid DMA channel %u\n", dmanr);
                return;
        }

        local_irq_save(flags);
        if (!used_dma_channels[dmanr]) {
                printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated\n", dmanr);
        } else if (device_id != used_dma_channels_users[dmanr]) {
                printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated by device\n", dmanr);
        } else {
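                /* Reset the channel before handing it back: write the reset
                 * command, then busy-wait until the cmd field no longer reads
                 * back as reset, i.e. until the controller has completed it. */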
                switch (dmanr) {
                case 0:
                        *R_DMA_CH0_CMD = IO_STATE(R_DMA_CH0_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH0_CMD, cmd, *R_DMA_CH0_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH0_CMD, cmd, reset));
                        break;
                case 1:
                        *R_DMA_CH1_CMD = IO_STATE(R_DMA_CH1_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH1_CMD, cmd, *R_DMA_CH1_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH1_CMD, cmd, reset));
                        break;
                case 2:
                        *R_DMA_CH2_CMD = IO_STATE(R_DMA_CH2_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH2_CMD, cmd, *R_DMA_CH2_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH2_CMD, cmd, reset));
                        break;
                case 3:
                        *R_DMA_CH3_CMD = IO_STATE(R_DMA_CH3_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH3_CMD, cmd, *R_DMA_CH3_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH3_CMD, cmd, reset));
                        break;
                case 4:
                        *R_DMA_CH4_CMD = IO_STATE(R_DMA_CH4_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH4_CMD, cmd, *R_DMA_CH4_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH4_CMD, cmd, reset));
                        break;
                case 5:
                        *R_DMA_CH5_CMD = IO_STATE(R_DMA_CH5_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH5_CMD, cmd, *R_DMA_CH5_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH5_CMD, cmd, reset));
                        break;
                case 6:
                        *R_DMA_CH6_CMD = IO_STATE(R_DMA_CH6_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH6_CMD, cmd, *R_DMA_CH6_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH6_CMD, cmd, reset));
                        break;
                case 7:
                        *R_DMA_CH7_CMD = IO_STATE(R_DMA_CH7_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH7_CMD, cmd, *R_DMA_CH7_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH7_CMD, cmd, reset));
                        break;
                case 8:
                        *R_DMA_CH8_CMD = IO_STATE(R_DMA_CH8_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH8_CMD, cmd, *R_DMA_CH8_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH8_CMD, cmd, reset));
                        break;
                case 9:
                        *R_DMA_CH9_CMD = IO_STATE(R_DMA_CH9_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH9_CMD, cmd, *R_DMA_CH9_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH9_CMD, cmd, reset));
                        break;
                }
                used_dma_channels[dmanr] = 0;
        }
        local_irq_restore(flags);
}

EXPORT_SYMBOL(cris_request_dma);
EXPORT_SYMBOL(cris_free_dma);