arch/cris/arch-v10/kernel/dma.c (cris-mirror.git)

/* Wrapper for DMA channel allocator that updates DMA client muxing.
 * Copyright 2004-2007, Axis Communications AB
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/errno.h>

#include <asm/dma.h>
#include <arch/svinto.h>
#include <arch/system.h>

/* Macro to access ETRAX 100 registers */
#define SETS(var, reg, field, val) var = (var & ~IO_MASK_(reg##_, field##_)) | \
                                         IO_STATE_(reg##_, field##_, _##val)
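
/* Note added for clarity (not in the original source): SETS() rewrites one
 * client-mux field of a R_GEN_CONFIG shadow value by clearing the field via
 * its IO_MASK_ and OR-ing in the requested IO_STATE_ encoding.  For example,
 * SETS(gens, R_GEN_CONFIG, dma6, serial0) below routes DMA channel 6 to
 * serial port 0 in the local copy 'gens'. */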

static char used_dma_channels[MAX_DMA_CHANNELS];
static const char * used_dma_channels_users[MAX_DMA_CHANNELS];
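
/* Descriptive comment added for clarity (the original file has none):
 * cris_request_dma() reserves DMA channel 'dmanr' for 'device_id' and, where
 * needed, reprograms the DMA client muxing in R_GEN_CONFIG for the given
 * owner.  It returns 0 on success, -EBUSY if the channel is already taken,
 * and -EINVAL for an out-of-range channel or a channel/owner mismatch. */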
int cris_request_dma(unsigned int dmanr, const char * device_id,
                     unsigned options, enum dma_owner owner)
{
        unsigned long flags;
        unsigned long int gens;
        int fail = -EINVAL;

        if (dmanr >= MAX_DMA_CHANNELS) {
                printk(KERN_CRIT "cris_request_dma: invalid DMA channel %u\n", dmanr);
                return -EINVAL;
        }

        local_irq_save(flags);
        if (used_dma_channels[dmanr]) {
                local_irq_restore(flags);
                if (options & DMA_VERBOSE_ON_ERROR) {
                        printk(KERN_CRIT "Failed to request DMA %i for %s, already allocated by %s\n", dmanr, device_id, used_dma_channels_users[dmanr]);
                }
                if (options & DMA_PANIC_ON_ERROR) {
                        panic("request_dma error!");
                }
                return -EBUSY;
        }

        gens = genconfig_shadow;

        switch(owner)
        {
        case dma_eth:
                if ((dmanr != NETWORK_TX_DMA_NBR) &&
                    (dmanr != NETWORK_RX_DMA_NBR)) {
                        printk(KERN_CRIT "Invalid DMA channel for eth\n");
                        goto bail;
                }
                break;
        case dma_ser0:
                if (dmanr == SER0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, serial0);
                } else if (dmanr == SER0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, serial0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser0\n");
                        goto bail;
                }
                break;
        case dma_ser1:
                if (dmanr == SER1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma8, serial1);
                } else if (dmanr == SER1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma9, serial1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser1\n");
                        goto bail;
                }
                break;
        case dma_ser2:
                if (dmanr == SER2_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, serial2);
                } else if (dmanr == SER2_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, serial2);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser2\n");
                        goto bail;
                }
                break;
        case dma_ser3:
                if (dmanr == SER3_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, serial3);
                } else if (dmanr == SER3_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, serial3);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ser3\n");
                        goto bail;
                }
                break;
        case dma_ata:
                if (dmanr == ATA_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, ata);
                } else if (dmanr == ATA_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, ata);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ata\n");
                        goto bail;
                }
                break;
        case dma_ext0:
                if (dmanr == EXTDMA0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, extdma0);
                } else if (dmanr == EXTDMA0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, extdma0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ext0\n");
                        goto bail;
                }
                break;
        case dma_ext1:
                if (dmanr == EXTDMA1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, extdma1);
                } else if (dmanr == EXTDMA1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, extdma1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for ext1\n");
                        goto bail;
                }
                break;
        case dma_int6:
                if (dmanr == MEM2MEM_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma7, intdma6);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for int6\n");
                        goto bail;
                }
                break;
        case dma_int7:
                if (dmanr == MEM2MEM_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma6, intdma7);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for int7\n");
                        goto bail;
                }
                break;
        case dma_usb:
                if (dmanr == USB_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma8, usb);
                } else if (dmanr == USB_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma9, usb);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for usb\n");
                        goto bail;
                }
                break;
        case dma_scsi0:
                if (dmanr == SCSI0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, scsi0);
                } else if (dmanr == SCSI0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, scsi0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for scsi0\n");
                        goto bail;
                }
                break;
        case dma_scsi1:
                if (dmanr == SCSI1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, scsi1);
                } else if (dmanr == SCSI1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, scsi1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for scsi1\n");
                        goto bail;
                }
                break;
        case dma_par0:
                if (dmanr == PAR0_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma2, par0);
                } else if (dmanr == PAR0_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma3, par0);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for par0\n");
                        goto bail;
                }
                break;
        case dma_par1:
                if (dmanr == PAR1_TX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma4, par1);
                } else if (dmanr == PAR1_RX_DMA_NBR) {
                        SETS(gens, R_GEN_CONFIG, dma5, par1);
                } else {
                        printk(KERN_CRIT "Invalid DMA channel for par1\n");
                        goto bail;
                }
                break;
        default:
                printk(KERN_CRIT "Invalid DMA owner.\n");
                goto bail;
        }

        used_dma_channels[dmanr] = 1;
        used_dma_channels_users[dmanr] = device_id;

        /* Commit the new client muxing to the hardware register. */
        {
                volatile int i;
                genconfig_shadow = gens;
                *R_GEN_CONFIG = genconfig_shadow;
                /* Wait 12 cycles before doing any DMA command */
                for(i = 6; i > 0; i--)
                        nop();
        }
        fail = 0;
bail:
        local_irq_restore(flags);
        return fail;
}
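
/* Descriptive comment added for clarity (the original file has none):
 * cris_free_dma() releases a channel obtained with cris_request_dma().  The
 * channel is reset before being marked free; calls with an unallocated
 * channel or a non-matching device_id are logged and otherwise ignored. */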
void cris_free_dma(unsigned int dmanr, const char * device_id)
{
        unsigned long flags;

        if (dmanr >= MAX_DMA_CHANNELS) {
                printk(KERN_CRIT "cris_free_dma: invalid DMA channel %u\n", dmanr);
                return;
        }

        local_irq_save(flags);
        if (!used_dma_channels[dmanr]) {
                printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated\n", dmanr);
        } else if (device_id != used_dma_channels_users[dmanr]) {
                printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated by device\n", dmanr);
        } else {
                /* Reset the channel before handing it back, and busy-wait
                 * until the command field no longer reads back as 'reset'. */
                switch(dmanr)
                {
                case 0:
                        *R_DMA_CH0_CMD = IO_STATE(R_DMA_CH0_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH0_CMD, cmd, *R_DMA_CH0_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH0_CMD, cmd, reset));
                        break;
                case 1:
                        *R_DMA_CH1_CMD = IO_STATE(R_DMA_CH1_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH1_CMD, cmd, *R_DMA_CH1_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH1_CMD, cmd, reset));
                        break;
                case 2:
                        *R_DMA_CH2_CMD = IO_STATE(R_DMA_CH2_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH2_CMD, cmd, *R_DMA_CH2_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH2_CMD, cmd, reset));
                        break;
                case 3:
                        *R_DMA_CH3_CMD = IO_STATE(R_DMA_CH3_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH3_CMD, cmd, *R_DMA_CH3_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH3_CMD, cmd, reset));
                        break;
                case 4:
                        *R_DMA_CH4_CMD = IO_STATE(R_DMA_CH4_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH4_CMD, cmd, *R_DMA_CH4_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH4_CMD, cmd, reset));
                        break;
                case 5:
                        *R_DMA_CH5_CMD = IO_STATE(R_DMA_CH5_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH5_CMD, cmd, *R_DMA_CH5_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH5_CMD, cmd, reset));
                        break;
                case 6:
                        *R_DMA_CH6_CMD = IO_STATE(R_DMA_CH6_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH6_CMD, cmd, *R_DMA_CH6_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH6_CMD, cmd, reset));
                        break;
                case 7:
                        *R_DMA_CH7_CMD = IO_STATE(R_DMA_CH7_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH7_CMD, cmd, *R_DMA_CH7_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH7_CMD, cmd, reset));
                        break;
                case 8:
                        *R_DMA_CH8_CMD = IO_STATE(R_DMA_CH8_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH8_CMD, cmd, *R_DMA_CH8_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH8_CMD, cmd, reset));
                        break;
                case 9:
                        *R_DMA_CH9_CMD = IO_STATE(R_DMA_CH9_CMD, cmd, reset);
                        while (IO_EXTRACT(R_DMA_CH9_CMD, cmd, *R_DMA_CH9_CMD) ==
                               IO_STATE_VALUE(R_DMA_CH9_CMD, cmd, reset));
                        break;
                }
                used_dma_channels[dmanr] = 0;
        }
        local_irq_restore(flags);
}

EXPORT_SYMBOL(cris_request_dma);
EXPORT_SYMBOL(cris_free_dma);
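
/* Illustrative usage sketch, not part of the original file.  The identifier
 * 'dma_user' and the string "ser0 tx dma" are made-up examples, while
 * SER0_TX_DMA_NBR, DMA_VERBOSE_ON_ERROR and dma_ser0 come from the code
 * above.  Because cris_free_dma() compares the device_id pointer itself, the
 * same pointer must be passed to both calls:
 *
 *      static const char dma_user[] = "ser0 tx dma";
 *
 *      int err = cris_request_dma(SER0_TX_DMA_NBR, dma_user,
 *                                 DMA_VERBOSE_ON_ERROR, dma_ser0);
 *      if (err)
 *              return err;
 *      ...
 *      cris_free_dma(SER0_TX_DMA_NBR, dma_user);
 */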