/* arch/m68k/include/asm/raw_io.h */
/*
 * linux/include/asm-m68k/raw_io.h
 *
 * 10/20/00 RZ: - created from bits of io.h and ide.h to cleanup namespace
 */
8 #ifndef _RAW_IO_H
9 #define _RAW_IO_H
11 #ifdef __KERNEL__
13 #include <asm/byteorder.h>
16 /* Values for nocacheflag and cmode */
17 #define IOMAP_FULL_CACHING 0
18 #define IOMAP_NOCACHE_SER 1
19 #define IOMAP_NOCACHE_NONSER 2
20 #define IOMAP_WRITETHROUGH 3
22 extern void iounmap(void __iomem *addr);
24 extern void __iomem *__ioremap(unsigned long physaddr, unsigned long size,
25 int cacheflag);
26 extern void __iounmap(void *addr, unsigned long size);
/* ++roman: The assignments to temp. vars avoid that gcc sometimes generates
 * two accesses to memory, which may be undesirable for some devices.
 */

/* Single MMIO loads: "be" variants return the value as stored (native,
 * big-endian on m68k), "le" variants byte-swap via le*_to_cpu(). */
#define in_8(addr) \
    ({ u8 __v = (*(__force volatile u8 *) (addr)); __v; })
#define in_be16(addr) \
    ({ u16 __v = (*(__force volatile u16 *) (addr)); __v; })
#define in_be32(addr) \
    ({ u32 __v = (*(__force volatile u32 *) (addr)); __v; })
#define in_le16(addr) \
    ({ u16 __v = le16_to_cpu(*(__force volatile __le16 *) (addr)); __v; })
#define in_le32(addr) \
    ({ u32 __v = le32_to_cpu(*(__force volatile __le32 *) (addr)); __v; })

/* Single MMIO stores; cast to (void) so the result is explicitly unused. */
#define out_8(addr,b) (void)((*(__force volatile u8 *) (addr)) = (b))
#define out_be16(addr,w) (void)((*(__force volatile u16 *) (addr)) = (w))
#define out_be32(addr,l) (void)((*(__force volatile u32 *) (addr)) = (l))
#define out_le16(addr,w) (void)((*(__force volatile __le16 *) (addr)) = cpu_to_le16(w))
#define out_le32(addr,l) (void)((*(__force volatile __le32 *) (addr)) = cpu_to_le32(l))
/* ISA-style port accessors: on m68k, I/O is memory-mapped, so these are
 * plain MMIO accesses. Note the argument-order asymmetry: out_*() takes
 * (addr, val) while raw_out*()/__raw_write*() take (val, addr). */
#define raw_inb in_8
#define raw_inw in_be16
#define raw_inl in_be32
#define __raw_readb in_8
#define __raw_readw in_be16
#define __raw_readl in_be32

#define raw_outb(val,port) out_8((port),(val))
#define raw_outw(val,port) out_be16((port),(val))
#define raw_outl(val,port) out_be32((port),(val))
#define __raw_writeb(val,addr) out_8((addr),(val))
#define __raw_writew(val,addr) out_be16((addr),(val))
#define __raw_writel(val,addr) out_be32((addr),(val))
/*
 * Atari ROM port (cartridge port) ISA adapter, used for the EtherNEC NE2000
 * network card driver.
 * The ISA adapter connects address lines A9-A13 to ISA address lines A0-A4,
 * and hardwires the rest of the ISA addresses for a base address of 0x300.
 *
 * Data lines D8-D15 are connected to ISA data lines D0-D7 for reading.
 * For writes, address lines A1-A8 are latched to ISA data lines D0-D7
 * (meaning the bit pattern on A1-A8 can be read back as byte).
 *
 * Read and write operations are distinguished by the base address used:
 * reads are from the ROM A side range, writes are through the B side range
 * addresses (A side base + 0x10000).
 *
 * Reads and writes are byte only.
 *
 * 16 bit reads and writes are necessary for the NetUSBee adapter's USB
 * chipset - 16 bit words are read straight off the ROM port while 16 bit
 * writes are split into two byte writes. The low byte is latched to the
 * NetUSBee buffer by a read from the _read_ window (with the data pattern
 * asserted as A1-A8 address pattern). The high byte is then written to the
 * write range as usual, completing the write cycle.
 */
#if defined(CONFIG_ATARI_ROM_ISA)
/* Reads: ISA data arrives on D8-D15 (see comment above), so a byte read
 * is the high half of the 16-bit word fetched from the ROM port. */
#define rom_in_8(addr) \
	({ u16 __v = (*(__force volatile u16 *) (addr)); __v >>= 8; __v; })
#define rom_in_be16(addr) \
	({ u16 __v = (*(__force volatile u16 *) (addr)); __v; })
#define rom_in_le16(addr) \
	({ u16 __v = le16_to_cpu(*(__force volatile u16 *) (addr)); __v; })

/* Writes: the data byte is encoded in address lines A1-A8 of an access to
 * the write window (base | 0x10000); __w only absorbs the dummy read. */
#define rom_out_8(addr, b) \
	({u8 __w, __v = (b); u32 _addr = ((u32) (addr)); \
	__w = ((*(__force volatile u8 *) ((_addr | 0x10000) + (__v<<1)))); })
/* 16-bit write: latch the low byte via the read window first, then issue
 * the high byte through the write window to complete the cycle. */
#define rom_out_be16(addr, w) \
	({u16 __w, __v = (w); u32 _addr = ((u32) (addr)); \
	__w = ((*(__force volatile u16 *) ((_addr & 0xFFFF0000UL) + ((__v & 0xFF)<<1)))); \
	__w = ((*(__force volatile u16 *) ((_addr | 0x10000) + ((__v >> 8)<<1)))); })
#define rom_out_le16(addr, w) \
	({u16 __w, __v = (w); u32 _addr = ((u32) (addr)); \
	__w = ((*(__force volatile u16 *) ((_addr & 0xFFFF0000UL) + ((__v >> 8)<<1)))); \
	__w = ((*(__force volatile u16 *) ((_addr | 0x10000) + ((__v & 0xFF)<<1)))); })

#define raw_rom_inb rom_in_8
#define raw_rom_inw rom_in_be16

#define raw_rom_outb(val, port) rom_out_8((port), (val))
#define raw_rom_outw(val, port) rom_out_be16((port), (val))
#endif /* CONFIG_ATARI_ROM_ISA */
114 static inline void raw_insb(volatile u8 __iomem *port, u8 *buf, unsigned int len)
116 unsigned int i;
118 for (i = 0; i < len; i++)
119 *buf++ = in_8(port);
122 static inline void raw_outsb(volatile u8 __iomem *port, const u8 *buf,
123 unsigned int len)
125 unsigned int i;
127 for (i = 0; i < len; i++)
128 out_8(port, *buf++);
131 static inline void raw_insw(volatile u16 __iomem *port, u16 *buf, unsigned int nr)
133 unsigned int tmp;
135 if (nr & 15) {
136 tmp = (nr & 15) - 1;
137 asm volatile (
138 "1: movew %2@,%0@+; dbra %1,1b"
139 : "=a" (buf), "=d" (tmp)
140 : "a" (port), "0" (buf),
141 "1" (tmp));
143 if (nr >> 4) {
144 tmp = (nr >> 4) - 1;
145 asm volatile (
146 "1: "
147 "movew %2@,%0@+; "
148 "movew %2@,%0@+; "
149 "movew %2@,%0@+; "
150 "movew %2@,%0@+; "
151 "movew %2@,%0@+; "
152 "movew %2@,%0@+; "
153 "movew %2@,%0@+; "
154 "movew %2@,%0@+; "
155 "movew %2@,%0@+; "
156 "movew %2@,%0@+; "
157 "movew %2@,%0@+; "
158 "movew %2@,%0@+; "
159 "movew %2@,%0@+; "
160 "movew %2@,%0@+; "
161 "movew %2@,%0@+; "
162 "movew %2@,%0@+; "
163 "dbra %1,1b"
164 : "=a" (buf), "=d" (tmp)
165 : "a" (port), "0" (buf),
166 "1" (tmp));
170 static inline void raw_outsw(volatile u16 __iomem *port, const u16 *buf,
171 unsigned int nr)
173 unsigned int tmp;
175 if (nr & 15) {
176 tmp = (nr & 15) - 1;
177 asm volatile (
178 "1: movew %0@+,%2@; dbra %1,1b"
179 : "=a" (buf), "=d" (tmp)
180 : "a" (port), "0" (buf),
181 "1" (tmp));
183 if (nr >> 4) {
184 tmp = (nr >> 4) - 1;
185 asm volatile (
186 "1: "
187 "movew %0@+,%2@; "
188 "movew %0@+,%2@; "
189 "movew %0@+,%2@; "
190 "movew %0@+,%2@; "
191 "movew %0@+,%2@; "
192 "movew %0@+,%2@; "
193 "movew %0@+,%2@; "
194 "movew %0@+,%2@; "
195 "movew %0@+,%2@; "
196 "movew %0@+,%2@; "
197 "movew %0@+,%2@; "
198 "movew %0@+,%2@; "
199 "movew %0@+,%2@; "
200 "movew %0@+,%2@; "
201 "movew %0@+,%2@; "
202 "movew %0@+,%2@; "
203 "dbra %1,1b"
204 : "=a" (buf), "=d" (tmp)
205 : "a" (port), "0" (buf),
206 "1" (tmp));
210 static inline void raw_insl(volatile u32 __iomem *port, u32 *buf, unsigned int nr)
212 unsigned int tmp;
214 if (nr & 15) {
215 tmp = (nr & 15) - 1;
216 asm volatile (
217 "1: movel %2@,%0@+; dbra %1,1b"
218 : "=a" (buf), "=d" (tmp)
219 : "a" (port), "0" (buf),
220 "1" (tmp));
222 if (nr >> 4) {
223 tmp = (nr >> 4) - 1;
224 asm volatile (
225 "1: "
226 "movel %2@,%0@+; "
227 "movel %2@,%0@+; "
228 "movel %2@,%0@+; "
229 "movel %2@,%0@+; "
230 "movel %2@,%0@+; "
231 "movel %2@,%0@+; "
232 "movel %2@,%0@+; "
233 "movel %2@,%0@+; "
234 "movel %2@,%0@+; "
235 "movel %2@,%0@+; "
236 "movel %2@,%0@+; "
237 "movel %2@,%0@+; "
238 "movel %2@,%0@+; "
239 "movel %2@,%0@+; "
240 "movel %2@,%0@+; "
241 "movel %2@,%0@+; "
242 "dbra %1,1b"
243 : "=a" (buf), "=d" (tmp)
244 : "a" (port), "0" (buf),
245 "1" (tmp));
249 static inline void raw_outsl(volatile u32 __iomem *port, const u32 *buf,
250 unsigned int nr)
252 unsigned int tmp;
254 if (nr & 15) {
255 tmp = (nr & 15) - 1;
256 asm volatile (
257 "1: movel %0@+,%2@; dbra %1,1b"
258 : "=a" (buf), "=d" (tmp)
259 : "a" (port), "0" (buf),
260 "1" (tmp));
262 if (nr >> 4) {
263 tmp = (nr >> 4) - 1;
264 asm volatile (
265 "1: "
266 "movel %0@+,%2@; "
267 "movel %0@+,%2@; "
268 "movel %0@+,%2@; "
269 "movel %0@+,%2@; "
270 "movel %0@+,%2@; "
271 "movel %0@+,%2@; "
272 "movel %0@+,%2@; "
273 "movel %0@+,%2@; "
274 "movel %0@+,%2@; "
275 "movel %0@+,%2@; "
276 "movel %0@+,%2@; "
277 "movel %0@+,%2@; "
278 "movel %0@+,%2@; "
279 "movel %0@+,%2@; "
280 "movel %0@+,%2@; "
281 "movel %0@+,%2@; "
282 "dbra %1,1b"
283 : "=a" (buf), "=d" (tmp)
284 : "a" (port), "0" (buf),
285 "1" (tmp));
290 static inline void raw_insw_swapw(volatile u16 __iomem *port, u16 *buf,
291 unsigned int nr)
293 if ((nr) % 8)
294 __asm__ __volatile__
295 ("\tmovel %0,%/a0\n\t"
296 "movel %1,%/a1\n\t"
297 "movel %2,%/d6\n\t"
298 "subql #1,%/d6\n"
299 "1:\tmovew %/a0@,%/d0\n\t"
300 "rolw #8,%/d0\n\t"
301 "movew %/d0,%/a1@+\n\t"
302 "dbra %/d6,1b"
304 : "g" (port), "g" (buf), "g" (nr)
305 : "d0", "a0", "a1", "d6");
306 else
307 __asm__ __volatile__
308 ("movel %0,%/a0\n\t"
309 "movel %1,%/a1\n\t"
310 "movel %2,%/d6\n\t"
311 "lsrl #3,%/d6\n\t"
312 "subql #1,%/d6\n"
313 "1:\tmovew %/a0@,%/d0\n\t"
314 "rolw #8,%/d0\n\t"
315 "movew %/d0,%/a1@+\n\t"
316 "movew %/a0@,%/d0\n\t"
317 "rolw #8,%/d0\n\t"
318 "movew %/d0,%/a1@+\n\t"
319 "movew %/a0@,%/d0\n\t"
320 "rolw #8,%/d0\n\t"
321 "movew %/d0,%/a1@+\n\t"
322 "movew %/a0@,%/d0\n\t"
323 "rolw #8,%/d0\n\t"
324 "movew %/d0,%/a1@+\n\t"
325 "movew %/a0@,%/d0\n\t"
326 "rolw #8,%/d0\n\t"
327 "movew %/d0,%/a1@+\n\t"
328 "movew %/a0@,%/d0\n\t"
329 "rolw #8,%/d0\n\t"
330 "movew %/d0,%/a1@+\n\t"
331 "movew %/a0@,%/d0\n\t"
332 "rolw #8,%/d0\n\t"
333 "movew %/d0,%/a1@+\n\t"
334 "movew %/a0@,%/d0\n\t"
335 "rolw #8,%/d0\n\t"
336 "movew %/d0,%/a1@+\n\t"
337 "dbra %/d6,1b"
339 : "g" (port), "g" (buf), "g" (nr)
340 : "d0", "a0", "a1", "d6");
343 static inline void raw_outsw_swapw(volatile u16 __iomem *port, const u16 *buf,
344 unsigned int nr)
346 if ((nr) % 8)
347 __asm__ __volatile__
348 ("movel %0,%/a0\n\t"
349 "movel %1,%/a1\n\t"
350 "movel %2,%/d6\n\t"
351 "subql #1,%/d6\n"
352 "1:\tmovew %/a1@+,%/d0\n\t"
353 "rolw #8,%/d0\n\t"
354 "movew %/d0,%/a0@\n\t"
355 "dbra %/d6,1b"
357 : "g" (port), "g" (buf), "g" (nr)
358 : "d0", "a0", "a1", "d6");
359 else
360 __asm__ __volatile__
361 ("movel %0,%/a0\n\t"
362 "movel %1,%/a1\n\t"
363 "movel %2,%/d6\n\t"
364 "lsrl #3,%/d6\n\t"
365 "subql #1,%/d6\n"
366 "1:\tmovew %/a1@+,%/d0\n\t"
367 "rolw #8,%/d0\n\t"
368 "movew %/d0,%/a0@\n\t"
369 "movew %/a1@+,%/d0\n\t"
370 "rolw #8,%/d0\n\t"
371 "movew %/d0,%/a0@\n\t"
372 "movew %/a1@+,%/d0\n\t"
373 "rolw #8,%/d0\n\t"
374 "movew %/d0,%/a0@\n\t"
375 "movew %/a1@+,%/d0\n\t"
376 "rolw #8,%/d0\n\t"
377 "movew %/d0,%/a0@\n\t"
378 "movew %/a1@+,%/d0\n\t"
379 "rolw #8,%/d0\n\t"
380 "movew %/d0,%/a0@\n\t"
381 "movew %/a1@+,%/d0\n\t"
382 "rolw #8,%/d0\n\t"
383 "movew %/d0,%/a0@\n\t"
384 "movew %/a1@+,%/d0\n\t"
385 "rolw #8,%/d0\n\t"
386 "movew %/d0,%/a0@\n\t"
387 "movew %/a1@+,%/d0\n\t"
388 "rolw #8,%/d0\n\t"
389 "movew %/d0,%/a0@\n\t"
390 "dbra %/d6,1b"
392 : "g" (port), "g" (buf), "g" (nr)
393 : "d0", "a0", "a1", "d6");
#if defined(CONFIG_ATARI_ROM_ISA)
/* ROM port string I/O: plain C loops over the rom_* single accessors
 * defined above (the ROM port supports no burst transfers). */
static inline void raw_rom_insb(volatile u8 __iomem *port, u8 *buf, unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len; i++)
		*buf++ = rom_in_8(port);
}

static inline void raw_rom_outsb(volatile u8 __iomem *port, const u8 *buf,
				 unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len; i++)
		rom_out_8(port, *buf++);
}

static inline void raw_rom_insw(volatile u16 __iomem *port, u16 *buf,
				unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++)
		*buf++ = rom_in_be16(port);
}

static inline void raw_rom_outsw(volatile u16 __iomem *port, const u16 *buf,
				 unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++)
		rom_out_be16(port, *buf++);
}

/* _swapw variants transfer byte-swapped words via the *_le16 accessors. */
static inline void raw_rom_insw_swapw(volatile u16 __iomem *port, u16 *buf,
				      unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++)
		*buf++ = rom_in_le16(port);
}

static inline void raw_rom_outsw_swapw(volatile u16 __iomem *port, const u16 *buf,
				       unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++)
		rom_out_le16(port, *buf++);
}
#endif /* CONFIG_ATARI_ROM_ISA */
452 #endif /* __KERNEL__ */
454 #endif /* _RAW_IO_H */