[SPARC64]: Fix cpu trampoline et al. mismatch warnings.
[linux-2.6/openmoko-kernel/knife-kernel.git] / include / asm-x86 / byteorder.h
blobfe2f2e5d51baf2bbe66e649c88d5de1713b56c42
1 #ifndef _ASM_X86_BYTEORDER_H
2 #define _ASM_X86_BYTEORDER_H
4 #include <asm/types.h>
5 #include <linux/compiler.h>
7 #ifdef __GNUC__
9 #ifdef __i386__
11 static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
13 #ifdef CONFIG_X86_BSWAP
14 __asm__("bswap %0" : "=r" (x) : "0" (x));
15 #else
16 __asm__("xchgb %b0,%h0\n\t" /* swap lower bytes */
17 "rorl $16,%0\n\t" /* swap words */
18 "xchgb %b0,%h0" /* swap higher bytes */
19 :"=q" (x)
20 : "0" (x));
21 #endif
22 return x;
25 static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 val)
27 union {
28 struct { __u32 a,b; } s;
29 __u64 u;
30 } v;
31 v.u = val;
32 #ifdef CONFIG_X86_BSWAP
33 __asm__("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
34 : "=r" (v.s.a), "=r" (v.s.b)
35 : "0" (v.s.a), "1" (v.s.b));
36 #else
37 v.s.a = ___arch__swab32(v.s.a);
38 v.s.b = ___arch__swab32(v.s.b);
39 __asm__("xchgl %0,%1" : "=r" (v.s.a), "=r" (v.s.b) : "0" (v.s.a), "1" (v.s.b));
40 #endif
41 return v.u;
44 #else /* __i386__ */
46 static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 x)
48 __asm__("bswapq %0" : "=r" (x) : "0" (x));
49 return x;
52 static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
54 __asm__("bswapl %0" : "=r" (x) : "0" (x));
55 return x;
58 #endif
/* Do not define swab16.  GCC is smart enough to recognize the "C" version and
   convert it into a rotation or exchange. */
63 #define __arch__swab64(x) ___arch__swab64(x)
64 #define __arch__swab32(x) ___arch__swab32(x)
66 #define __BYTEORDER_HAS_U64__
68 #endif /* __GNUC__ */
70 #include <linux/byteorder/little_endian.h>
72 #endif /* _ASM_X86_BYTEORDER_H */