#ifndef __ASM_SH_BYTEORDER_H
#define __ASM_SH_BYTEORDER_H

/*
 * include/asm-sh/byteorder.h
 *
 * Copyright (C) 1999 Niibe Yutaka
 */

#include <asm/types.h>
#include <linux/compiler.h>
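/*
 * 32-bit byte reversal using the SH swap instructions: swap.b
 * exchanges the two low-order bytes of a register and swap.w
 * exchanges its 16-bit halves, so the sequence below turns e.g.
 * 0x12345678 into 0x12347856, then 0x78561234, and finally 0x78563412.
 */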
static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
	__asm__("swap.b %0, %0\n\t"
		"swap.w %0, %0\n\t"
		"swap.b %0, %0"
		: "=r" (x)
		: "0" (x));
	return x;
}
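/*
 * 16-bit byte reversal: a single swap.b exchanges the two low-order
 * bytes, e.g. 0x1234 -> 0x3412.
 */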
static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 x)
{
	__asm__("swap.b %0, %0"
		: "=r" (x)
		: "0" (x));
	return x;
}
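/*
 * 64-bit byte reversal built from the 32-bit primitive: the union
 * splits the value into two 32-bit words, each word is byte-reversed,
 * and the two words trade places.
 */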
static inline __u64 ___arch__swab64(__u64 val)
{
	union {
		struct { __u32 a,b; } s;
		__u64 u;
	} v, w;
	v.u = val;
	w.s.b = ___arch__swab32(v.s.a);
	w.s.a = ___arch__swab32(v.s.b);
	return w.u;
}
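/*
 * Hook the arch-optimized helpers into the generic byteorder code.
 * __BYTEORDER_HAS_U64__ and __SWAB_64_THRU_32__ advertise that 64-bit
 * swabs are supported and may be composed from 32-bit operations.
 */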
#define __arch__swab64(x) ___arch__swab64(x)
#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)

#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
# define __BYTEORDER_HAS_U64__
# define __SWAB_64_THRU_32__
#endif
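/*
 * SH runs in either endianness; the compiler predefines
 * __LITTLE_ENDIAN__ when targeting little-endian SH, so pick the
 * matching generic header.
 */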
#ifdef __LITTLE_ENDIAN__
#include <linux/byteorder/little_endian.h>
#else
#include <linux/byteorder/big_endian.h>
#endif

#endif /* __ASM_SH_BYTEORDER_H */