[pv_ops_mirror.git] / include / asm-s390 / byteorder.h
blob 1fe2492baa8d141414c66f343f6ddae3e30eeed5

#ifndef _S390_BYTEORDER_H
#define _S390_BYTEORDER_H

/*
 *  include/asm-s390/byteorder.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 */

#include <asm/types.h>

#ifdef __GNUC__
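
/*
 * 64-bit byte swapping.  LRVG ("load reversed", 64-bit) and its
 * register form LRVGR are z/Architecture instructions, so these
 * helpers are only built for 64-bit (__s390x__) kernels.
 */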
#ifdef __s390x__
static inline __u64 ___arch__swab64p(const __u64 *x)
{
	__u64 result;

	asm volatile("lrvg %0,%1" : "=d" (result) : "m" (*x));
	return result;
}

static inline __u64 ___arch__swab64(__u64 x)
{
	__u64 result;

	asm volatile("lrvgr %0,%1" : "=d" (result) : "d" (x));
	return result;
}

static inline void ___arch__swab64s(__u64 *x)
{
	*x = ___arch__swab64p(x);
}
#endif /* __s390x__ */
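
/*
 * 32-bit byte swapping.  On 31-bit kernels the result is assembled
 * byte by byte with ICM ("insert characters under mask") and IC;
 * 64-bit kernels can use a single LRV / LRVR ("load reversed").
 */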
static inline __u32 ___arch__swab32p(const __u32 *x)
{
	__u32 result;

	asm volatile(
#ifndef __s390x__
		" icm %0,8,3(%1)\n"
		" icm %0,4,2(%1)\n"
		" icm %0,2,1(%1)\n"
		" ic %0,0(%1)"
		: "=&d" (result) : "a" (x), "m" (*x) : "cc");
#else /* __s390x__ */
		" lrv %0,%1"
		: "=d" (result) : "m" (*x));
#endif /* __s390x__ */
	return result;
}

static inline __u32 ___arch__swab32(__u32 x)
{
#ifndef __s390x__
	return ___arch__swab32p(&x);
#else /* __s390x__ */
	__u32 result;

	asm volatile("lrvr %0,%1" : "=d" (result) : "d" (x));
	return result;
#endif /* __s390x__ */
}

static __inline__ void ___arch__swab32s(__u32 *x)
{
	*x = ___arch__swab32p(x);
}
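
/*
 * 16-bit byte swapping, same pattern: ICM/IC on 31-bit kernels,
 * LRVH ("load reversed halfword") on 64-bit kernels.
 */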
static __inline__ __u16 ___arch__swab16p(const __u16 *x)
{
	__u16 result;

	asm volatile(
#ifndef __s390x__
		" icm %0,2,1(%1)\n"
		" ic %0,0(%1)\n"
		: "=&d" (result) : "a" (x), "m" (*x) : "cc");
#else /* __s390x__ */
		" lrvh %0,%1"
		: "=d" (result) : "m" (*x));
#endif /* __s390x__ */
	return result;
}

static __inline__ __u16 ___arch__swab16(__u16 x)
{
	return ___arch__swab16p(&x);
}

static __inline__ void ___arch__swab16s(__u16 *x)
{
	*x = ___arch__swab16p(x);
}
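
/*
 * Hook the helpers in under the __arch__swab* names so that the
 * generic linux/byteorder code (pulled in below) implements
 * __swab16(), __swab32() and __swab64() with these instruction
 * sequences instead of the portable C fallbacks.
 */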
#ifdef __s390x__
#define __arch__swab64(x) ___arch__swab64(x)
#define __arch__swab64p(x) ___arch__swab64p(x)
#define __arch__swab64s(x) ___arch__swab64s(x)
#endif /* __s390x__ */
#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)
#define __arch__swab32p(x) ___arch__swab32p(x)
#define __arch__swab16p(x) ___arch__swab16p(x)
#define __arch__swab32s(x) ___arch__swab32s(x)
#define __arch__swab16s(x) ___arch__swab16s(x)
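
/*
 * Advertise 64-bit conversions.  On 31-bit kernels the generic code
 * synthesizes __swab64() from two 32-bit swaps (__SWAB_64_THRU_32__);
 * on 64-bit kernels ___arch__swab64() above is used directly.
 */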
#ifndef __s390x__
#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
# define __BYTEORDER_HAS_U64__
# define __SWAB_64_THRU_32__
#endif
#else /* __s390x__ */
#define __BYTEORDER_HAS_U64__
#endif /* __s390x__ */

#endif /* __GNUC__ */
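
/*
 * s390 is big-endian, so linux/byteorder/big_endian.h turns the
 * cpu_to_be*() and be*_to_cpu() conversions into no-ops and routes
 * the little-endian and swab interfaces through the __arch__swab*
 * hooks above, e.g. cpu_to_le32() on a variable compiles down to
 * the ___arch__swab32() sequence.
 */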
#include <linux/byteorder/big_endian.h>

#endif /* _S390_BYTEORDER_H */