/*
 * linux/fs/ufs/swab.h
 *
 * Copyright (C) 1997, 1998 Francois-Rene Rideau <fare@tunes.org>
 * Copyright (C) 1998 Jakub Jelinek <jj@ultra.linux.cz>
 * Copyright (C) 2001 Christoph Hellwig <hch@infradead.org>
 */
#ifndef _UFS_SWAB_H
#define _UFS_SWAB_H
/*
 * Notes:
 *    HERE WE ASSUME EITHER BIG OR LITTLE ENDIAN UFSes
 *    in case there are ufs implementations that have strange bytesexes,
 *    you'll need to modify code here as well as in ufs_super.c and ufs_fs.h
 *    to support them.
 */
enum {
	BYTESEX_LE,
	BYTESEX_BE
};
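/*
 * Usage sketch (structure and field names here are hypothetical): every
 * access to an on-disk field is meant to go through one of the helpers
 * below, so the same code handles little- and big-endian UFS variants:
 *
 *	u32 nfree = fs32_to_cpu(sb, disk->free_count);
 *	fs32_sub(sb, &disk->free_count, 1);
 *	disk->free_count = cpu_to_fs32(sb, nfree);
 */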
static inline u64
fs64_to_cpu(struct super_block *sbp, __fs64 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return le64_to_cpu((__force __le64)n);
	else
		return be64_to_cpu((__force __be64)n);
}
static inline __fs64
cpu_to_fs64(struct super_block *sbp, u64 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return (__force __fs64)cpu_to_le64(n);
	else
		return (__force __fs64)cpu_to_be64(n);
}
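/*
 * The fs*_add()/fs*_sub() helpers below adjust an on-disk field in place
 * by d, keeping the filesystem's byte order.
 */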
static inline void
fs64_add(struct super_block *sbp, __fs64 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le64 *)n = cpu_to_le64(le64_to_cpu(*(__le64 *)n)+d);
	else
		*(__be64 *)n = cpu_to_be64(be64_to_cpu(*(__be64 *)n)+d);
}
static inline void
fs64_sub(struct super_block *sbp, __fs64 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le64 *)n = cpu_to_le64(le64_to_cpu(*(__le64 *)n)-d);
	else
		*(__be64 *)n = cpu_to_be64(be64_to_cpu(*(__be64 *)n)-d);
}
static inline u32
fs32_to_cpu(struct super_block *sbp, __fs32 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return le32_to_cpu((__force __le32)n);
	else
		return be32_to_cpu((__force __be32)n);
}
static inline __fs32
cpu_to_fs32(struct super_block *sbp, u32 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return (__force __fs32)cpu_to_le32(n);
	else
		return (__force __fs32)cpu_to_be32(n);
}
static inline void
fs32_add(struct super_block *sbp, __fs32 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le32 *)n = cpu_to_le32(le32_to_cpu(*(__le32 *)n)+d);
	else
		*(__be32 *)n = cpu_to_be32(be32_to_cpu(*(__be32 *)n)+d);
}
static inline void
fs32_sub(struct super_block *sbp, __fs32 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le32 *)n = cpu_to_le32(le32_to_cpu(*(__le32 *)n)-d);
	else
		*(__be32 *)n = cpu_to_be32(be32_to_cpu(*(__be32 *)n)-d);
}
static inline u16
fs16_to_cpu(struct super_block *sbp, __fs16 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return le16_to_cpu((__force __le16)n);
	else
		return be16_to_cpu((__force __be16)n);
}
static inline __fs16
cpu_to_fs16(struct super_block *sbp, u16 n)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		return (__force __fs16)cpu_to_le16(n);
	else
		return (__force __fs16)cpu_to_be16(n);
}
static inline void
fs16_add(struct super_block *sbp, __fs16 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le16 *)n = cpu_to_le16(le16_to_cpu(*(__le16 *)n)+d);
	else
		*(__be16 *)n = cpu_to_be16(be16_to_cpu(*(__be16 *)n)+d);
}
static inline void
fs16_sub(struct super_block *sbp, __fs16 *n, int d)
{
	if (UFS_SB(sbp)->s_bytesex == BYTESEX_LE)
		*(__le16 *)n = cpu_to_le16(le16_to_cpu(*(__le16 *)n)-d);
	else
		*(__be16 *)n = cpu_to_be16(be16_to_cpu(*(__be16 *)n)-d);
}
#endif /* _UFS_SWAB_H */