/*
 * Port on Texas Instruments TMS320C6x architecture
 *
 * Copyright (C) 2004, 2009, 2010 Texas Instruments Incorporated
 * Author: Aurelien Jacquiot (aurelien.jacquiot@jaluna.com)
 * Rewritten for 2.6.3x: Mark Salter <msalter@redhat.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
12 #ifndef _ASM_C6X_UNALIGNED_H
13 #define _ASM_C6X_UNALIGNED_H
15 #include <linux/swab.h>
/*
 * The C64x+ can do unaligned word and dword accesses in hardware
 * using special load/store instructions.
 */
22 static inline u16
get_unaligned_le16(const void *p
)
25 return _p
[0] | _p
[1] << 8;
28 static inline u16
get_unaligned_be16(const void *p
)
31 return _p
[0] << 8 | _p
[1];
34 static inline void put_unaligned_le16(u16 val
, void *p
)
41 static inline void put_unaligned_be16(u16 val
, void *p
)
48 static inline u32
get_unaligned32(const void *p
)
51 asm (" ldnw .d1t1 *%0,%0\n"
57 static inline void put_unaligned32(u32 val
, void *p
)
59 asm volatile (" stnw .d2t1 %0,*%1\n"
60 : : "a"(val
), "b"(p
) : "memory");
63 static inline u64
get_unaligned64(const void *p
)
66 asm volatile (" ldndw .d1t1 *%1,%0\n"
68 : "=a"(val
) : "a"(p
));
72 static inline void put_unaligned64(u64 val
, const void *p
)
74 asm volatile (" stndw .d2t1 %0,*%1\n"
75 : : "a"(val
), "b"(p
) : "memory");
/*
 * The hardware non-aligned loads/stores above operate in native byte
 * order, so the explicit-endian 32/64-bit accessors reduce to either a
 * direct call or a call wrapped in a byte swap, selected on the CPU's
 * configured endianness.  (Missing #else/#endif restored.)
 */
#ifdef CONFIG_CPU_BIG_ENDIAN

#define get_unaligned_le32(p)	 __swab32(get_unaligned32(p))
#define get_unaligned_le64(p)	 __swab64(get_unaligned64(p))
#define get_unaligned_be32(p)	 get_unaligned32(p)
#define get_unaligned_be64(p)	 get_unaligned64(p)
#define put_unaligned_le32(v, p) put_unaligned32(__swab32(v), (p))
#define put_unaligned_le64(v, p) put_unaligned64(__swab64(v), (p))
#define put_unaligned_be32(v, p) put_unaligned32((v), (p))
#define put_unaligned_be64(v, p) put_unaligned64((v), (p))
#define get_unaligned	__get_unaligned_be
#define put_unaligned	__put_unaligned_be

#else

#define get_unaligned_le32(p)	 get_unaligned32(p)
#define get_unaligned_le64(p)	 get_unaligned64(p)
#define get_unaligned_be32(p)	 __swab32(get_unaligned32(p))
#define get_unaligned_be64(p)	 __swab64(get_unaligned64(p))
#define put_unaligned_le32(v, p) put_unaligned32((v), (p))
#define put_unaligned_le64(v, p) put_unaligned64((v), (p))
#define put_unaligned_be32(v, p) put_unaligned32(__swab32(v), (p))
#define put_unaligned_be64(v, p) put_unaligned64(__swab64(v), (p))
#define get_unaligned	__get_unaligned_le
#define put_unaligned	__put_unaligned_le

#endif
/*
 * Cause a link-time error if we try an unaligned access other than
 * 1, 2, 4 or 8 bytes long: this symbol is deliberately never defined,
 * so any surviving reference fails at link time.
 */
extern int __bad_unaligned_access_size(void);
/*
 * Size-dispatching little-endian unaligned read.  sizeof() selects the
 * accessor at compile time; an unsupported size leaves a reference to
 * __bad_unaligned_access_size() and fails the link.  __force added for
 * consistency with __get_unaligned_be (sparse cast annotation).
 */
#define __get_unaligned_le(ptr) (__force typeof(*(ptr)))({	\
	sizeof(*(ptr)) == 1 ? *(ptr) :				\
	  (sizeof(*(ptr)) == 2 ? get_unaligned_le16((ptr)) :	\
	     (sizeof(*(ptr)) == 4 ? get_unaligned_le32((ptr)) :	\
		(sizeof(*(ptr)) == 8 ? get_unaligned_le64((ptr)) : \
		   __bad_unaligned_access_size())));		\
	})
/*
 * Size-dispatching big-endian unaligned read; see __get_unaligned_le.
 * Closing "})" of the statement expression restored.
 */
#define __get_unaligned_be(ptr) (__force typeof(*(ptr)))({	\
	sizeof(*(ptr)) == 1 ? *(ptr) :				\
	  (sizeof(*(ptr)) == 2 ? get_unaligned_be16((ptr)) :	\
	     (sizeof(*(ptr)) == 4 ? get_unaligned_be32((ptr)) :	\
		(sizeof(*(ptr)) == 8 ? get_unaligned_be64((ptr)) : \
		   __bad_unaligned_access_size())));		\
	})
/*
 * Size-dispatching little-endian unaligned store.  The switch on
 * sizeof() is resolved at compile time; an unsupported size leaves a
 * reference to __bad_unaligned_access_size() and fails the link.
 * Missing case labels and breaks restored.
 */
#define __put_unaligned_le(val, ptr) ({				\
	void *__gu_p = (ptr);					\
	switch (sizeof(*(ptr))) {				\
	case 1:							\
		*(u8 *)__gu_p = (__force u8)(val);		\
		break;						\
	case 2:							\
		put_unaligned_le16((__force u16)(val), __gu_p);	\
		break;						\
	case 4:							\
		put_unaligned_le32((__force u32)(val), __gu_p);	\
		break;						\
	case 8:							\
		put_unaligned_le64((__force u64)(val), __gu_p);	\
		break;						\
	default:						\
		__bad_unaligned_access_size();			\
		break;						\
	}							\
	(void)0; })
/*
 * Size-dispatching big-endian unaligned store; see __put_unaligned_le.
 * Missing case labels and breaks restored.
 */
#define __put_unaligned_be(val, ptr) ({				\
	void *__gu_p = (ptr);					\
	switch (sizeof(*(ptr))) {				\
	case 1:							\
		*(u8 *)__gu_p = (__force u8)(val);		\
		break;						\
	case 2:							\
		put_unaligned_be16((__force u16)(val), __gu_p);	\
		break;						\
	case 4:							\
		put_unaligned_be32((__force u32)(val), __gu_p);	\
		break;						\
	case 8:							\
		put_unaligned_be64((__force u64)(val), __gu_p);	\
		break;						\
	default:						\
		__bad_unaligned_access_size();			\
		break;						\
	}							\
	(void)0; })
170 #endif /* _ASM_C6X_UNALIGNED_H */