/* arch/x86/include/asm/string_32.h */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
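/*
 * Example (illustrative): for a run-time n of, say, 10, the sequence above
 * copies n / 4 == 2 dwords with "rep ; movsl", reloads ecx with n, masks it
 * down to n & 3 == 2, and copies the remaining 2 bytes with "rep ; movsb".
 */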
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
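/*
 * Example (illustrative): since n is a compile-time constant here,
 * __constant_memcpy(to, from, 6) reduces to the "case 6" branch above,
 * i.e. one 32-bit store plus one 16-bit store and no loop at all.
 */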
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))

#else

/*
 *	No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif
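/*
 * Example (illustrative): with gcc >= 4 and neither CONFIG_X86_USE_3DNOW nor
 * CONFIG_KMEMCHECK set, memcpy(dst, src, 8) maps to __builtin_memcpy() and
 * the compiler is free to emit a couple of direct 32-bit moves; on the
 * pre-gcc-4 path the same call would go through __constant_memcpy() instead.
 */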
#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
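/*
 * Example (illustrative): __memset_generic(buf, 0, 64) loads 64 into ecx,
 * buf into edi and 0 into al, then a single "rep stosb" stores the 64
 * zero bytes.
 */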
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}
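/*
 * Example (illustrative): here c is already a byte pattern replicated into
 * all 32 bits (e.g. 0 for memset(x, 0, y)).  For count == 7 the asm stores
 * count/4 == 1 dword with "rep ; stosl", then "testb $2,%b3" (bit 1 of
 * count) triggers a "stosw" and "testb $1,%b3" (bit 0) a final "stosb":
 * 4 + 2 + 1 == 7 bytes.
 */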
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}
#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
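/*
 * Example (illustrative): for a constant count of 11 (not one of the small
 * cases above) the code stores count/4 == 2 dwords via COMMON's "rep ; stosl"
 * and, since count % 4 == 3, appends "stosw" and "stosb" for the last three
 * bytes: 8 + 2 + 1 == 11.
 */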
#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
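/*
 * Example (illustrative): on the pre-gcc-4 path, memset(p, 0xAB, n) has a
 * constant fill byte, so the multiply replicates it into every byte of a
 * long (0x01010101UL * 0xAB == 0xABABABABUL) and __constant_c_x_memset()
 * can then store it 32 bits at a time.
 */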
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */