#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */
#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
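
/*
 * __memcpy() below copies n/4 dwords with "rep ; movsl", then up to
 * three trailing bytes with "rep ; movsb". The "0"/"1"/"2" input
 * constraints pin n/4, to and from to the ECX/EDI/ESI outputs, and the
 * dummy outputs d0-d2 tell gcc those registers get clobbered.
 */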
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
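
/*
 * With a constant count the switch above folds to straight stores:
 * e.g. __constant_memcpy(to, from, 6) becomes one dword store plus one
 * word store. Other sizes below 20 bytes take the unrolled movsl chain
 * plus a movsw/movsb tail; only n >= 20 pays for the rep prefix.
 */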
#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
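
/*
 * Both selections happen at compile time: a literal length makes
 * __builtin_constant_p((n)) fold to 1, and the < 512 test in the
 * helpers above keeps small copies on the inline path, reserving
 * _mmx_memcpy() for large blocks.
 */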
#else

/*
 *	No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif /* CONFIG_X86_USE_3DNOW */
#endif /* !CONFIG_FORTIFY_SOURCE */
#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);
extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif
#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
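
/*
 * "a" (c) places the fill byte in AL, so "rep\n\tstosb" stores it
 * count times starting at s, one byte per iteration.
 */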
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}
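
/*
 * The tail after "rep ; stosl" keys off the low bits of count (%b3):
 * bit 1 set means a word is still owed (stosw), bit 0 a final byte
 * (stosb). E.g. count == 7 does one stosl, one stosw and one stosb.
 */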
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
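
/*
 * COMMON() emits "rep ; stosl" for count/4 dwords plus the tail named
 * in its argument: with count == 10, count % 4 == 2 selects
 * COMMON("\n\tstosw"), i.e. two dword stores followed by one word store.
 */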
#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))
#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))
#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
#endif /* !CONFIG_FORTIFY_SOURCE */
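
/*
 * The 0x01010101UL multiply above replicates the fill byte into all
 * four lanes of a dword, e.g. (unsigned char)0xab * 0x01010101UL ==
 * 0xabababab, which is the pattern the stosl-based helpers expect in EAX.
 */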
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
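
/*
 * E.g. memscan(buf, 0, size) == buf + size means no NUL byte occurs in
 * the first size bytes; on a hit the return value points at the match.
 */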
#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */