/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */
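
/*
 * Each __HAVE_ARCH_* define below suppresses the generic fallback in
 * lib/string.c; the out-of-line versions are provided by the 32-bit
 * x86 string routines under arch/x86/lib/.
 */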

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
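
/*
 * Bulk copy: "rep ; movsl" moves n/4 dwords, then the 0-3 leftover
 * bytes go through "rep ; movsb".  The "0"/"1"/"2" input constraints
 * preload ECX, EDI and ESI, which the string instructions consume.
 */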
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
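/*
 * For example, __constant_memcpy(dst, src, 6) reduces to a single
 * 32-bit move plus a single 16-bit move, with no branches left over.
 */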
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
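
/*
 * __builtin_constant_p((n)) is itself a compile-time constant, so only
 * one branch of the macro above survives in the generated code.  Short
 * copies stay on the integer paths to avoid the FPU state save that
 * _mmx_memcpy() requires.
 */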

#else

/*
 *	No 3D Now!
 */

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);
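
/*
 * "rep stosb" stores AL to [EDI] count times; the "a", "1" and "0"
 * constraints preload c, s and count into EAX, EDI and ECX.
 */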
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
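/*
 * The caller is expected to pass c with the byte value replicated into
 * all four bytes (e.g. 0x41414141 for 'A'): "rep ; stosl" stores
 * count/4 whole dwords of EAX, and the testb/stosw/stosb tail finishes
 * the remaining one to three bytes.
 */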
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
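/*
 * For example, __constant_c_and_count_memset(p, 0, 7) folds to one
 * stosl (count/4 == 1) followed by stosw and stosb (count%4 == 3).
 */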
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
		unsigned long eax = pattern;

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}

#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */
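
/*
 * memset16()/memset32() below are __memset_generic() widened to 16- and
 * 32-bit stores ("rep stosw"/"rep stosl"); n counts elements, not bytes.
 */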
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */