#ifndef _I386_STRING_H_
#define _I386_STRING_H_
#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 */

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strsep,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used through-out, making for "slightly" unclear code :-)
 *
 *		NO Copyright (C) 1991, 1992 Linus Torvalds,
 *		consider these trivial functions to be PD.
 */
/* AK: in fact I bet it would be better to move this stuff all out of line.
 */
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu):"memory");
return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffffu), "g" (count)
	: "memory");
return dest;
}
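/*
 * Illustrative note (not in the original header): at most count bytes of
 * src are appended, and the "xorl %2,%2 ; stosb" tail always writes a
 * terminating NUL. For example:
 *
 *	char buf[8] = "ab";
 *	strncat(buf, "cdef", 2);	// buf becomes "abcd" + '\0'
 */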
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
	:"1" (cs),"2" (ct)
	:"memory");
return __res;
}
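/*
 * Illustrative note (not in the original header): "sbbl %eax,%eax" turns
 * scasb's carry into 0/-1 and "orb $1,%al" forces the mismatch cases to
 * exactly -1 or 1, so only the sign is meaningful:
 *
 *	strcmp("abc", "abz") == -1;	// not the byte difference 'c'-'z'
 *	strcmp("abc", "abc") == 0;
 *	strcmp("abz", "abc") == 1;
 */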
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count)
	:"memory");
return __res;
}
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0)
	:"1" (s),"0" (c)
	:"memory");
return __res;
}
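/*
 * Illustrative note (not in the original header): as with the C library
 * version, searching for '\0' finds the terminator:
 *
 *	strchr("hello", 'l');	// -> s + 2
 *	strchr("hello", '\0');	// -> s + 5, not NULL
 *	strchr("hello", 'z');	// -> NULL ("movl $1,%1" then "decl %0")
 */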
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1)
	:"0" (0),"1" (s),"2" (c)
	:"memory");
return __res;
}
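/*
 * Illustrative note (not in the original header): the loop scans forward
 * once, re-recording "leal -1(%%esi),%0" on every match, so the last hit
 * wins; the "0" (0) input preloads the result with NULL for the no-match
 * case:
 *
 *	strrchr("banana", 'a');	// -> s + 5
 *	strrchr("banana", 'z');	// -> NULL
 */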
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0)
	:"1" (s),"a" (0), "0" (0xffffffffu)
	:"memory");
return __res;
}
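/*
 * Illustrative note (not in the original header): %ecx starts at
 * 0xffffffff and repne scasb decrements it once per byte scanned,
 * including the NUL, so "notl %0 ; decl %0" recovers the length: for
 * "abc", 4 bytes scanned leaves ecx = 0xfffffffb; notl gives 4, decl
 * gives 3.
 */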
static __always_inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"
	"movl %4,%%ecx\n\t"
	"andl $3,%%ecx\n\t"
#if 1	/* want to pay 2 byte penalty for a chance to skip microcoded rep? */
	"jz 1f\n\t"
#endif
	"rep ; movsb\n\t"
	"1:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	: "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
	: "memory");
return (to);
}
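/*
 * Illustrative note (not in the original header): the copy is split into
 * n/4 dword moves plus an n&3 byte tail, so e.g. n == 11 runs "rep movsl"
 * with ecx = 2 and then "rep movsb" with ecx = 3. The "#if 1" jz spends
 * two bytes of code for the chance to skip the second rep entirely when
 * the tail is empty.
 */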
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	long esi, edi;
	if (!n) return to;
#if 1	/* want to do small copies with non-string ops? */
	switch (n) {
		case 1: *(char*)to = *(char*)from; return to;
		case 2: *(short*)to = *(short*)from; return to;
		case 4: *(int*)to = *(int*)from; return to;
#if 1	/* including those doable with two moves? */
		case 3: *(short*)to = *(short*)from;
			*((char*)to+2) = *((char*)from+2); return to;
		case 5: *(int*)to = *(int*)from;
			*((char*)to+4) = *((char*)from+4); return to;
		case 6: *(int*)to = *(int*)from;
			*((short*)to+2) = *((short*)from+2); return to;
		case 8: *(int*)to = *(int*)from;
			*((int*)to+1) = *((int*)from+1); return to;
#endif
	}
#endif
	esi = (long) from;
	edi = (long) to;
	if (n >= 5*4) {
		/* large block: use rep prefix */
		int ecx;
		__asm__ __volatile__(
			"rep ; movsl"
			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			: "0" (n/4), "1" (edi),"2" (esi)
			: "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 3*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 2*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 1*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
	}
	switch (n % 4) {
		/* tail */
		case 0: return to;
		case 1: __asm__ __volatile__("movsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		case 2: __asm__ __volatile__("movsw"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		default: __asm__ __volatile__("movsw\n\tmovsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
	}
}
#define __HAVE_ARCH_MEMCPY
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))

#else

/*
 *	This CPU favours the generic memcpy (eg Intel PIII)
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
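/*
 * Illustrative note (not in the original header): __builtin_constant_p
 * makes the dispatch purely compile-time:
 *
 *	memcpy(dst, src, 8);	// n constant -> __constant_memcpy[3d]
 *	memcpy(dst, src, len);	// n variable -> __memcpy[3d]
 */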
#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0)
	:"a" (c),"0" (cs),"1" (count)
	:"memory");
return __res;
}
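/*
 * Illustrative note (not in the original header): after repne scasb, %edi
 * is one past the last byte examined; "movl $1,%0" + "decl %0" folds both
 * outcomes: found -> edi - 1 (the match), not found -> 1 - 1 == NULL.
 */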
static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}
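/*
 * Illustrative note (not in the original header): a plain byte-wise
 * "rep stosb", roughly equivalent to
 *
 *	char *p = s;
 *	while (count--)
 *		*p++ = c;
 *	return s;
 */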
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"
	"testb $2,%b3\n\t"
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	:"=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
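/*
 * Illustrative note (not in the original header): c must already be the
 * fill byte replicated into all four byte lanes (see the memset() macro
 * below). count/4 dwords go out via "rep stosl"; "testb $2/$1,%b3" then
 * picks the stosw/stosb tail, e.g. count == 7 does one stosl, one stosw,
 * one stosb.
 */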
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count)
	:"memory");
return __res;
}
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}
#undef COMMON
}
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
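/*
 * Illustrative note (not in the original header): for a constant fill
 * byte, 0x01010101UL * (unsigned char)(c) replicates it into every byte
 * lane, e.g. memset(p, 0xab, n) passes the pattern 0xabababab down to
 * the stosl-based helpers above.
 */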
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c)
		: "memory");
	return addr;
}
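/*
 * Illustrative note (not in the original header): per the comment above,
 * a miss returns addr + size rather than NULL:
 *
 *	char buf[4] = { 1, 2, 3, 4 };
 *	memscan(buf, 3, 4);	// -> &buf[2]
 *	memscan(buf, 9, 4);	// -> &buf[4], one past the area
 */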
#endif /* __KERNEL__ */

#endif