/*
 * Taken from Linux /usr/include/asm/string.h
 * All except memcpy, memmove, memset and memcmp removed.
 */
#ifndef _I386_STRING_H_
#define _I386_STRING_H_
/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strtok,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used through-out, making for "slightly" unclear code :-)
 *
 * NO Copyright (C) 1991, 1992 Linus Torvalds,
 * consider these trivial functions to be PD.
 */
extern void *__memcpy(void * to, const void * from, size_t n);
extern void *__constant_memcpy(void * to, const void * from, size_t n);
extern void *memmove(void * dest,const void * src, size_t n);
extern void *__memset_generic(void * s, char c,size_t count);
extern void *__constant_c_memset(void * s, unsigned long c, size_t count);
extern void *__constant_c_and_count_memset(void * s, unsigned long pattern, size_t count);

extern inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"cld\n\t"
	"rep ; movsl\n\t"		/* copy n/4 longwords */
	"testb $2,%b4\n\t"		/* bit 1 of n: a word left over? */
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"		/* bit 0 of n: a byte left over? */
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}

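/*
 * ECX is loaded with n/4 for the "rep ; movsl", and the low two bits of
 * n (addressed as a byte register via %b4, hence the "q" constraint)
 * select the trailing movsw/movsb.  d0, d1 and d2 exist only to tell
 * gcc that ECX, EDI and ESI are clobbered.
 */
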
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
extern inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	switch (n) {
		case 0:
			return to;
		case 1:
			*(unsigned char *)to = *(const unsigned char *)from;
			return to;
		case 2:
			*(unsigned short *)to = *(const unsigned short *)from;
			return to;
		case 3:
			*(unsigned short *)to = *(const unsigned short *)from;
			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
			return to;
		case 4:
			*(unsigned long *)to = *(const unsigned long *)from;
			return to;
		case 6:	/* for Ethernet addresses */
			*(unsigned long *)to = *(const unsigned long *)from;
			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
			return to;
		case 8:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			return to;
		case 12:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			return to;
		case 16:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			return to;
		case 20:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
			return to;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"cld\n\t" \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}
#undef COMMON
}

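/*
 * With a compile-time constant n the switch above is folded away: small
 * and word-multiple sizes become a handful of direct loads and stores
 * (the 6-byte case covers Ethernet addresses), and anything else falls
 * through to a "rep ; movsl" with the 0-3 byte tail chosen at compile
 * time via COMMON().
 */
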
#define __HAVE_ARCH_MEMCPY
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))
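
/*
 * Illustrative sketch (not compiled in): __builtin_constant_p selects
 * the constant-size expansion when the length is known at compile time
 * and the generic rep-movs version otherwise.  The function name below
 * is only an example.
 */
#if 0
static void memcpy_usage_example(void *dst, const void *src, size_t n)
{
	unsigned char mac[6];

	memcpy(mac, src, 6);	/* constant length: expands to __constant_memcpy() */
	memcpy(dst, src, n);	/* variable length: expands to __memcpy() */
}
#endif
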
#define __HAVE_ARCH_MEMMOVE
extern inline void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;
if (dest < src)
__asm__ __volatile__(
	"cld\n\t"
	"rep\n\t"
	"movsb"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),"1" (src),"2" (dest)
	: "memory");
else
__asm__ __volatile__(
	"std\n\t"
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),
	 "1" (n-1+(const char *)src),
	 "2" (n-1+(char *)dest)
	: "memory");
return dest;
}

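/*
 * If the destination starts below the source the copy can run forwards;
 * otherwise the direction flag is set and the byte copy runs backwards
 * from the last byte (hence the n-1 offsets), so overlapping regions
 * are handled correctly.  The trailing cld restores the expected flag
 * state.
 */
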
#define memcmp __builtin_memcmp

extern inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"cld\n\t"
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	: "memory");
return s;
}

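/*
 * Plain byte fill: AL holds c, EDI the buffer and ECX the count for
 * "rep stosb".  d0 and d1 are dummies that mark ECX and EDI as
 * clobbered.
 */
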
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
extern inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"cld\n\t"
	"rep ; stosl\n\t"		/* fill count/4 longwords */
	"testb $2,%b3\n\t"		/* bit 1 of count: a word left over? */
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"		/* bit 0 of count: a byte left over? */
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	: "memory");
return (s);
}

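/*
 * Here c is used as a full 32-bit fill pattern stored by "rep stosl",
 * so callers are expected to replicate the fill byte into all four byte
 * lanes.  The 0-3 byte tail is finished with stosw/stosb, mirroring
 * __memcpy above.
 */
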
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
extern inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__("cld\n\t" \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory");
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}
#undef COMMON
}

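/*
 * As with __constant_memcpy, a constant count lets the compiler reduce
 * small fills to direct stores of the pattern and larger ones to a
 * single "rep ; stosl" plus the right stosw/stosb tail.
 */
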
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
/* replicate the constant fill byte into every byte of the longword
   pattern expected by the stosl-based paths */
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
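
/*
 * Illustrative sketch (not compiled in): with both the fill byte and
 * the count constant, the call goes through __constant_c_and_count_memset;
 * a variable byte falls back to the generic rep-stosb version.  The
 * function name below is only an example.
 */
#if 0
static void memset_usage_example(void *p, int c, size_t n)
{
	char buf[16];

	memset(buf, 0, sizeof buf);	/* constant c and count: stosl path */
	memset(p, c, n);		/* variable c: __memset()/rep stosb */
}
#endif
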
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count));
return __res;
}

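/*
 * Compares at most count bytes: lodsb/scasb walk both strings until the
 * counter underflows, a difference is found, or the NUL terminator is
 * reached.  The result is 0 for equal prefixes; otherwise sbbl/orb turn
 * the final carry into -1 or +1.
 */
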
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
return __res;
}