/*
 * Taken from Linux /usr/include/asm/string.h
 * All except memcpy, memmove, memset, memcmp, strncmp and strlen removed.
 */

#ifndef _I386_STRING_H_
#define _I386_STRING_H_

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strtok,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used through-out, making for "slightly" unclear code :-)
 *
 * NO Copyright (C) 1991, 1992 Linus Torvalds,
 * consider these trivial functions to be PD.
 */

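/*
 * size_t is typedef'd locally here, presumably so this freestanding
 * boot code need not drag in the C library headers; on ILP32 i386 an
 * int is wide enough for the lengths used here, although the standard
 * type is unsigned.
 */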
typedef int size_t;

extern void *__memcpy(void * to, const void * from, size_t n);
extern void *__constant_memcpy(void * to, const void * from, size_t n);
extern void *memmove(void * dest, const void * src, size_t n);
extern void *__memset_generic(void * s, char c, size_t count);
extern void *__constant_c_memset(void * s, unsigned long c, size_t count);
extern void *__constant_c_and_count_memset(void * s, unsigned long pattern, size_t count);

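/*
 * These definitions use the GNU89 "extern inline" semantics: GCC emits
 * no out-of-line copy for them, so the header effectively requires GCC
 * to inline every call (hence the "Use gcc" note above).
 *
 * __memcpy below copies n/4 longwords with "rep movsl", then tests
 * bit 1 and bit 0 of the original count ("testb $2"/"testb $1") to
 * move the remaining word and/or byte.  The "q" constraint keeps the
 * count in a register whose low byte (%b4) is addressable.
 */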
extern inline void * __memcpy(void * to, const void * from, size_t n)
{
        int d0, d1, d2;
        __asm__ __volatile__(
                "cld\n\t"
                "rep ; movsl\n\t"
                "testb $2,%b4\n\t"
                "je 1f\n\t"
                "movsw\n"
                "1:\ttestb $1,%b4\n\t"
                "je 2f\n\t"
                "movsb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                : "0" (n/4), "q" (n), "1" ((long) to), "2" ((long) from)
                : "memory");
        return (to);
}

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
extern inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
        switch (n) {
                case 0:
                        return to;
                case 1:
                        *(unsigned char *)to = *(const unsigned char *)from;
                        return to;
                case 2:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        return to;
                case 3:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        *(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
                        return to;
                case 4:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        return to;
                case 6: /* for Ethernet addresses */
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
                        return to;
                case 8:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        return to;
                case 12:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        return to;
                case 16:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        return to;
                case 20:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        *(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
                        return to;
        }
#define COMMON(x) \
__asm__ __volatile__( \
        "cld\n\t" \
        "rep ; movsl" \
        x \
        : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
        : "0" (n/4), "1" ((long) to), "2" ((long) from) \
        : "memory");
{
        int d0, d1, d2;
        switch (n % 4) {
                case 0: COMMON(""); return to;
                case 1: COMMON("\n\tmovsb"); return to;
                case 2: COMMON("\n\tmovsw"); return to;
                default: COMMON("\n\tmovsw\n\tmovsb"); return to;
        }
}
#undef COMMON
}

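/*
 * Worked example (hypothetical call): __constant_memcpy(to, from, 7)
 * misses every switch case above, so it reaches the tail code with
 * n % 4 == 3 and expands to one "rep movsl" iteration (4 bytes)
 * followed by "movsw" (2 bytes) and "movsb" (1 byte).
 */
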
#define __HAVE_ARCH_MEMCPY
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

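/*
 * Usage sketch: with a compile-time constant length the macro picks
 * the unrolled copy, e.g. for the 6-byte Ethernet-address case:
 *
 *      unsigned char dst[6];
 *      memcpy(dst, src, 6);    // expands to __constant_memcpy, case 6
 *
 * A length only known at run time goes through __memcpy() instead.
 * (dst/src above are illustrative names, not part of this header.)
 */
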
#define __HAVE_ARCH_MEMMOVE
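/*
 * memmove handles overlapping buffers by picking the copy direction:
 * a forward "rep movsb" when dest is below src, otherwise a backward
 * copy that starts at the last byte with the direction flag set
 * ("std") and cleared again afterwards ("cld").
 */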
extern inline void * memmove(void * dest, const void * src, size_t n)
{
        int d0, d1, d2;
        if (dest < src)
                __asm__ __volatile__(
                        "cld\n\t"
                        "rep\n\t"
                        "movsb"
                        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
                        : "0" (n), "1" (src), "2" (dest)
                        : "memory");
        else
                __asm__ __volatile__(
                        "std\n\t"
                        "rep\n\t"
                        "movsb\n\t"
                        "cld"
                        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
                        : "0" (n),
                          "1" (n-1+(const char *)src),
                          "2" (n-1+(char *)dest)
                        : "memory");
        return dest;
}

#define memcmp __builtin_memcmp

extern inline void * __memset_generic(void * s, char c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "cld\n\t"
                "rep\n\t"
                "stosb"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "1" (s), "0" (count)
                : "memory");
        return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
extern inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "cld\n\t"
                "rep ; stosl\n\t"
                "testb $2,%b3\n\t"
                "je 1f\n\t"
                "stosw\n"
                "1:\ttestb $1,%b3\n\t"
                "je 2f\n\t"
                "stosb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "q" (count), "0" (count/4), "1" ((long) s)
                : "memory");
        return (s);
}

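/*
 * Note that c is stored with "stosl", i.e. 4 bytes at a time, so the
 * caller must pass the fill byte already replicated into all four byte
 * lanes (e.g. 0xAAAAAAAA to fill with 0xAA).  The memset() macro below
 * takes care of this with the 0x01010101UL multiply.
 */
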
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
extern inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
        switch (count) {
                case 0:
                        return s;
                case 1:
                        *(unsigned char *)s = pattern;
                        return s;
                case 2:
                        *(unsigned short *)s = pattern;
                        return s;
                case 3:
                        *(unsigned short *)s = pattern;
                        *(2+(unsigned char *)s) = pattern;
                        return s;
                case 4:
                        *(unsigned long *)s = pattern;
                        return s;
        }
#define COMMON(x) \
__asm__ __volatile__("cld\n\t" \
        "rep ; stosl" \
        x \
        : "=&c" (d0), "=&D" (d1) \
        : "a" (pattern), "0" (count/4), "1" ((long) s) \
        : "memory")
{
        int d0, d1;
        switch (count % 4) {
                case 0: COMMON(""); return s;
                case 1: COMMON("\n\tstosb"); return s;
                case 2: COMMON("\n\tstosw"); return s;
                default: COMMON("\n\tstosw\n\tstosb"); return s;
        }
}
#undef COMMON
}

#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))

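/*
 * Usage sketch: a constant fill byte is first replicated into a long
 * (0x01010101UL * byte), then dispatched on whether the length is also
 * constant, e.g.:
 *
 *      char buf[64];
 *      memset(buf, 0, sizeof(buf));    // constant c and count:
 *                                      // __constant_c_and_count_memset,
 *                                      // i.e. 16 "stosl" stores of 0
 *
 * (buf above is an illustrative name, not part of this header.)
 */
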
#define __HAVE_ARCH_STRNCMP
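/*
 * strncmp compares byte by byte with "lodsb"/"scasb", stopping after
 * count bytes, at a mismatch, or at the end of cs.  On a mismatch,
 * "sbbl %eax,%eax" turns the borrow from scasb into 0 or -1, and
 * "orb $1,%al" then forces the result to +1 or -1.
 */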
static inline int strncmp(const char * cs, const char * ct, size_t count)
{
        register int __res;
        int d0, d1, d2;
        __asm__ __volatile__(
                "1:\tdecl %3\n\t"
                "js 2f\n\t"
                "lodsb\n\t"
                "scasb\n\t"
                "jne 3f\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n"
                "2:\txorl %%eax,%%eax\n\t"
                "jmp 4f\n"
                "3:\tsbbl %%eax,%%eax\n\t"
                "orb $1,%%al\n"
                "4:"
                : "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
                : "1" (cs), "2" (ct), "3" (count));
        return __res;
}

#define __HAVE_ARCH_STRLEN
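/*
 * strlen: with %ecx preloaded to 0xffffffff and %al to 0, "repne
 * scasb" decrements %ecx for every byte scanned, including the
 * terminating NUL.  Afterwards %ecx == -(len + 2), so "notl" yields
 * len + 1 and "decl" yields len.
 */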
static inline size_t strlen(const char * s)
{
        int d0;
        register int __res;
        __asm__ __volatile__(
                "repne\n\t"
                "scasb\n\t"
                "notl %0\n\t"
                "decl %0"
                : "=c" (__res), "=&D" (d0)
                : "1" (s), "a" (0), "0" (0xffffffff));
        return __res;
}

#endif