include/asm-x86/string_64.h
#ifndef _X86_64_STRING_H_
#define _X86_64_STRING_H_

#ifdef __KERNEL__

/* Written 2002 by Andi Kleen */

/* Only used for special circumstances. Stolen from i386/string.h */
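/*
 * Copies n bytes with string instructions: "rep movsl" moves n/4 dwords,
 * then bit 1 of n selects a trailing "movsw" (2 bytes) and bit 0 a trailing
 * "movsb" (1 byte).  %ecx, %rdi and %rsi are clobbered via the dummy outputs
 * d0/d1/d2, and the "memory" clobber keeps the compiler from caching the
 * source and destination buffers across the asm.
 */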
static __always_inline void *
__inline_memcpy(void *to, const void *from, size_t n)
{
        unsigned long d0, d1, d2;
        __asm__ __volatile__(
                "rep ; movsl\n\t"
                "testb $2,%b4\n\t"
                "je 1f\n\t"
                "movsw\n"
                "1:\ttestb $1,%b4\n\t"
                "je 2f\n\t"
                "movsb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                : "0" (n/4), "q" (n), "1" ((long) to), "2" ((long) from)
                : "memory");
        return (to);
}
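/*
 * Worked example (hypothetical call): __inline_memcpy(dst, src, 10) copies
 * 10/4 = 2 dwords (8 bytes) with "rep movsl"; bit 1 of 10 is set, so "movsw"
 * copies 2 more bytes, and bit 0 is clear, so the final "movsb" is skipped.
 */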
/* Even with __builtin_ the compiler may decide to use the out of line
   function. */
#define __HAVE_ARCH_MEMCPY 1
#if (__GNUC__ == 4 && __GNUC_MINOR__ >= 3) || __GNUC__ > 4
extern void *memcpy(void *to, const void *from, size_t len);
#else
extern void *__memcpy(void *to, const void *from, size_t len);
#define memcpy(dst, src, len)                                   \
({      size_t __len = (len);                                   \
        void *__ret;                                            \
        if (__builtin_constant_p(len) && __len >= 64)           \
                __ret = __memcpy((dst), (src), __len);          \
        else                                                    \
                __ret = __builtin_memcpy((dst), (src), __len);  \
        __ret; })
#endif
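/*
 * With GCC older than 4.3, the macro above routes copies whose size is a
 * compile-time constant of 64 bytes or more to the out-of-line __memcpy();
 * e.g. (hypothetical caller) memcpy(buf, src, 128) becomes
 * __memcpy(buf, src, 128), while memcpy(buf, src, 16) stays with
 * __builtin_memcpy() and is normally expanded inline.  GCC 4.3 and later
 * are trusted to make that choice themselves, so only the plain prototype
 * is declared.
 */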
#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
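/*
 * Note: the __HAVE_ARCH_* defines here (and __HAVE_ARCH_MEMCPY above) tell
 * the generic string code in lib/string.c to skip its fallback versions,
 * so the kernel links the x86-64 implementations provided by the arch code
 * instead.
 */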
int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
#endif /* __KERNEL__ */

#endif