H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 1 | #ifndef _ASM_X86_STRING_64_H |
| 2 | #define _ASM_X86_STRING_64_H |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 3 | |
| 4 | #ifdef __KERNEL__ |
| 5 | |
Joe Perches | 953b2f1 | 2008-03-23 01:03:34 -0700 | [diff] [blame] | 6 | /* Written 2002 by Andi Kleen */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 7 | |
/*
 * Only used for special circumstances. Stolen from i386/string.h.
 *
 * Byte-wise copy of @n bytes from @from to @to:
 *  - "rep ; movsl" copies n/4 dwords (ECX preloaded with n/4),
 *  - the remaining 0-3 bytes are handled by testing bit 1 and bit 0
 *    of the low byte of n (%b4) and issuing movsw / movsb as needed.
 * Returns @to, matching the standard memcpy() contract.
 */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	/* Dummy outputs: tell gcc that CX, DI and SI are clobbered. */
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"		/* bulk copy: n/4 dwords */
		     "testb $2,%b4\n\t"		/* 2 tail bytes left? */
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"	/* 1 tail byte left? */
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     /* "q": n must sit in a register with a byte subreg (%b4) */
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
| 25 | |
/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#define __HAVE_ARCH_MEMCPY 1
#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ >= 3) || __GNUC__ > 4
/* gcc >= 4.3: trust the compiler's own memcpy expansion/out-lining. */
extern void *memcpy(void *to, const void *from, size_t len);
#else
extern void *__memcpy(void *to, const void *from, size_t len);
/*
 * Older gcc: route compile-time-constant copies of >= 64 bytes to the
 * out-of-line __memcpy(); everything else goes to __builtin_memcpy(),
 * which expands small/unknown sizes best.
 *
 * Note: __builtin_constant_p() does not evaluate its argument, so 'len'
 * is still evaluated exactly once (captured in __len).
 */
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 53 | |
/*
 * Architecture-provided string routines.  The __HAVE_ARCH_* defines
 * mark which generic implementations this arch overrides; the
 * implementations themselves live elsewhere (out of line).
 */
#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 65 | |
| 66 | #endif /* __KERNEL__ */ |
| 67 | |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 68 | #endif /* _ASM_X86_STRING_64_H */ |