H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 1 | #ifndef _ASM_X86_STRING_64_H |
| 2 | #define _ASM_X86_STRING_64_H |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 3 | |
| 4 | #ifdef __KERNEL__ |
| 5 | |
Joe Perches | 953b2f1 | 2008-03-23 01:03:34 -0700 | [diff] [blame] | 6 | /* Written 2002 by Andi Kleen */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 7 | |
/* Only used for special circumstances. Stolen from i386/string.h */
/*
 * Copy n bytes from @from to @to with string instructions and return @to
 * (same contract as memcpy).  "rep ; movsl" moves n/4 dwords (count is
 * loaded into ECX via the "0" (n / 4) constraint); the remaining 0-3
 * bytes are handled by testing the low bits of the original length in
 * byte register %b4 -- bit 1 selects a "movsw", bit 0 a "movsb".
 *
 * d0/d1/d2 only exist to tell the compiler that ECX/RDI/RSI are
 * clobbered; the "memory" clobber stops it from caching either buffer
 * across the asm.  Source and destination must not overlap.
 */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "testb $2,%b4\n\t"
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
| 25 | |
/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#define __HAVE_ARCH_MEMCPY 1
/*
 * memcpy() is the normal entry point; __memcpy() is the raw,
 * never-redirected implementation (the CONFIG_KASAN block further down
 * maps memcpy back onto it for non-instrumented files).
 */
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
/*
 * Old gcc (< 4.3): send constant copies of >= 64 bytes to the
 * out-of-line __memcpy() and let __builtin_memcpy() handle the rest.
 * len is re-evaluated only inside __builtin_constant_p(), which does
 * not evaluate its argument at runtime, so dst/src/len each have their
 * side effects exactly once.
 */
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 53 | |
#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);	/* raw variant, see KASAN block below */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);	/* raw variant */

/* Remaining string ops use the arch assembly implementations directly. */
int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 67 | |
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we should use
 * the not-instrumented versions of the mem* functions: KASAN is
 * enabled kernel-wide, but this translation unit is compiled without
 * -fsanitize=address, so it must bypass the checked wrappers.
 */

/* memcpy may already be a function-like macro (see above) -- drop it first. */
#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif
| 80 | |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 81 | #endif /* __KERNEL__ */ |
| 82 | |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 83 | #endif /* _ASM_X86_STRING_64_H */ |