H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 1 | #ifndef _ASM_X86_STRING_64_H |
| 2 | #define _ASM_X86_STRING_64_H |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 3 | |
| 4 | #ifdef __KERNEL__ |
Tony Luck | 3637efb | 2016-09-01 11:39:33 -0700 | [diff] [blame^] | 5 | #include <linux/jump_label.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 6 | |
Joe Perches | 953b2f1 | 2008-03-23 01:03:34 -0700 | [diff] [blame] | 7 | /* Written 2002 by Andi Kleen */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 8 | |
/* Only used for special circumstances. Stolen from i386/string.h */
/*
 * __inline_memcpy - copy @n bytes from @from to @to with string-move insns.
 *
 * Copies n/4 dwords with "rep movsl", then finishes the 0-3 remaining
 * bytes by testing bit 1 of n (one movsw) and bit 0 of n (one movsb).
 * Returns @to, matching the memcpy() contract.
 */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	/* d0/d1/d2 are dummy outputs: the string insns clobber ECX/EDI/ESI. */
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"		/* copy n/4 dwords */
		     "testb $2,%b4\n\t"		/* >= 2 trailing bytes? */
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"	/* 1 trailing byte? */
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     /*
		      * "0"/"1"/"2" tie inputs to the outputs above (ECX=n/4,
		      * EDI=to, ESI=from); "q" puts n in a byte-addressable
		      * register so %b4 can test its low bits.
		      */
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
| 26 | |
/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#define __HAVE_ARCH_MEMCPY 1
/*
 * memcpy() is the normal (possibly instrumented) entry point;
 * __memcpy() resolves to the plain out-of-line implementation
 * and is used where instrumentation must be bypassed (see the
 * KASAN section below).
 */
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
/*
 * On old GCC (< 4.3), send large constant-size copies to the
 * out-of-line __memcpy() and let __builtin_memcpy() handle the
 * rest (so small copies can still be inlined).
 *
 * Note: __builtin_constant_p(len) does not evaluate its argument,
 * so each macro argument is still evaluated exactly once.
 */
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 54 | |
#define __HAVE_ARCH_MEMSET
/* As with memcpy: plain name may be instrumented, __-prefixed is raw. */
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

/* Out-of-line arch implementations of the remaining string routines. */
int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 68 | |
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the not-instrumented version of the mem* functions.
 *
 * (CONFIG_KASAN is on but this translation unit is compiled without
 * address sanitizing, so route the plain names to the raw
 * __-prefixed implementations.)
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif

| 81 | |
/*
 * Static key gating machine-check-safe copies; presumably enabled at
 * boot when the CPU supports MCE recovery — confirm against the code
 * that calls static_branch_enable() on it.
 */
DECLARE_STATIC_KEY_FALSE(mcsafe_key);

/**
 * memcpy_mcsafe - copy memory with indication if a machine check happened
 *
 * @dst: destination address
 * @src: source address
 * @cnt: number of bytes to copy
 *
 * Low level memory copy function that catches machine checks
 *
 * Return 0 for success, -EFAULT for fail
 */
int memcpy_mcsafe(void *dst, const void *src, size_t cnt);
Tony Luck | 92b0729 | 2016-02-18 11:47:26 -0800 | [diff] [blame] | 96 | |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 97 | #endif /* __KERNEL__ */ |
| 98 | |
H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 99 | #endif /* _ASM_X86_STRING_64_H */ |