/*
 * x86 KASAN (Kernel Address Sanitizer) shadow-memory layout.
 *
 * Defines where the KASAN shadow region lives in the kernel virtual
 * address space and declares the arch init hooks.  Each byte of shadow
 * covers 8 bytes of kernel memory (the >> 3 / "- 3" below), so the
 * shadow region is 1/8th the size of the kernel address space.
 */
#ifndef _ASM_X86_KASAN_H
#define _ASM_X86_KASAN_H

#include <linux/const.h>
/* Shadow offset chosen at configure time; _AC() keeps it usable from asm. */
#define KASAN_SHADOW_OFFSET _AC(CONFIG_KASAN_SHADOW_OFFSET, UL)

/*
 * Compiler uses shadow offset assuming that addresses start
 * from 0. Kernel addresses don't start from 0, so shadow
 * for kernel really starts from compiler's shadow offset +
 * 'kernel address space start' >> KASAN_SHADOW_SCALE_SHIFT
 */
#define KASAN_SHADOW_START      (KASAN_SHADOW_OFFSET + \
					((-1UL << __VIRTUAL_MASK_SHIFT) >> 3))
/*
 * Shadow size scales with the VA width (4- vs 5-level paging):
 * 47 bits for kernel address -> (47 - 3) bits for shadow
 * 56 bits for kernel address -> (56 - 3) bits for shadow
 */
#define KASAN_SHADOW_END        (KASAN_SHADOW_START + (1ULL << (__VIRTUAL_MASK_SHIFT - 3)))

#ifndef __ASSEMBLY__

#ifdef CONFIG_KASAN
/* Set up early (boot-time) shadow mappings before the real ones exist. */
void __init kasan_early_init(void);
/* Populate the final shadow memory once page tables are available. */
void __init kasan_init(void);
#else
/* CONFIG_KASAN=n: stubs so callers need no #ifdefs of their own. */
static inline void kasan_early_init(void) { }
static inline void kasan_init(void) { }
#endif

#endif

#endif