#ifndef _ASM_X86_KASAN_H
#define _ASM_X86_KASAN_H

#include <linux/const.h>
#define KASAN_SHADOW_OFFSET _AC(CONFIG_KASAN_SHADOW_OFFSET, UL)

/*
 * One shadow byte tracks 2^KASAN_SHADOW_SCALE_SHIFT (= 8) bytes of
 * kernel memory; keep the scale as a named constant instead of a
 * bare '3' repeated in the expressions below.
 */
#define KASAN_SHADOW_SCALE_SHIFT 3

/*
 * Compiler uses shadow offset assuming that addresses start
 * from 0. Kernel addresses don't start from 0, so shadow
 * for kernel really starts from compiler's shadow offset +
 * 'kernel address space start' >> KASAN_SHADOW_SCALE_SHIFT
 */
#define KASAN_SHADOW_START	(KASAN_SHADOW_OFFSET + \
					(0xffff800000000000ULL >> \
						KASAN_SHADOW_SCALE_SHIFT))
/*
 * 47 bits for kernel address -> (47 - KASAN_SHADOW_SCALE_SHIFT) bits
 * of shadow needed to cover it.
 */
#define KASAN_SHADOW_END	(KASAN_SHADOW_START + \
					(1ULL << (47 - KASAN_SHADOW_SCALE_SHIFT)))

#ifndef __ASSEMBLY__

#ifdef CONFIG_KASAN
/* Early boot-time shadow setup; presumably runs before kasan_init() —
 * confirm against the callers in head code. */
void __init kasan_early_init(void);
void __init kasan_init(void);
#else
/* No-op stubs so callers need no #ifdef CONFIG_KASAN of their own. */
static inline void kasan_early_init(void) { }
static inline void kasan_init(void) { }
#endif

#endif

#endif