#ifndef _LINUX_KASAN_H
#define _LINUX_KASAN_H

#include <linux/sched.h>
#include <linux/types.h>

struct kmem_cache;
struct page;
struct vm_struct;

#ifdef CONFIG_KASAN

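/*
 * Each byte of shadow memory describes the state of 8 (1 << 3) bytes of
 * kernel memory, so the shadow region is one eighth the size of the
 * address range it covers.
 */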
#define KASAN_SHADOW_SCALE_SHIFT 3

#include <asm/kasan.h>
#include <asm/pgtable.h>

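/*
 * A single zero-filled shadow page, together with the page table entries
 * below, is shared across shadow ranges that never need to record any
 * poisoning; kasan_populate_zero_shadow() maps it over
 * [shadow_start, shadow_end).
 */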
extern unsigned char kasan_zero_page[PAGE_SIZE];
extern pte_t kasan_zero_pte[PTRS_PER_PTE];
extern pmd_t kasan_zero_pmd[PTRS_PER_PMD];
extern pud_t kasan_zero_pud[PTRS_PER_PUD];

void kasan_populate_zero_shadow(const void *shadow_start,
				const void *shadow_end);

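/*
 * Translate a kernel address to the address of its shadow byte. With a
 * scale shift of 3, addr and addr + 7 map to the same shadow byte: a value
 * of 0 there means all 8 bytes are accessible, 1..7 means only the first N
 * bytes are, and negative values are poison markers.
 */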
static inline void *kasan_mem_to_shadow(const void *addr)
{
	return (void *)((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
		+ KASAN_SHADOW_OFFSET;
}

/* Enable reporting bugs after kasan_disable_current() */
static inline void kasan_enable_current(void)
{
	current->kasan_depth++;
}

/* Disable reporting bugs for current task */
static inline void kasan_disable_current(void)
{
	current->kasan_depth--;
}
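
/*
 * kasan_depth is a per-task counter; disable/enable calls are expected to
 * come in matched pairs around code whose accesses must not be reported,
 * e.g. (illustrative only, hypothetical helper):
 *
 *	kasan_disable_current();
 *	touch_already_poisoned_memory();
 *	kasan_enable_current();
 */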

void kasan_unpoison_shadow(const void *address, size_t size);

void kasan_unpoison_task_stack(struct task_struct *task);
void kasan_unpoison_stack_above_sp_to(const void *watermark);

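/* Page allocator hooks: unpoison pages on allocation, poison them on free. */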
void kasan_alloc_pages(struct page *page, unsigned int order);
void kasan_free_pages(struct page *page, unsigned int order);

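/*
 * Slab cache lifecycle hooks: kasan_cache_create() reserves room for
 * per-object alloc/free metadata (see struct kasan_cache below), while
 * shrink/destroy give KASAN a chance to drain its quarantine for the cache.
 */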
void kasan_cache_create(struct kmem_cache *cache, size_t *size,
			unsigned long *flags);
void kasan_cache_shrink(struct kmem_cache *cache);
void kasan_cache_destroy(struct kmem_cache *cache);

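/* Per-slab-page and per-object poisoning hooks for the slab allocators. */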
void kasan_poison_slab(struct page *page);
void kasan_unpoison_object_data(struct kmem_cache *cache, void *object);
void kasan_poison_object_data(struct kmem_cache *cache, void *object);
void kasan_init_slab_obj(struct kmem_cache *cache, const void *object);

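/*
 * kmalloc()/krealloc() hooks: unpoison the requested size and poison the
 * surrounding redzone. The *_large variants cover allocations backed by the
 * page allocator rather than a slab cache.
 */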
void kasan_kmalloc_large(const void *ptr, size_t size, gfp_t flags);
void kasan_kfree_large(const void *ptr);
void kasan_poison_kfree(void *ptr);
void kasan_kmalloc(struct kmem_cache *s, const void *object, size_t size,
		   gfp_t flags);
void kasan_krealloc(const void *object, size_t new_size, gfp_t flags);

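/*
 * kasan_slab_free() returns true when KASAN takes the object (e.g. into its
 * quarantine), in which case the allocator must not free it immediately.
 */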
void kasan_slab_alloc(struct kmem_cache *s, void *object, gfp_t flags);
bool kasan_slab_free(struct kmem_cache *s, void *object);

struct kasan_cache {
	int alloc_meta_offset;
	int free_meta_offset;
};

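/* Shadow management for module/vmalloc mappings. */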
int kasan_module_alloc(void *addr, size_t size);
void kasan_free_shadow(const struct vm_struct *vm);

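/*
 * ksize() is declared here rather than pulling in slab.h;
 * kasan_unpoison_slab() relies on its side effect of unpoisoning the whole
 * allocated area of the object.
 */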
size_t ksize(const void *);
static inline void kasan_unpoison_slab(const void *ptr) { ksize(ptr); }
size_t kasan_metadata_size(struct kmem_cache *cache);

#else /* CONFIG_KASAN */
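
/* With CONFIG_KASAN disabled, all of the hooks below compile away to no-ops. */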
static inline void kasan_unpoison_shadow(const void *address, size_t size) {}

static inline void kasan_unpoison_task_stack(struct task_struct *task) {}
static inline void kasan_unpoison_stack_above_sp_to(const void *watermark) {}

static inline void kasan_enable_current(void) {}
static inline void kasan_disable_current(void) {}

static inline void kasan_alloc_pages(struct page *page, unsigned int order) {}
static inline void kasan_free_pages(struct page *page, unsigned int order) {}

static inline void kasan_cache_create(struct kmem_cache *cache,
				      size_t *size,
				      unsigned long *flags) {}
static inline void kasan_cache_shrink(struct kmem_cache *cache) {}
static inline void kasan_cache_destroy(struct kmem_cache *cache) {}

static inline void kasan_poison_slab(struct page *page) {}
static inline void kasan_unpoison_object_data(struct kmem_cache *cache,
				void *object) {}
static inline void kasan_poison_object_data(struct kmem_cache *cache,
				void *object) {}
static inline void kasan_init_slab_obj(struct kmem_cache *cache,
				const void *object) {}

static inline void kasan_kmalloc_large(const void *ptr, size_t size,
				       gfp_t flags) {}
static inline void kasan_kfree_large(const void *ptr) {}
static inline void kasan_poison_kfree(void *ptr) {}
static inline void kasan_kmalloc(struct kmem_cache *s, const void *object,
				 size_t size, gfp_t flags) {}
static inline void kasan_krealloc(const void *object, size_t new_size,
				  gfp_t flags) {}

static inline void kasan_slab_alloc(struct kmem_cache *s, void *object,
				    gfp_t flags) {}
static inline bool kasan_slab_free(struct kmem_cache *s, void *object)
{
	return false;
}

static inline int kasan_module_alloc(void *addr, size_t size) { return 0; }
static inline void kasan_free_shadow(const struct vm_struct *vm) {}

static inline void kasan_unpoison_slab(const void *ptr) { }
static inline size_t kasan_metadata_size(struct kmem_cache *cache) { return 0; }

#endif /* CONFIG_KASAN */

#endif /* _LINUX_KASAN_H */