/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2000, 01, 02, 03 by Ralf Baechle
 * Copyright (C) 1999, 2000, 2001 Silicon Graphics, Inc.
 */
#ifndef _ASM_CACHEFLUSH_H
#define _ASM_CACHEFLUSH_H

/* Keep includes the same across arches. */
#include <linux/mm.h>
#include <asm/cpu-features.h>

/* Cache flushing:
 *
 *  - flush_cache_all() flushes the entire cache
 *  - flush_cache_mm(mm) flushes the specified mm context's cache lines
 *  - flush_cache_dup_mm(mm) handles cache flushing when forking
 *  - flush_cache_page(mm, vmaddr, pfn) flushes a single page
 *  - flush_cache_range(vma, start, end) flushes a range of pages
 *  - flush_icache_range(start, end) flushes a range of instructions
 *  - flush_dcache_page(pg) flushes (writes back and invalidates) a page for dcache
 *
 * MIPS specific flush operations:
 *
 *  - flush_cache_sigtramp() flushes the signal trampoline
 *  - flush_icache_all() flushes the entire instruction cache
 *  - flush_data_cache_page() flushes a page from the data cache
 */
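
/*
 * Illustrative sketch of a typical flush_icache_range() call (the buffer and
 * length identifiers are hypothetical, not defined in this header): code that
 * writes instructions into memory must push them out of the D-cache and
 * invalidate the I-cache before they are executed:
 *
 *	memcpy(code_buf, new_insns, len);
 *	flush_icache_range((unsigned long)code_buf,
 *			   (unsigned long)code_buf + len);
 */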

/*
 * This flag is used to indicate that the page pointed to by a pte
 * is dirty and requires cleaning before returning it to the user.
 */
#define PG_dcache_dirty			PG_arch_1

#define Page_dcache_dirty(page)		\
	test_bit(PG_dcache_dirty, &(page)->flags)
#define SetPageDcacheDirty(page)	\
	set_bit(PG_dcache_dirty, &(page)->flags)
#define ClearPageDcacheDirty(page)	\
	clear_bit(PG_dcache_dirty, &(page)->flags)
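
/*
 * How the flag is used by the helpers in this header: on CPUs with D-cache
 * aliases flush_dcache_page() flushes the page immediately; otherwise, if the
 * I-cache does not fill straight from the D-cache, it only sets
 * PG_dcache_dirty, and the writeback is deferred until flush_icache_page()
 * runs on an executable mapping of the page, which then clears the flag.
 */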

extern void (*flush_cache_all)(void);
extern void (*__flush_cache_all)(void);
extern void (*flush_cache_mm)(struct mm_struct *mm);
#define flush_cache_dup_mm(mm)	do { (void) (mm); } while (0)
extern void (*flush_cache_range)(struct vm_area_struct *vma,
	unsigned long start, unsigned long end);
extern void (*flush_cache_page)(struct vm_area_struct *vma, unsigned long page, unsigned long pfn);
extern void __flush_dcache_page(struct page *page);
extern void __flush_icache_page(struct vm_area_struct *vma, struct page *page);

#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 1
static inline void flush_dcache_page(struct page *page)
{
	if (cpu_has_dc_aliases)
		__flush_dcache_page(page);
	else if (!cpu_has_ic_fills_f_dc)
		SetPageDcacheDirty(page);
}

#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)

#define ARCH_HAS_FLUSH_ANON_PAGE
extern void __flush_anon_page(struct page *, unsigned long);
static inline void flush_anon_page(struct vm_area_struct *vma,
	struct page *page, unsigned long vmaddr)
{
	if (cpu_has_dc_aliases && PageAnon(page))
		__flush_anon_page(page, vmaddr);
}

static inline void flush_icache_page(struct vm_area_struct *vma,
	struct page *page)
{
	if (!cpu_has_ic_fills_f_dc && (vma->vm_flags & VM_EXEC) &&
	    Page_dcache_dirty(page)) {
		__flush_icache_page(vma, page);
		ClearPageDcacheDirty(page);
	}
}

extern void (*flush_icache_range)(unsigned long start, unsigned long end);
extern void (*local_flush_icache_range)(unsigned long start, unsigned long end);

extern void (*__flush_cache_vmap)(void);

static inline void flush_cache_vmap(unsigned long start, unsigned long end)
{
	if (cpu_has_dc_aliases)
		__flush_cache_vmap();
}

extern void (*__flush_cache_vunmap)(void);

static inline void flush_cache_vunmap(unsigned long start, unsigned long end)
{
	if (cpu_has_dc_aliases)
		__flush_cache_vunmap();
}
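
/*
 * flush_cache_vmap() and flush_cache_vunmap() above are called by the generic
 * vmalloc/ioremap code when kernel virtual mappings are created or torn down;
 * on MIPS they only need to do work when the D-cache is virtually aliased.
 */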

extern void copy_to_user_page(struct vm_area_struct *vma,
	struct page *page, unsigned long vaddr, void *dst, const void *src,
	unsigned long len);

extern void copy_from_user_page(struct vm_area_struct *vma,
	struct page *page, unsigned long vaddr, void *dst, const void *src,
	unsigned long len);
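
/*
 * Illustrative sketch of the intended calling pattern (the local variables
 * are hypothetical): generic code that pokes another process's memory, such
 * as ptrace-style accessors, maps the page and then goes through
 * copy_to_user_page() so the architecture can keep the caches coherent with
 * the user-space mapping:
 *
 *	void *maddr = kmap(page);
 *	copy_to_user_page(vma, page, addr, maddr + offset, buf, bytes);
 *	kunmap(page);
 */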

extern void (*flush_cache_sigtramp)(unsigned long addr);
extern void (*flush_icache_all)(void);
extern void (*local_flush_data_cache_page)(void *addr);
extern void (*flush_data_cache_page)(unsigned long addr);

/* Run kernel code uncached, useful for cache probing functions. */
unsigned long run_uncached(void *func);
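
/*
 * Illustrative sketch (probe_dcache_linesize is a hypothetical routine, not
 * declared here): a cache probing function can be executed from an uncached
 * alias of the kernel text, with its return value passed back through
 * run_uncached():
 *
 *	unsigned long linesz = run_uncached(probe_dcache_linesize);
 */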

extern void *kmap_coherent(struct page *page, unsigned long addr);
extern void kunmap_coherent(void);
extern void *kmap_noncoherent(struct page *page, unsigned long addr);

static inline void kunmap_noncoherent(void)
{
	kunmap_coherent();
}
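
/*
 * Illustrative sketch (hypothetical caller): kmap_coherent() maps a user page
 * at a kernel virtual address whose cache colour matches the user address
 * vaddr, so stores through the kernel mapping hit the same cache lines that
 * the user mapping sees:
 *
 *	void *kaddr = kmap_coherent(page, vaddr);
 *	memcpy(kaddr, src, len);
 *	kunmap_coherent();
 */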

#define ARCH_HAS_FLUSH_KERNEL_DCACHE_PAGE
static inline void flush_kernel_dcache_page(struct page *page)
{
	BUG_ON(cpu_has_dc_aliases && PageHighMem(page));
}
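
/*
 * Note on the BUG_ON above: modifying a highmem page through a temporary
 * kernel mapping cannot be made coherent here when the D-cache is virtually
 * aliased, so that combination is treated as unsupported rather than being
 * silently ignored.
 */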

/*
 * For now flush_kernel_vmap_range and invalidate_kernel_vmap_range both do a
 * cache writeback and invalidate operation.
 */
extern void (*__flush_kernel_vmap_range)(unsigned long vaddr, int size);

static inline void flush_kernel_vmap_range(void *vaddr, int size)
{
	if (cpu_has_dc_aliases)
		__flush_kernel_vmap_range((unsigned long) vaddr, size);
}

static inline void invalidate_kernel_vmap_range(void *vaddr, int size)
{
	if (cpu_has_dc_aliases)
		__flush_kernel_vmap_range((unsigned long) vaddr, size);
}
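
/*
 * Illustrative sketch (vbuf and size are hypothetical): for I/O on a vmap()'d
 * buffer, data written through the vmap alias is written back before the
 * device reads it, and the alias is invalidated before the CPU reads data the
 * device has written:
 *
 *	flush_kernel_vmap_range(vbuf, size);		(before the device reads)
 *	... DMA transfer ...
 *	invalidate_kernel_vmap_range(vbuf, size);	(before the CPU reads)
 */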

#endif /* _ASM_CACHEFLUSH_H */