#ifndef _SPARC_CACHEFLUSH_H
#define _SPARC_CACHEFLUSH_H

#include <asm/cachetlb_32.h>

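/* The flush_cache_*() operations below are dispatched through
 * sparc32_cachetlb_ops, a table of function pointers declared in
 * <asm/cachetlb_32.h> and selected at boot to match the cache and MMU
 * implementation of the running CPU.
 */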
#define flush_cache_all() \
        sparc32_cachetlb_ops->cache_all()
#define flush_cache_mm(mm) \
        sparc32_cachetlb_ops->cache_mm(mm)
#define flush_cache_dup_mm(mm) \
        sparc32_cachetlb_ops->cache_mm(mm)
#define flush_cache_range(vma, start, end) \
        sparc32_cachetlb_ops->cache_range(vma, start, end)
#define flush_cache_page(vma, addr, pfn) \
        sparc32_cachetlb_ops->cache_page(vma, addr)
#define flush_icache_range(start, end)          do { } while (0)
#define flush_icache_page(vma, pg)              do { } while (0)

#define flush_icache_user_range(vma, pg, adr, len)      do { } while (0)

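/* Both copy helpers flush the user mapping of the page before copying
 * through the kernel alias, so the two views of the same page stay
 * coherent.
 */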
#define copy_to_user_page(vma, page, vaddr, dst, src, len)     \
        do {                                                    \
                flush_cache_page(vma, vaddr, page_to_pfn(page));\
                memcpy(dst, src, len);                          \
        } while (0)
#define copy_from_user_page(vma, page, vaddr, dst, src, len)   \
        do {                                                    \
                flush_cache_page(vma, vaddr, page_to_pfn(page));\
                memcpy(dst, src, len);                          \
        } while (0)

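/* Lower-level operations, also routed through the boot-selected ops table:
 * __flush_page_to_ram() pushes a page's contents back to memory,
 * flush_sig_insns() is used after the kernel writes signal trampoline
 * instructions into a user page, and flush_page_for_dma() makes a page
 * safe to hand to a DMA engine.
 */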
#define __flush_page_to_ram(addr) \
        sparc32_cachetlb_ops->page_to_ram(addr)
#define flush_sig_insns(mm, insn_addr) \
        sparc32_cachetlb_ops->sig_insns(mm, insn_addr)
#define flush_page_for_dma(addr) \
        sparc32_cachetlb_ops->page_for_dma(addr)

extern void sparc_flush_page_to_ram(struct page *page);

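/* Defining ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE tells generic code (e.g. the
 * block layer) that this architecture provides a real flush_dcache_page(),
 * which here simply pushes the page back to RAM.
 */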
#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE       1
#define flush_dcache_page(page)                 sparc_flush_page_to_ram(page)
#define flush_dcache_mmap_lock(mapping)         do { } while (0)
#define flush_dcache_mmap_unlock(mapping)       do { } while (0)

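/* vmalloc/vmap mappings are not tracked per-mm, so changes there are
 * handled conservatively by flushing the entire cache.
 */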
#define flush_cache_vmap(start, end)            flush_cache_all()
#define flush_cache_vunmap(start, end)          flush_cache_all()

/* When a context switch happens we must flush all user windows so that
 * the windows of the current process are flushed onto its stack. This
 * way the windows are all clean for the next process and the stack
 * frames are up to date.
 */
extern void flush_user_windows(void);
extern void kill_user_windows(void);
extern void flushw_all(void);

#endif /* _SPARC_CACHEFLUSH_H */