#include <linux/mm.h>
#include <asm/mmu_context.h>
#include <asm/cacheflush.h>

/*
 * Write back the dirty D-caches, but do not invalidate them.
 *
 * START: Virtual Address (U0, P1, or P3)
 * SIZE: Size of the region.
 */
static void sh4__flush_wback_region(void *start, int size)
{
	reg_size_t aligned_start, v, cnt, end;

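	/*
	 * Round the start address down and the end address up to
	 * L1 cache line boundaries, then count the lines covered.
	 */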
	aligned_start = register_align(start);
	v = aligned_start & ~(L1_CACHE_BYTES-1);
	end = (aligned_start + size + L1_CACHE_BYTES-1)
		& ~(L1_CACHE_BYTES-1);
	cnt = (end - v) / L1_CACHE_BYTES;

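	/*
	 * Manually unrolled by eight: each "ocbwb" (operand cache block
	 * write-back) writes one dirty L1 line back to memory without
	 * invalidating it.
	 */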
	while (cnt >= 8) {
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt -= 8;
	}

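	/* Write back any remaining lines one at a time. */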
	while (cnt) {
		asm volatile("ocbwb @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt--;
	}
}

/*
 * Write back the dirty D-caches and invalidate them.
 *
 * START: Virtual Address (U0, P1, or P3)
 * SIZE: Size of the region.
 */
static void sh4__flush_purge_region(void *start, int size)
{
	reg_size_t aligned_start, v, cnt, end;

	aligned_start = register_align(start);
	v = aligned_start & ~(L1_CACHE_BYTES-1);
	end = (aligned_start + size + L1_CACHE_BYTES-1)
		& ~(L1_CACHE_BYTES-1);
	cnt = (end - v) / L1_CACHE_BYTES;

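	/*
	 * "ocbp" (operand cache block purge) writes the line back and
	 * then invalidates it; the loop is again unrolled by eight.
	 */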
	while (cnt >= 8) {
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt -= 8;
	}
	while (cnt) {
		asm volatile("ocbp @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt--;
	}
}

/*
 * Invalidate the D-caches, but do not write them back.
 */
static void sh4__flush_invalidate_region(void *start, int size)
{
	reg_size_t aligned_start, v, cnt, end;

	aligned_start = register_align(start);
	v = aligned_start & ~(L1_CACHE_BYTES-1);
	end = (aligned_start + size + L1_CACHE_BYTES-1)
		& ~(L1_CACHE_BYTES-1);
	cnt = (end - v) / L1_CACHE_BYTES;

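	/*
	 * "ocbi" (operand cache block invalidate) discards the line
	 * without writing back any dirty data.
	 */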
	while (cnt >= 8) {
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt -= 8;
	}

	while (cnt) {
		asm volatile("ocbi @%0" : : "r" (v));
		v += L1_CACHE_BYTES;
		cnt--;
	}
}

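/*
 * Install the SH-4 handlers as the generic cache flush entry points.
 */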
void __init sh4__flush_region_init(void)
{
	__flush_wback_region = sh4__flush_wback_region;
	__flush_invalidate_region = sh4__flush_invalidate_region;
	__flush_purge_region = sh4__flush_purge_region;
}