/*
 * Copyright (c) 2008 Travis Geiselbrecht
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <asm.h>
#include <arch/ops.h>
#include <arch/defines.h>

.text

#if ARM_WITH_CACHE

/* low level cache routines for various cpu families */

#if ARM_CPU_ARM1136 || ARM_CPU_ARM926

/* void arch_disable_cache(uint flags)
 *
 * Disables the data and/or instruction cache, as selected by the DCACHE
 * and ICACHE bits in flags, cleaning/invalidating as required.
 * Interrupts are masked for the duration and the caller's interrupt
 * state is restored before returning.
 *
 * In:    r0 = flags (DCACHE | ICACHE); r0 is only tested, not modified.
 * Clobbers: r1, r2, r3, r12, flags.
 */
FUNCTION(arch_disable_cache)
	mov	r12, #0				// zero register for "SBZ" cp15 writes
	mrs	r3, cpsr			// save the old interrupt state (restored at exit)
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled
#else
	// pre-v6: mask IRQ by hand.  BUGFIX: build the masked value in a
	// scratch register -- the original or'ed the mask into r3 itself,
	// so the "restore" at .Ldone_disable wrote back the *disabled*
	// state and left interrupts off after this routine returned.
	orr	r2, r3, #(1<<7)
	msr	cpsr, r2
#endif

.Ldcache_disable:
	tst	r0, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r1, c1, c0, 0		// cr1
	tst	r1, #(1<<2)			// is the dcache already disabled?
	beq	.Licache_disable

	bic	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0		// disable dcache

#if ARM_CPU_ARM1136
	mcr	p15, 0, r12, c7, c14, 0		// clean & invalidate entire dcache
#elif ARM_CPU_ARM926
0:
	mrc	p15, 0, r15, c7, c14, 3		// test, clean & invalidate dcache (Rd=pc: sets flags)
	bne	0b				// loop until the whole dcache is clean
#else
#error whut?
#endif
	mcr	p15, 0, r0, c7, c10, 4		// data sync barrier (formerly drain write buffer)

.Licache_disable:
	tst	r0, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r1, c1, c0, 0		// cr1
	bic	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0		// disable icache

	mcr	p15, 0, r12, c7, c5, 0		// invalidate icache

.Ldone_disable:
	msr	cpsr, r3			// restore the caller's interrupt state
	bx	lr
/* void arch_enable_cache(uint flags)
 *
 * Enables the data and/or instruction cache, as selected by the DCACHE
 * and ICACHE bits in flags, invalidating each cache before enabling it
 * (contents are stale after having been off).  Interrupts are masked
 * for the duration and the caller's interrupt state is restored before
 * returning.
 *
 * In:    r0 = flags (DCACHE | ICACHE); r0 is only tested, not modified.
 * Clobbers: r1, r2, r3, r12, flags.
 */
FUNCTION(arch_enable_cache)
	mov	r12, #0				// zero register for "SBZ" cp15 writes
	mrs	r3, cpsr			// save the old interrupt state (restored at exit)
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled
#else
	// pre-v6: mask IRQ by hand.  BUGFIX: build the masked value in a
	// scratch register -- the original or'ed the mask into r3 itself,
	// so the "restore" at .Ldone_enable wrote back the *disabled*
	// state and left interrupts off after this routine returned.
	orr	r2, r3, #(1<<7)
	msr	cpsr, r2
#endif

.Ldcache_enable:
	tst	r0, #DCACHE
	beq	.Licache_enable
	mrc	p15, 0, r1, c1, c0, 0		// cr1
	tst	r1, #(1<<2)			// is the dcache already enabled?
	bne	.Licache_enable

	mcr	p15, 0, r12, c7, c6, 0		// invalidate dcache (no clean: it was off, contents stale)

	orr	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0		// enable dcache

.Licache_enable:
	tst	r0, #ICACHE
	beq	.Ldone_enable

	mcr	p15, 0, r12, c7, c5, 0		// invalidate icache

	mrc	p15, 0, r1, c1, c0, 0		// cr1
	orr	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0		// enable icache

.Ldone_enable:
	msr	cpsr, r3			// restore the caller's interrupt state
	bx	lr

#elif ARM_CPU_CORTEX_A8

/* void arch_disable_cache(uint flags) */
// Cortex-A8 variant.  Disables the L1 dcache (and L2, when configured)
// and/or the icache per the DCACHE/ICACHE bits in flags, cleaning and
// invalidating as it goes.  Interrupts are masked for the duration; the
// caller's interrupt state is kept in r12 and restored on exit.
// Clobbers: r0-r3 plus whatever flush_invalidate_cache_v7 trashes
// (r0-r6, r9-r11); r4-r11 are saved/restored on the stack.
FUNCTION(arch_disable_cache)
	stmfd	sp!, {r4-r11, lr}		// save callee-saved regs + return address

	mov	r7, r0				// save flags; r7 survives the bl below

	mrs	r12, cpsr			// save the old interrupt state
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled

.Ldcache_disable:
	tst	r7, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r0, c1, c0, 0		// cr1

	bic	r0, #(1<<2)
	mcr	p15, 0, r0, c1, c0, 0		// disable dcache

	// flush and invalidate the dcache
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	flush_invalidate_cache_v7

.Ldcache_disable_L2:

#if ARM_WITH_L2
	// disable the L2, if present
	mrc	p15, 0, r0, c1, c0, 1		// aux cr1
	bic	r0, #(1<<1)
	mcr	p15, 0, r0, c1, c0, 1		// disable L2 dcache
#endif

.Licache_disable:
	tst	r7, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r0, c1, c0, 0		// cr1
	bic	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0		// disable icache

.Ldone_disable:
	// make sure the icache is always invalidated
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		// invalidate icache to PoU

	msr	cpsr, r12			// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}		// restore regs and return
/* void arch_enable_cache(uint flags) */
// Cortex-A8 variant.  Enables the L1 dcache (and L2, when configured)
// and/or the icache per the DCACHE/ICACHE bits in flags, invalidating
// each cache before it is switched on.  Interrupts are masked for the
// duration; the caller's interrupt state is kept in r12 and restored on
// exit.  Clobbers: r0-r3 plus whatever invalidate_cache_v7 trashes
// (r0-r6, r9-r11); r4-r11 are saved/restored on the stack.
FUNCTION(arch_enable_cache)
	stmfd	sp!, {r4-r11, lr}		// save callee-saved regs + return address

	mov	r7, r0				// save flags; r7 survives the bl below

	mrs	r12, cpsr			// save the old interrupt state
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled

.Ldcache_enable:
	tst	r7, #DCACHE
	beq	.Licache_enable

	// invalidate L1 and L2
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	invalidate_cache_v7

#if ARM_WITH_L2
	// enable the L2, if present
	mrc	p15, 0, r0, c1, c0, 1		// aux cr1
	orr	r0, #(1<<1)
	mcr	p15, 0, r0, c1, c0, 1		// enable L2 dcache
#endif

	// NOTE(review): the enable bit is explicitly written to 0 and then
	// to 1 below (likewise for the icache further down).  This reads as
	// a deliberate off->on toggle rather than redundant code -- confirm
	// against the platform's history before simplifying.
	mrc	p15, 0, r0, c1, c0, 0		// cr1
	bic	r0, #(1<<2)
	mcr	p15, 0, r0, c1, c0, 0		// disable dcache

	mrc	p15, 0, r0, c1, c0, 0		// cr1
	orr	r0, #(1<<2)
	mcr	p15, 0, r0, c1, c0, 0		// enable dcache

.Licache_enable:
	tst	r7, #ICACHE
	beq	.Ldone_enable

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		// invalidate icache to PoU

	mrc	p15, 0, r0, c1, c0, 0		// cr1
	bic	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0		// disable icache

	mrc	p15, 0, r0, c1, c0, 0		// cr1
	orr	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0		// enable icache

.Ldone_enable:
	msr	cpsr, r12			// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}		// restore regs and return
// flush & invalidate cache routine, trashes r0-r6, r9-r11
// Internal helper (not a public FUNCTION): walks every data/unified cache
// level reported by the CLIDR and performs clean+invalidate by set/way on
// each.  Preserves r7, r8 and r12, which the A8 callers above rely on.
// Leaves the cache-size selection register pointing at level 0 on return.
flush_invalidate_cache_v7:
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1		// Read CLIDR
	ANDS	R3, R0, #0x7000000		// extract the level-of-coherency field
	MOV	R3, R3, LSR #23			// Cache level value (naturally aligned)
	BEQ	.Lfinished			// LoC == 0: nothing to maintain
	MOV	R10, #0				// R10 = current level << 1 (CSSELR format)
.Loop1:
	ADD	R2, R10, R10, LSR #1		// Work out 3xcachelevel
	MOV	R1, R0, LSR R2			// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7			// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip				// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0		// write the Cache Size selection register
	.word	0xf57ff06f // ISB		// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0		// reads current Cache Size ID register
	AND	R2, R1, #0x7			// extract the line length field
	ADD	R2, R2, #4			// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3		// R4 is the max number on the way size (right aligned)
	CLZ	R5, R4				// R5 is the bit position of the way size increment
	LDR	R6, =0x00007FFF
	ANDS	R6, R6, R1, LSR #13		// R6 is the max number of the index size (right aligned)
.Loop2:
	MOV	R9, R4				// R9 working copy of the max way size (right aligned)
.Loop3:
	ORR	R11, R10, R9, LSL R5		// factor in the way number and cache number into R11
	ORR	R11, R11, R6, LSL R2		// factor in the index number
	MCR	p15, 0, R11, c7, c14, 2		// clean & invalidate by set/way
	SUBS	R9, R9, #1			// decrement the way number
	BGE	.Loop3
	SUBS	R6, R6, #1			// decrement the index
	BGE	.Loop2
.Lskip:
	ADD	R10, R10, #2			// increment the cache number
	CMP	R3, R10
	BGT	.Loop1

.Lfinished:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0		// select cache level 0
	.word	0xf57ff06f // isb

	bx	lr
// invalidate cache routine, trashes r0-r6, r9-r11
// Internal helper: same CLIDR-driven set/way walk as
// flush_invalidate_cache_v7 above, but invalidate-only (c7, c6, 2) --
// dirty lines are discarded, not written back.  Used before enabling a
// cache whose contents are stale.  Preserves r7, r8 and r12; leaves the
// cache-size selection register pointing at level 0 on return.
invalidate_cache_v7:
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1		// Read CLIDR
	ANDS	R3, R0, #0x7000000		// extract the level-of-coherency field
	MOV	R3, R3, LSR #23			// Cache level value (naturally aligned)
	BEQ	.Lfinished_invalidate		// LoC == 0: nothing to maintain
	MOV	R10, #0				// R10 = current level << 1 (CSSELR format)
.Loop1_invalidate:
	ADD	R2, R10, R10, LSR #1		// Work out 3xcachelevel
	MOV	R1, R0, LSR R2			// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7			// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip_invalidate		// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0		// write the Cache Size selection register
	.word	0xf57ff06f // ISB		// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0		// reads current Cache Size ID register
	AND	R2, R1, #0x7			// extract the line length field
	ADD	R2, R2, #4			// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3		// R4 is the max number on the way size (right aligned)
	CLZ	R5, R4				// R5 is the bit position of the way size increment
	LDR	R6, =0x00007FFF
	ANDS	R6, R6, R1, LSR #13		// R6 is the max number of the index size (right aligned)
.Loop2_invalidate:
	MOV	R9, R4				// R9 working copy of the max way size (right aligned)
.Loop3_invalidate:
	ORR	R11, R10, R9, LSL R5		// factor in the way number and cache number into R11
	ORR	R11, R11, R6, LSL R2		// factor in the index number
	MCR	p15, 0, R11, c7, c6, 2		// invalidate by set/way
	SUBS	R9, R9, #1			// decrement the way number
	BGE	.Loop3_invalidate
	SUBS	R6, R6, #1			// decrement the index
	BGE	.Loop2_invalidate
.Lskip_invalidate:
	ADD	R10, R10, #2			// increment the cache number
	CMP	R3, R10
	BGT	.Loop1_invalidate

.Lfinished_invalidate:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0		// select cache level 0
	.word	0xf57ff06f // isb

	bx	lr

#else
#error unhandled cpu
#endif

#if ARM_CPU_ARM926 || ARM_CPU_ARM1136 || ARM_CPU_CORTEX_A8
/* shared cache flush routines */

	/* void arch_clean_cache_range(addr_t start, size_t len); */
	// Cleans (writes back) every dcache line covering [start, start+len)
	// to the point of coherency by MVA, then issues a data sync barrier.
	// In: r0 = start address, r1 = length in bytes.  Clobbers r0, r1, flags.
FUNCTION(arch_clean_cache_range)
0:
	mcr	p15, 0, r0, c7, c10, 1		// clean cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	subs	r1, r1, #CACHE_LINE
	bhs	0b				// NOTE(review): bhs runs one extra iteration when len is an
						// exact multiple of CACHE_LINE -- presumably deliberate, so a
						// non-line-aligned start still has its last line covered; confirm

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4		// data sync barrier (formerly drain write buffer)

	bx	lr
	/* void arch_clean_invalidate_cache_range(addr_t start, size_t len); */
	// Cleans and invalidates every dcache line covering [start, start+len)
	// to the point of coherency by MVA, then issues a data sync barrier.
	// In: r0 = start address, r1 = length in bytes.  Clobbers r0, r1, flags.
FUNCTION(arch_clean_invalidate_cache_range)
0:
	mcr	p15, 0, r0, c7, c14, 1		// clean & invalidate cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	subs	r1, r1, #CACHE_LINE
	bhs	0b				// NOTE(review): bhs runs one extra iteration when len is an
						// exact multiple of CACHE_LINE -- presumably deliberate, so a
						// non-line-aligned start still has its last line covered; confirm

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4		// data sync barrier (formerly drain write buffer)

	bx	lr
#else
#error unhandled cpu
#endif

#else

/* no cache */
// Stub implementations used when the platform is built without cache
// support (ARM_WITH_CACHE unset): every cache routine is a no-op that
// simply returns to the caller.

FUNCTION(arch_disable_cache)
	bx	lr				// nothing to disable

FUNCTION(arch_enable_cache)
	bx	lr				// nothing to enable

FUNCTION(arch_clean_cache_range)
	bx	lr				// nothing to clean

FUNCTION(arch_clean_invalidate_cache_range)
	bx	lr				// nothing to clean/invalidate

#endif // ARM_WITH_CACHE
