/*
 * Copyright (c) 2008 Travis Geiselbrecht
 * Copyright (c) 2013-2014, The Linux Foundation. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <asm.h>
#include <arch/ops.h>
#include <arch/defines.h>

.text

#if ARM_WITH_CACHE

/* low level cache routines for various cpu families */

#if ARM_CPU_ARM1136 || ARM_CPU_ARM926

/* void arch_disable_cache(uint flags)
 *
 * Disable the dcache and/or icache (DCACHE/ICACHE bits in r0) on
 * ARM1136/ARM926, cleaning/invalidating as appropriate.  Interrupts
 * are masked for the duration and the caller's CPSR is restored on
 * exit.  Clobbers r1-r3, r12.
 */
FUNCTION(arch_disable_cache)
    mov     r12, #0                     // zero register for cp15 writes
    mrs     r3, cpsr                    // save the old interrupt state
#if ARM_ISA_ARMv6
    .word 0xf10c01c0 /* cpsid iaf */    // interrupts disabled
#else
    // BUGFIX: mask interrupts in scratch r2 so the *original* CPSR in
    // r3 (restored at .Ldone_disable) keeps the caller's interrupt
    // state; the old code or'd the I bit into r3 itself, leaving
    // interrupts permanently disabled after return.
    orr     r2, r3, #(1<<7)
    msr     cpsr, r2
#endif

.Ldcache_disable:
    tst     r0, #DCACHE
    beq     .Licache_disable
    mrc     p15, 0, r1, c1, c0, 0       // cr1
    tst     r1, #(1<<2)                 // is the dcache already disabled?
    beq     .Licache_disable

    bic     r1, #(1<<2)
    mcr     p15, 0, r1, c1, c0, 0       // disable dcache

#if ARM_CPU_ARM1136
    mcr     p15, 0, r12, c7, c14, 0     // clean & invalidate entire dcache
#elif ARM_CPU_ARM926
0:
    mrc     p15, 0, r15, c7, c14, 3     // test, clean and invalidate dcache
    bne     0b                          // loop until the whole dcache is clean
#else
#error whut?
#endif
    mcr     p15, 0, r0, c7, c10, 4      // data sync barrier (formerly drain write buffer)

.Licache_disable:
    tst     r0, #ICACHE
    beq     .Ldone_disable

    mrc     p15, 0, r1, c1, c0, 0       // cr1
    bic     r1, #(1<<12)
    mcr     p15, 0, r1, c1, c0, 0       // disable icache

    mcr     p15, 0, r12, c7, c5, 0      // invalidate icache

.Ldone_disable:
    msr     cpsr, r3                    // restore caller's interrupt state
    bx      lr

/* void arch_enable_cache(uint flags)
 *
 * Enable the dcache and/or icache (DCACHE/ICACHE bits in r0) on
 * ARM1136/ARM926, invalidating each cache before turning it on.
 * Interrupts are masked for the duration and the caller's CPSR is
 * restored on exit.  Clobbers r1-r3, r12.
 */
FUNCTION(arch_enable_cache)
    mov     r12, #0                     // zero register for cp15 writes
    mrs     r3, cpsr                    // save the old interrupt state
#if ARM_ISA_ARMv6
    .word 0xf10c01c0 /* cpsid iaf */    // interrupts disabled
#else
    // BUGFIX: mask interrupts via scratch r2 so the saved CPSR in r3
    // is restored unmodified at .Ldone_enable (the old code clobbered
    // r3, leaving interrupts disabled after return).
    orr     r2, r3, #(1<<7)
    msr     cpsr, r2
#endif

.Ldcache_enable:
    tst     r0, #DCACHE
    beq     .Licache_enable
    mrc     p15, 0, r1, c1, c0, 0       // cr1
    tst     r1, #(1<<2)                 // is the dcache already enabled?
    bne     .Licache_enable

    mcr     p15, 0, r12, c7, c6, 0      // invalidate dcache before enabling

    orr     r1, #(1<<2)
    mcr     p15, 0, r1, c1, c0, 0       // enable dcache

.Licache_enable:
    tst     r0, #ICACHE
    beq     .Ldone_enable

    mcr     p15, 0, r12, c7, c5, 0      // invalidate icache before enabling

    mrc     p15, 0, r1, c1, c0, 0       // cr1
    orr     r1, #(1<<12)
    mcr     p15, 0, r1, c1, c0, 0       // enable icache

.Ldone_enable:
    msr     cpsr, r3                    // restore caller's interrupt state
    bx      lr

#elif ARM_CPU_CORTEX_A8

/* void arch_disable_cache(uint flags)
 *
 * Cortex-A8 variant.  Disables the dcache and/or icache (DCACHE/ICACHE
 * bits in r0); the dcache path cleans+invalidates all levels via
 * flush_invalidate_cache_v7 (or just invalidates if already off), and
 * the L2 is disabled when ARM_WITH_L2.  Interrupts are masked for the
 * duration; the caller's CPSR (saved in r12) is restored on exit.
 * NOTE: cannot take interrupts between cpsid and the final msr.
 */
FUNCTION(arch_disable_cache)
    stmfd   sp!, {r4-r11, lr}

    mov     r7, r0                      // save flags

    mrs     r12, cpsr                   // save the old interrupt state
    .word 0xf10c01c0 /* cpsid iaf */    // interrupts disabled

.Ldcache_disable:
    tst     r7, #DCACHE
    beq     .Licache_disable
    mrc     p15, 0, r0, c1, c0, 0       // cr1
    tst     r0, #(1<<2)                 // is the dcache already disabled?
    beq     .Ldcache_already_disabled

    bic     r0, #(1<<2)
    // make sure all data operations are completed
    dsb
    mcr     p15, 0, r0, c1, c0, 0       // disable dcache
    // make sure previous instruction finishes before we clean and flush
    isb

    // flush and invalidate the dcache
    // NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
    bl      flush_invalidate_cache_v7

    b       .Ldcache_disable_L2

.Ldcache_already_disabled:
    // make sure all of the caches are invalidated
    // NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
    bl      invalidate_cache_v7

.Ldcache_disable_L2:

#if ARM_WITH_L2
    // disable the L2, if present
    mrc     p15, 0, r0, c1, c0, 1       // aux cr1
    bic     r0, #(1<<1)
    mcr     p15, 0, r0, c1, c0, 1       // disable L2 dcache
#endif

.Licache_disable:
    tst     r7, #ICACHE
    beq     .Ldone_disable

    mrc     p15, 0, r0, c1, c0, 0       // cr1
    bic     r0, #(1<<12)
    mcr     p15, 0, r0, c1, c0, 0       // disable icache
    // make sure previous instruction finishes
    isb

.Ldone_disable:
    // make sure the icache is always invalidated
    mov     r0, #0
    mcr     p15, 0, r0, c7, c5, 0       // invalidate icache to PoU
    // make sure that data is in sync
    dsb

    msr     cpsr, r12                   // restore caller's interrupt state
    ldmfd   sp!, {r4-r11, pc}

/* void arch_enable_cache(uint flags)
 *
 * Cortex-A8 variant.  Enables the dcache and/or icache (DCACHE/ICACHE
 * bits in r0), invalidating L1/L2 first via invalidate_cache_v7 and
 * enabling the L2 when ARM_WITH_L2.  Interrupts are masked for the
 * duration; the caller's CPSR (saved in r12) is restored on exit.
 */
FUNCTION(arch_enable_cache)
    stmfd   sp!, {r4-r11, lr}

    mov     r7, r0                      // save flags

    mrs     r12, cpsr                   // save the old interrupt state
    .word 0xf10c01c0 /* cpsid iaf */    // interrupts disabled

.Ldcache_enable:
    tst     r7, #DCACHE
    beq     .Licache_enable
    mrc     p15, 0, r0, c1, c0, 0       // cr1
    tst     r0, #(1<<2)                 // is the dcache already enabled?
    bne     .Licache_enable

    // invalidate L1 and L2
    // NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
    bl      invalidate_cache_v7

#if ARM_WITH_L2
    // enable the L2, if present
    mrc     p15, 0, r0, c1, c0, 1       // aux cr1
    orr     r0, #(1<<1)
    mcr     p15, 0, r0, c1, c0, 1       // enable L2 dcache
#endif

    mrc     p15, 0, r0, c1, c0, 0       // cr1
    orr     r0, #(1<<2)
    mcr     p15, 0, r0, c1, c0, 0       // enable dcache

.Licache_enable:
    tst     r7, #ICACHE
    beq     .Ldone_enable

    mov     r0, #0
    mcr     p15, 0, r0, c7, c5, 0       // invalidate icache to PoU

    mrc     p15, 0, r0, c1, c0, 0       // cr1
    orr     r0, #(1<<12)
    mcr     p15, 0, r0, c1, c0, 0       // enable icache

.Ldone_enable:
    msr     cpsr, r12                   // restore caller's interrupt state
    ldmfd   sp!, {r4-r11, pc}

// flush & invalidate cache routine, trashes r0-r6, r9-r11
//
// Walks every data/unified cache level reported by CLIDR and
// cleans+invalidates it by set/way, following the example sequence in
// the ARMv7-A Architecture Reference Manual (B2-17).  Internal helper:
// must not touch the stack (callers run with the dcache in flux).
flush_invalidate_cache_v7:
    DMB                                 // order prior stores before the walk
    /* from ARMv7 manual, B2-17 */
    MRC     p15, 1, R0, c0, c0, 1       // Read CLIDR
    ANDS    R3, R0, #0x7000000          // extract Level of Coherency
    MOV     R3, R3, LSR #23             // Cache level value (naturally aligned)
    BEQ     .Lfinished
    MOV     R10, #0
.Loop1:
    ADD     R2, R10, R10, LSR #1        // Work out 3xcachelevel
    MOV     R1, R0, LSR R2              // bottom 3 bits are the Cache type for this level
    AND     R1, R1, #7                  // get those 3 bits alone
    CMP     R1, #2
    BLT     .Lskip                      // no cache or only instruction cache at this level
    MCR     p15, 2, R10, c0, c0, 0      // write the Cache Size selection register
    .word 0xf57ff06f // ISB             // ISB to sync the change to the CacheSizeID reg
    MRC     p15, 1, R1, c0, c0, 0       // reads current Cache Size ID register
    AND     R2, R1, #0x7                // extract the line length field
    ADD     R2, R2, #4                  // add 4 for the line length offset (log2 16 bytes)
    LDR     R4, =0x3FF
    ANDS    R4, R4, R1, LSR #3          // R4 is the max number on the way size (right aligned)
    CLZ     R5, R4                      // R5 is the bit position of the way size increment
    LDR     R6, =0x00007FFF
    ANDS    R6, R6, R1, LSR #13         // R6 is the max number of the index size (right aligned)
.Loop2:
    MOV     R9, R4                      // R9 working copy of the max way size (right aligned)
.Loop3:
    ORR     R11, R10, R9, LSL R5        // factor in the way number and cache number into R11
    ORR     R11, R11, R6, LSL R2        // factor in the index number
    MCR     p15, 0, R11, c7, c14, 2     // clean & invalidate by set/way
    SUBS    R9, R9, #1                  // decrement the way number
    BGE     .Loop3
    SUBS    R6, R6, #1                  // decrement the index
    BGE     .Loop2
.Lskip:
    ADD     R10, R10, #2                // increment the cache number
    CMP     R3, R10
    BGT     .Loop1

.Lfinished:
    mov     r10, #0
    mcr     p15, 2, r10, c0, c0, 0      // select cache level 0
    dsb                                 // complete all maintenance ops
    .word 0xf57ff06f // isb

    bx      lr

// invalidate cache routine, trashes r0-r6, r9-r11
//
// Same CLIDR walk as flush_invalidate_cache_v7 but invalidates only
// (c7, c6, 2) — dirty lines are discarded, so this must only run when
// no live data is in the caches (e.g. before first enable).  Internal
// helper: must not touch the stack.
invalidate_cache_v7:
    /* from ARMv7 manual, B2-17 */
    MRC     p15, 1, R0, c0, c0, 1       // Read CLIDR
    ANDS    R3, R0, #0x7000000          // extract Level of Coherency
    MOV     R3, R3, LSR #23             // Cache level value (naturally aligned)
    BEQ     .Lfinished_invalidate
    MOV     R10, #0
.Loop1_invalidate:
    ADD     R2, R10, R10, LSR #1        // Work out 3xcachelevel
    MOV     R1, R0, LSR R2              // bottom 3 bits are the Cache type for this level
    AND     R1, R1, #7                  // get those 3 bits alone
    CMP     R1, #2
    BLT     .Lskip_invalidate           // no cache or only instruction cache at this level
    MCR     p15, 2, R10, c0, c0, 0      // write the Cache Size selection register
    .word 0xf57ff06f // ISB             // ISB to sync the change to the CacheSizeID reg
    MRC     p15, 1, R1, c0, c0, 0       // reads current Cache Size ID register
    AND     R2, R1, #0x7                // extract the line length field
    ADD     R2, R2, #4                  // add 4 for the line length offset (log2 16 bytes)
    LDR     R4, =0x3FF
    ANDS    R4, R4, R1, LSR #3          // R4 is the max number on the way size (right aligned)
    CLZ     R5, R4                      // R5 is the bit position of the way size increment
    LDR     R6, =0x00007FFF
    ANDS    R6, R6, R1, LSR #13         // R6 is the max number of the index size (right aligned)
.Loop2_invalidate:
    MOV     R9, R4                      // R9 working copy of the max way size (right aligned)
.Loop3_invalidate:
    ORR     R11, R10, R9, LSL R5        // factor in the way number and cache number into R11
    ORR     R11, R11, R6, LSL R2        // factor in the index number
    MCR     p15, 0, R11, c7, c6, 2      // invalidate by set/way
    SUBS    R9, R9, #1                  // decrement the way number
    BGE     .Loop3_invalidate
    SUBS    R6, R6, #1                  // decrement the index
    BGE     .Loop2_invalidate
.Lskip_invalidate:
    ADD     R10, R10, #2                // increment the cache number
    CMP     R3, R10
    BGT     .Loop1_invalidate

.Lfinished_invalidate:
    mov     r10, #0
    mcr     p15, 2, r10, c0, c0, 0      // select cache level 0
    dsb                                 // complete all maintenance ops
    .word 0xf57ff06f // isb

    bx      lr

#else
#error unhandled cpu
#endif

#if ARM_CPU_ARM926 || ARM_CPU_ARM1136 || ARM_CPU_CORTEX_A8
/* shared cache flush routines */

/* void arch_clean_cache_range(addr_t start, size_t len);
 *
 * Clean (write back, no invalidate) the dcache to PoC for [start,
 * start+len), one line at a time, then dsb.
 * NOTE(review): assumes start is line-aligned and len > 0; a len that
 * is not a CACHE_LINE multiple is rounded up by the bhs loop — confirm
 * callers honor this.  (Comment fixed: this is the *clean* routine,
 * the old header named it arch_flush_cache_range.)
 */
FUNCTION(arch_clean_cache_range)
0:
    mcr     p15, 0, r0, c7, c10, 1      // clean cache to PoC by MVA
    add     r0, r0, #CACHE_LINE
    subs    r1, r1, #CACHE_LINE
    bhs     0b                          // unsigned: loop while bytes remain

    mov     r0, #0
    dsb                                 // make the clean visible before return

    bx      lr

/* void arch_clean_invalidate_cache_range(addr_t start, size_t len);
 *
 * Clean and invalidate the dcache to PoC for [start, start+len), one
 * line at a time, then dsb.  (Comment fixed: the old header named it
 * arch_flush_invalidate_cache_range, which does not match the symbol.)
 */
FUNCTION(arch_clean_invalidate_cache_range)
0:
    mcr     p15, 0, r0, c7, c14, 1      // clean & invalidate cache to PoC by MVA
    add     r0, r0, #CACHE_LINE
    subs    r1, r1, #CACHE_LINE
    bhs     0b                          // unsigned: loop while bytes remain

    mov     r0, #0
    dsb                                 // make the maintenance visible before return

    bx      lr

/* void arch_invalidate_cache_range(addr_t start, size_t len);
 *
 * Invalidate (no clean) the dcache for [start, start+len) by MVA, then
 * dsb.  Dirty data in the range is discarded.
 * NOTE(review): if start/len are not line-aligned, the partial first/
 * last lines are invalidated whole, discarding neighbors' dirty data —
 * callers must pass line-aligned ranges.
 */
FUNCTION(arch_invalidate_cache_range)
0:
    /* invalidate cache line */
    mcr     p15, 0, r0, c7, c6, 1
    add     r0, r0, #CACHE_LINE
    subs    r1, r1, #CACHE_LINE
    bhs     0b                          // unsigned: loop while bytes remain
    mov     r0, #0
    dsb                                 // make the invalidate visible before return
    bx      lr

/* void arch_sync_cache_range(addr_t start, size_t len);
 *
 * Synchronize the icache with the dcache after code has been written:
 * clean the dcache over the range so the writes reach memory, then
 * invalidate the whole icache to PoU so stale instructions are
 * refetched.
 */
FUNCTION(arch_sync_cache_range)
    push    { r14 }
    bl      arch_clean_cache_range      // write the new code back to PoC

    mov     r0, #0
    mcr     p15, 0, r0, c7, c5, 0       // invalidate icache to PoU

    pop     { pc }

#else
#error unhandled cpu
#endif

#else

/* no cache */

/* no-op: build has no cache support (ARM_WITH_CACHE unset) */
FUNCTION(arch_disable_cache)
    bx      lr

/* no-op: build has no cache support (ARM_WITH_CACHE unset) */
FUNCTION(arch_enable_cache)
    bx      lr

/* no-op: build has no cache support (ARM_WITH_CACHE unset) */
FUNCTION(arch_clean_cache_range)
    bx      lr

/* no-op: build has no cache support (ARM_WITH_CACHE unset) */
FUNCTION(arch_clean_invalidate_cache_range)
    bx      lr

/* no-op: build has no cache support (ARM_WITH_CACHE unset) */
FUNCTION(arch_sync_cache_range)
    bx      lr

/* no-op stub added for parity with the cached build, which exports
 * arch_invalidate_cache_range; without it, callers fail to link when
 * ARM_WITH_CACHE is unset. */
FUNCTION(arch_invalidate_cache_range)
    bx      lr

Travis Geiselbrecht1d0df692008-09-01 02:26:09 -0700403#endif // ARM_WITH_CACHE
404