/*
 * Copyright (c) 2008 Travis Geiselbrecht
 * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
24#include <asm.h>
25#include <arch/ops.h>
26#include <arch/defines.h>
27
28.text
29
30#if ARM_WITH_CACHE
31
32/* low level cache routines for various cpu families */
33
34#if ARM_CPU_ARM1136 || ARM_CPU_ARM926
35
/* void arch_disable_cache(uint flags)
 *
 * In:   r0 = bitmask of DCACHE / ICACHE selecting which caches to disable
 * Runs with interrupts masked; restores the caller's cpsr on exit.
 * Clobbers r1, r2, r3, r12, flags.
 */
FUNCTION(arch_disable_cache)
	mov	r12, #0			// zero, for mcr operands whose value is ignored
	mrs	r3, cpsr		// save the old interrupt state (restored at .Ldone_disable)
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */ // interrupts disabled (hand-encoded for old assemblers)
#else
	// FIX: mask interrupts in a scratch copy.  Previously the I bit was
	// OR'ed into r3 itself, so the "msr cpsr, r3" restore below wrote the
	// masked value back and interrupts stayed disabled after return even
	// when the caller had them enabled.
	orr	r2, r3, #(1<<7)
	msr	cpsr, r2
#endif

.Ldcache_disable:
	tst	r0, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r1, c1, c0, 0	// cr1 (system control register)
	tst	r1, #(1<<2)		// is the dcache already disabled?
	beq	.Licache_disable

	bic	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0	// disable dcache

#if ARM_CPU_ARM1136
	mcr	p15, 0, r12, c7, c14, 0	// clean & invalidate entire dcache
#elif ARM_CPU_ARM926
0:
	mrc	p15, 0, r15, c7, c14, 3	// test, clean and invalidate dcache; loops until the cache is clean
	bne	0b
#else
#error whut?
#endif
	mcr	p15, 0, r0, c7, c10, 4	// data sync barrier (formerly drain write buffer)

.Licache_disable:
	tst	r0, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r1, c1, c0, 0	// cr1
	bic	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0	// disable icache

	mcr	p15, 0, r12, c7, c5, 0	// invalidate entire icache

.Ldone_disable:
	msr	cpsr, r3		// restore the caller's interrupt state
	bx	lr
81
/* void arch_enable_cache(uint flags)
 *
 * In:   r0 = bitmask of DCACHE / ICACHE selecting which caches to enable
 * Invalidates a cache before enabling it so no stale lines are hit.
 * Runs with interrupts masked; restores the caller's cpsr on exit.
 * Clobbers r1, r2, r3, r12, flags.
 */
FUNCTION(arch_enable_cache)
	mov	r12, #0			// zero, for mcr operands whose value is ignored
	mrs	r3, cpsr		// save the old interrupt state (restored at .Ldone_enable)
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */ // interrupts disabled (hand-encoded for old assemblers)
#else
	// FIX: mask interrupts in a scratch copy.  Previously the I bit was
	// OR'ed into r3 itself, so the "msr cpsr, r3" restore below wrote the
	// masked value back and interrupts stayed disabled after return even
	// when the caller had them enabled.
	orr	r2, r3, #(1<<7)
	msr	cpsr, r2
#endif

.Ldcache_enable:
	tst	r0, #DCACHE
	beq	.Licache_enable
	mrc	p15, 0, r1, c1, c0, 0	// cr1 (system control register)
	tst	r1, #(1<<2)		// is the dcache already enabled?
	bne	.Licache_enable

	mcr	p15, 0, r12, c7, c6, 0	// invalidate entire dcache first

	orr	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0	// enable dcache

.Licache_enable:
	tst	r0, #ICACHE
	beq	.Ldone_enable

	mcr	p15, 0, r12, c7, c5, 0	// invalidate entire icache first

	mrc	p15, 0, r1, c1, c0, 0	// cr1
	orr	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0	// enable icache

.Ldone_enable:
	msr	cpsr, r3		// restore the caller's interrupt state
	bx	lr
118
119#elif ARM_CPU_CORTEX_A8
120
/* void arch_disable_cache(uint flags)  -- Cortex-A8 variant
 *
 * In:   r0 = bitmask of DCACHE / ICACHE selecting which caches to disable
 * Register roles: r7 = saved flags (survives the bl calls),
 *                 r12 = saved cpsr (the cache helpers below do not touch it).
 * Runs with interrupts masked; restores the caller's cpsr on exit.
 */
FUNCTION(arch_disable_cache)
	stmfd	sp!, {r4-r11, lr}	// helpers trash r4-r11, so preserve them for the caller

	mov	r7, r0			// save flags

	mrs	r12, cpsr		// save the old interrupt state
	cpsid	iaf			// interrupts disabled

.Ldcache_disable:
	tst	r7, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r0, c1, c0, 0	// cr1 (system control register)
	tst	r0, #(1<<2)		// is the dcache already disabled?
	beq	.Ldcache_already_disabled

	bic	r0, #(1<<2)
	// make sure all data operations are completed
	dsb
	mcr	p15, 0, r0, c1, c0, 0	// disable dcache
	// make sure previous instruction finishes before we clean and flush
	isb

	// flush and invalidate the dcache
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	flush_invalidate_cache_v7

.Ldcache_already_disabled:
	// make sure all of the caches are invalidated
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	invalidate_cache_v7

.Licache_disable:
	tst	r7, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	bic	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0	// disable icache
	// make sure previous instruction finishes
	isb

.Ldone_disable:
	// make sure the icache is always invalidated, even if only DCACHE was requested
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// invalidate icache to PoU (ICIALLU)
	// make sure that data is in sync
	dsb

	msr	cpsr, r12		// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}
172
/* void arch_enable_cache(uint flags)  -- Cortex-A8 variant
 *
 * In:   r0 = bitmask of DCACHE / ICACHE selecting which caches to enable
 * Register roles: r7 = saved flags (survives the bl call),
 *                 r12 = saved cpsr (invalidate_cache_v7 does not touch it).
 * Runs with interrupts masked; restores the caller's cpsr on exit.
 */
FUNCTION(arch_enable_cache)
	stmfd	sp!, {r4-r11, lr}	// the helper trashes r4-r11, so preserve them

	mov	r7, r0			// save flags

	mrs	r12, cpsr		// save the old interrupt state
	// FIX: use the cpsid mnemonic, as arch_disable_cache in this same
	// Cortex-A8 section already does, instead of the hand-encoded
	// ".word 0xf10c01c0" form -- the assembler demonstrably supports it.
	cpsid	iaf			// interrupts disabled

.Ldcache_enable:
	tst	r7, #DCACHE
	beq	.Licache_enable
	mrc	p15, 0, r0, c1, c0, 0	// cr1 (system control register)
	tst	r0, #(1<<2)		// is the dcache already enabled?
	bne	.Licache_enable

	// invalidate L1 and L2 before enabling, so no stale lines are hit
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	invalidate_cache_v7

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	orr	r0, #(1<<2)
	mcr	p15, 0, r0, c1, c0, 0	// enable dcache

.Licache_enable:
	tst	r7, #ICACHE
	beq	.Ldone_enable

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// invalidate icache to PoU (ICIALLU)

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	orr	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0	// enable icache

.Ldone_enable:
	msr	cpsr, r12		// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}
211
// flush (clean) the entire data/unified cache hierarchy by set/way.
// Based on the set/way loop in the ARMv7 ARM (B2-17).
// Internal helper: not a public function; trashes r0-r5, r7, r9-r11, flags.
// Iterates every cache level reported by CLIDR up to the Level of Coherency.
flush_invalidate_cache_v7:
	DMB				// ensure prior stores are visible before the walk
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1	// Read CLIDR
	ANDS	R3, R0, #0x7000000	// extract LoC (bits 26:24)
	MOV	R3, R3, LSR #23		// Cache level value x2 (naturally aligned)
	BEQ	.Lfinished		// no cache levels to maintain
	MOV	R10, #0			// R10 = (cache level << 1), CSSELR format
.Loop1:
	ADD	R2, R10, R10, LSR #1	// Work out 3xcachelevel
	MOV	R1, R0, LSR R2		// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7		// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip			// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0	// write the Cache Size selection register
	ISB				// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0	// reads current Cache Size ID register
	AND	R2, R1, #0x7		// extract the line length field
	ADD	R2, R2, #4		// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3	// R4 is the max number of the way size (right aligned)
	CLZ	R5, R4			// R5 is the bit position of the way size increment
	MOV	R9, R4			// R9 working copy of max way number (right aligned)
.Loop2:
	LDR	R7, =0x00007FFF
	ANDS	R7, R7, R1, LSR #13	// R7 is the max number of the index size (right aligned)
.Loop3:
	ORR	R11, R10, R9, LSL R5	// factor in the way number and cache number into R11
	ORR	R11, R11, R7, LSL R2	// factor in the index number
	// NOTE: c7, c10, 2 is DCCSW -- clean (only) by set/way.  The old
	// comment claimed "clean & invalidate"; invalidation is done
	// separately by invalidate_cache_v7 on the disable path.
	MCR	p15, 0, R11, c7, c10, 2	// clean by set/way
	SUBS	R7, R7, #1		// decrement the set index
	BGE	.Loop3
	SUBS	R9, R9, #1		// decrement the way number
	BGE	.Loop2
.Lskip:
	ADD	R10, R10, #2		// increment the cache number
	CMP	R3, R10
	BGT	.Loop1

.Lfinished:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0	// select cache level 0 again
	dsb				// complete all maintenance before returning
	ISB
	bx	lr
259
// invalidate the entire data/unified cache hierarchy by set/way.
// Based on the set/way loop in the ARMv7 ARM (B2-17).
// Internal helper: not a public function; trashes r0-r5, r7, r9-r11, flags.
// NOTE: despite the name, the maintenance op used is c7, c14, 2 (DCCISW,
// clean AND invalidate by set/way) -- presumably chosen so that dirty
// lines are written back rather than discarded; confirm against history.
invalidate_cache_v7:
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1	// Read CLIDR
	ANDS	R3, R0, #0x7000000	// extract LoC (bits 26:24)
	MOV	R3, R3, LSR #23		// Cache level value x2 (naturally aligned)
	BEQ	.Lfinished_invalidate	// no cache levels to maintain
	MOV	R10, #0			// R10 = (cache level << 1), CSSELR format
.Loop1_invalidate:
	ADD	R2, R10, R10, LSR #1	// Work out 3xcachelevel
	MOV	R1, R0, LSR R2		// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7		// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip_invalidate	// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0	// write the Cache Size selection register
	ISB				// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0	// reads current Cache Size ID register
	AND	R2, R1, #0x7		// extract the line length field
	ADD	R2, R2, #4		// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3	// R4 is the max number of the way size (right aligned)
	CLZ	R5, R4			// R5 is the bit position of the way size increment
	MOV	R9, R4			// R9 working copy of the max way number (right aligned)
.Loop2_invalidate:
	LDR	R7, =0x00007FFF
	ANDS	R7, R7, R1, LSR #13	// R7 is the max number of the index size (right aligned)
.Loop3_invalidate:
	ORR	R11, R10, R9, LSL R5	// factor in the way number and cache number into R11
	ORR	R11, R11, R7, LSL R2	// factor in the index number
	MCR	p15, 0, R11, c7, c14, 2	// clean and invalidate by set/way (DCCISW)
	SUBS	R7, R7, #1		// decrement the set index
	BGE	.Loop3_invalidate
	SUBS	R9, R9, #1		// decrement the way number
	BGE	.Loop2_invalidate
.Lskip_invalidate:
	ADD	R10, R10, #2		// increment the cache number
	CMP	R3, R10
	BGT	.Loop1_invalidate

.Lfinished_invalidate:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0	// select cache level 0 again
	dsb				// complete all maintenance before returning
	ISB
	bx	lr
306
307#else
308#error unhandled cpu
309#endif
310
311#if ARM_CPU_ARM926 || ARM_CPU_ARM1136 || ARM_CPU_CORTEX_A8
312/* shared cache flush routines */
313
	/* void arch_clean_cache_range(addr_t start, size_t len);
	 *
	 * Clean (write back, no invalidate) every dcache line touching
	 * [start, start+len) to the Point of Coherency, by MVA.
	 * In: r0 = start, r1 = len.  Clobbers r0, r2, flags.
	 */
FUNCTION(arch_clean_cache_range)
	add	r2, r0, r1		// r2 = end address (exclusive)
	bic	r0,#(CACHE_LINE-1)	// align start down to a cache line boundary
0:
	mcr	p15, 0, r0, c7, c10, 1	// DCCMVAC: clean cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b
	// NOTE(review): do-while shape -- the body runs at least once, so
	// len == 0 still cleans the line containing 'start'.  Harmless.

	mov	r0, #0
	dsb				// ensure all cleans have completed

	bx	lr
328
	/* void arch_clean_invalidate_cache_range(addr_t start, size_t len);
	 *
	 * Clean and invalidate every dcache line touching [start, start+len)
	 * to the Point of Coherency, by MVA.
	 * In: r0 = start, r1 = len.  Clobbers r0, r2, flags.
	 */
FUNCTION(arch_clean_invalidate_cache_range)
	dsb				// complete any outstanding memory accesses first
	add	r2, r0, r1		// r2 = end address (exclusive)
	bic	r0,#(CACHE_LINE-1)	// align start down to a cache line boundary
0:
	mcr	p15, 0, r0, c7, c14, 1	// DCCIMVAC: clean & invalidate cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b
	// NOTE(review): do-while shape -- len == 0 still processes one line.

	mov	r0, #0
	dsb				// ensure all maintenance has completed

	bx	lr
Travis Geiselbrecht8b99c682010-06-16 00:37:13 -0700344
	/* void arch_invalidate_cache_range(addr_t start, size_t len);
	 *
	 * Invalidate (discard, no write back) every dcache line touching
	 * [start, start+len), by MVA.
	 * In: r0 = start, r1 = len.  Clobbers r0, r2, flags.
	 *
	 * NOTE(review): because start is aligned DOWN and the end line is
	 * fully invalidated, dirty data that merely shares a cache line
	 * with the range's edges is discarded too -- callers must pass
	 * line-aligned buffers (or clean the edges first); confirm callers.
	 */
FUNCTION(arch_invalidate_cache_range)
	/* invalidate cache line */
	add	r2, r0, r1		// r2 = end address (exclusive)
	bic	r0,#(CACHE_LINE-1)	// align start down to a cache line boundary
0:
	mcr	p15, 0, r0, c7, c6, 1	// DCIMVAC: invalidate dcache line by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b
	mov	r0, #0
	dsb				// ensure all invalidates have completed
	bx	lr
358
	/* void arch_sync_cache_range(addr_t start, size_t len);
	 *
	 * Synchronize the icache with freshly written code: clean the dcache
	 * over the range, then invalidate the entire icache.
	 * In: r0 = start, r1 = len.  Clobbers r0, r2, lr-save slot, flags.
	 */
FUNCTION(arch_sync_cache_range)
	push	{ r14 }
	bl	arch_clean_cache_range	// push the new code out to PoC (includes a dsb)

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// ICIALLU: invalidate icache to PoU
	// NOTE(review): no dsb/isb follows the icache invalidate; callers
	// that branch straight into the synced range may need a barrier --
	// confirm against the call sites.

	pop	{ pc }
368
Travis Geiselbrecht1d0df692008-09-01 02:26:09 -0700369#else
370#error unhandled cpu
371#endif
372
373#else
374
/* No-cache builds (ARM_WITH_CACHE unset): every cache maintenance entry
 * point is a no-op stub so callers link and run unchanged. */

FUNCTION(arch_disable_cache)
	bx	lr

FUNCTION(arch_enable_cache)
	bx	lr

FUNCTION(arch_clean_cache_range)
	bx	lr

FUNCTION(arch_clean_invalidate_cache_range)
	bx	lr

/* FIX: the cached build also exports arch_invalidate_cache_range, but no
 * stub existed here, leaving no-cache builds with an undefined symbol if
 * any caller uses it.  Provide the matching no-op. */
FUNCTION(arch_invalidate_cache_range)
	bx	lr

FUNCTION(arch_sync_cache_range)
	bx	lr
391
Travis Geiselbrecht1d0df692008-09-01 02:26:09 -0700392#endif // ARM_WITH_CACHE
393