Travis Geiselbrecht | 1d0df69 | 2008-09-01 02:26:09 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2008 Travis Geiselbrecht |
Sridhar Parasuram | 85f0803 | 2015-05-14 16:16:14 -0700 | [diff] [blame] | 3 | * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved. |
Travis Geiselbrecht | 1d0df69 | 2008-09-01 02:26:09 -0700 | [diff] [blame] | 4 | * |
| 5 | * Permission is hereby granted, free of charge, to any person obtaining |
| 6 | * a copy of this software and associated documentation files |
| 7 | * (the "Software"), to deal in the Software without restriction, |
| 8 | * including without limitation the rights to use, copy, modify, merge, |
| 9 | * publish, distribute, sublicense, and/or sell copies of the Software, |
| 10 | * and to permit persons to whom the Software is furnished to do so, |
| 11 | * subject to the following conditions: |
| 12 | * |
| 13 | * The above copyright notice and this permission notice shall be |
| 14 | * included in all copies or substantial portions of the Software. |
| 15 | * |
| 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
| 17 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| 18 | * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| 19 | * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| 20 | * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
| 21 | * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| 22 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
| 23 | */ |
| 24 | #include <asm.h> |
| 25 | #include <arch/ops.h> |
| 26 | #include <arch/defines.h> |
| 27 | |
| 28 | .text |
| 29 | |
| 30 | #if ARM_WITH_CACHE |
| 31 | |
| 32 | /* low level cache routines for various cpu families */ |
| 33 | |
| 34 | #if ARM_CPU_ARM1136 || ARM_CPU_ARM926 |
| 35 | |
/* void arch_disable_cache(uint flags)
 *
 * In:  r0 = flags, DCACHE and/or ICACHE bits select which caches to turn off.
 * Runs with interrupts masked.  The dcache is cleaned & invalidated before it
 * is disabled so no dirty lines are lost; the icache is invalidated after it
 * is disabled.  Restores the caller's CPSR interrupt state on exit.
 * Clobbers: r1, r2, r12, flags.
 */
FUNCTION(arch_disable_cache)
	mov	r12, #0			// zero register, for cp15 ops whose Rt is "should be zero"
	mrs	r3, cpsr		// save the old interrupt state
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled
#else
	orr	r2, r3, #(1<<7)		// mask IRQ in a scratch copy so r3 keeps the
	msr	cpsr, r2		// original state for the restore at the end
#endif

.Ldcache_disable:
	tst	r0, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r1, c1, c0, 0	// cr1
	tst	r1, #(1<<2)		// is the dcache already disabled?
	beq	.Licache_disable

	bic	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0	// disable dcache

#if ARM_CPU_ARM1136
	mcr	p15, 0, r12, c7, c14, 0	// clean & invalidate entire dcache
#elif ARM_CPU_ARM926
0:
	mrc	p15, 0, r15, c7, c14, 3	// test, clean & invalidate dcache; Rd=r15 transfers flags
	bne	0b			// loop until the whole dcache is clean
#else
#error whut?
#endif
	mcr	p15, 0, r12, c7, c10, 4	// data sync barrier (formerly drain write buffer); Rt SBZ

.Licache_disable:
	tst	r0, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r1, c1, c0, 0	// cr1
	bic	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0	// disable icache

	mcr	p15, 0, r12, c7, c5, 0	// invalidate icache

.Ldone_disable:
	msr	cpsr, r3		// restore the caller's interrupt state
	bx	lr
| 81 | |
/* void arch_enable_cache(uint flags)
 *
 * In:  r0 = flags, DCACHE and/or ICACHE bits select which caches to turn on.
 * Runs with interrupts masked.  Each cache is invalidated before being
 * enabled so no stale lines are hit.  Restores the caller's CPSR interrupt
 * state on exit.
 * Clobbers: r1, r2, r12, flags.
 */
FUNCTION(arch_enable_cache)
	mov	r12, #0			// zero register, for cp15 ops whose Rt is "should be zero"
	mrs	r3, cpsr		// save the old interrupt state
#if ARM_ISA_ARMv6
	.word	0xf10c01c0 /* cpsid iaf */	// interrupts disabled
#else
	orr	r2, r3, #(1<<7)		// mask IRQ in a scratch copy so r3 keeps the
	msr	cpsr, r2		// original state for the restore at the end
#endif

.Ldcache_enable:
	tst	r0, #DCACHE
	beq	.Licache_enable
	mrc	p15, 0, r1, c1, c0, 0	// cr1
	tst	r1, #(1<<2)		// is the dcache already enabled?
	bne	.Licache_enable

	mcr	p15, 0, r12, c7, c6, 0	// invalidate dcache

	orr	r1, #(1<<2)
	mcr	p15, 0, r1, c1, c0, 0	// enable dcache

.Licache_enable:
	tst	r0, #ICACHE
	beq	.Ldone_enable

	mcr	p15, 0, r12, c7, c5, 0	// invalidate icache

	mrc	p15, 0, r1, c1, c0, 0	// cr1
	orr	r1, #(1<<12)
	mcr	p15, 0, r1, c1, c0, 0	// enable icache

.Ldone_enable:
	msr	cpsr, r3		// restore the caller's interrupt state
	bx	lr
| 118 | |
| 119 | #elif ARM_CPU_CORTEX_A8 |
| 120 | |
/* void arch_disable_cache(uint flags)
 *
 * Cortex-A8 (ARMv7) variant.
 * In:  r0 = flags, DCACHE and/or ICACHE bits select which caches to turn off.
 * Runs with interrupts masked; restores the caller's CPSR state on exit.
 * Order matters: dsb before the SCTLR write, isb after it, then the set/way
 * clean+invalidate walk.  The icache is always invalidated on the way out,
 * even when only DCACHE was requested.
 * NOTE: relies on the flush/invalidate helpers not clobbering r7, which
 * holds the flags argument across both bl calls.
 */
FUNCTION(arch_disable_cache)
	stmfd	sp!, {r4-r11, lr}

	mov	r7, r0			// save flags (r7 must survive the helper calls)

	mrs	r12, cpsr		// save the old interrupt state
	cpsid	iaf			// interrupts disabled

.Ldcache_disable:
	tst	r7, #DCACHE
	beq	.Licache_disable
	mrc	p15, 0, r0, c1, c0, 0	// cr1
	tst	r0, #(1<<2)		// is the dcache already disabled?
	beq	.Ldcache_already_disabled

	bic	r0, #(1<<2)
	// make sure all data operations are completed
	dsb
	mcr	p15, 0, r0, c1, c0, 0	// disable dcache
	// make sure previous instruction finishes before we clean and flush
	isb

	// flush and invalidate the dcache
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	flush_invalidate_cache_v7

.Ldcache_already_disabled:
	// make sure all of the caches are invalidated
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	invalidate_cache_v7

.Licache_disable:
	tst	r7, #ICACHE
	beq	.Ldone_disable

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	bic	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0	// disable icache
	// make sure previous instruction finishes
	isb

.Ldone_disable:
	// make sure the icache is always invalidated
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// invalidate icache to PoU
	// make sure that data is in sync
	dsb

	msr	cpsr, r12		// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}
| 172 | |
/* void arch_enable_cache(uint flags)
 *
 * Cortex-A8 (ARMv7) variant.
 * In:  r0 = flags, DCACHE and/or ICACHE bits select which caches to turn on.
 * Runs with interrupts masked; each cache is invalidated before being
 * enabled.  Restores the caller's CPSR state on exit.
 * NOTE: relies on invalidate_cache_v7 not clobbering r7, which holds the
 * flags argument across the bl call.
 */
FUNCTION(arch_enable_cache)
	stmfd	sp!, {r4-r11, lr}

	mov	r7, r0			// save flags (r7 must survive the helper call)

	mrs	r12, cpsr		// save the old interrupt state
	cpsid	iaf			// interrupts disabled (same mnemonic form as arch_disable_cache)

.Ldcache_enable:
	tst	r7, #DCACHE
	beq	.Licache_enable
	mrc	p15, 0, r0, c1, c0, 0	// cr1
	tst	r0, #(1<<2)		// is the dcache already enabled?
	bne	.Licache_enable

	// invalidate L1 and L2
	// NOTE: trashes a bunch of registers, can't be spilling stuff to the stack
	bl	invalidate_cache_v7

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	orr	r0, #(1<<2)
	mcr	p15, 0, r0, c1, c0, 0	// enable dcache

.Licache_enable:
	tst	r7, #ICACHE
	beq	.Ldone_enable

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// invalidate icache to PoU

	mrc	p15, 0, r0, c1, c0, 0	// cr1
	orr	r0, #(1<<12)
	mcr	p15, 0, r0, c1, c0, 0	// enable icache

.Ldone_enable:
	msr	cpsr, r12		// restore the caller's interrupt state
	ldmfd	sp!, {r4-r11, pc}
| 211 | |
// flush (clean) the entire data/unified cache hierarchy by set/way (ARMv7)
// Walks every data/unified level reported by CLIDR up to the LoC and issues
// DCCSW (clean by set/way) for every set and way.
// NOTE: deliberately stackless; clobbers r0-r6, r9-r11 and flags.  It must
// NOT touch r7/r8: the A8 arch_enable/disable_cache callers keep the flags
// argument live in r7 across this call (the previous code used R7 as the
// set counter and corrupted it, making the callers' later "tst r7, #ICACHE"
// always true).
flush_invalidate_cache_v7:
	DMB
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1	// Read CLIDR
	ANDS	R3, R0, #0x7000000	// extract the LoC field (bits 26:24)
	MOV	R3, R3, LSR #23		// Cache level value (naturally aligned, = 2 * LoC)
	BEQ	.Lfinished
	MOV	R10, #0
.Loop1:
	ADD	R2, R10, R10, LSR #1	// Work out 3xcachelevel
	MOV	R1, R0, LSR R2		// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7		// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip			// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0	// write the Cache Size selection register
	ISB				// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0	// reads current Cache Size ID register
	AND	R2, R1, #0x7		// extract the line length field
	ADD	R2, R2, #4		// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3	// R4 is the max number on the way size (right aligned)
	CLZ	R5, R4			// R5 is the bit position of the way size increment
	MOV	R9, R4			// R9 working copy of max way size (right aligned)
.Loop2:
	LDR	R6, =0x00007FFF
	ANDS	R6, R6, R1, LSR #13	// R6 is the max number of the index size (right aligned)
.Loop3:
	ORR	R11, R10, R9, LSL R5	// factor in the way number and cache number into R11
	ORR	R11, R11, R6, LSL R2	// factor in the index number
	MCR	p15, 0, R11, c7, c10, 2	// clean by set/way (DCCSW)
	SUBS	R6, R6, #1		// decrement the index (set)
	BGE	.Loop3
	SUBS	R9, R9, #1		// decrement the way
	BGE	.Loop2
.Lskip:
	ADD	R10, R10, #2		// increment the cache number
	CMP	R3, R10
	BGT	.Loop1

.Lfinished:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0	// select cache level 0
	dsb
	ISB

	bx	lr
| 259 | |
// clean & invalidate the entire data/unified cache hierarchy by set/way (ARMv7)
// Walks every data/unified level reported by CLIDR up to the LoC and issues
// DCCISW (clean and invalidate by set/way) for every set and way.
// NOTE: deliberately stackless; clobbers r0-r6, r9-r11 and flags.  It must
// NOT touch r7/r8: the A8 arch_enable/disable_cache callers keep the flags
// argument live in r7 across this call (the previous code used R7 as the
// set counter and corrupted it, making the callers' later "tst r7, #ICACHE"
// always true).
invalidate_cache_v7:
	/* from ARMv7 manual, B2-17 */
	MRC	p15, 1, R0, c0, c0, 1	// Read CLIDR
	ANDS	R3, R0, #0x7000000	// extract the LoC field (bits 26:24)
	MOV	R3, R3, LSR #23		// Cache level value (naturally aligned, = 2 * LoC)
	BEQ	.Lfinished_invalidate
	MOV	R10, #0
.Loop1_invalidate:
	ADD	R2, R10, R10, LSR #1	// Work out 3xcachelevel
	MOV	R1, R0, LSR R2		// bottom 3 bits are the Cache type for this level
	AND	R1, R1, #7		// get those 3 bits alone
	CMP	R1, #2
	BLT	.Lskip_invalidate	// no cache or only instruction cache at this level
	MCR	p15, 2, R10, c0, c0, 0	// write the Cache Size selection register
	ISB				// ISB to sync the change to the CacheSizeID reg
	MRC	p15, 1, R1, c0, c0, 0	// reads current Cache Size ID register
	AND	R2, R1, #0x7		// extract the line length field
	ADD	R2, R2, #4		// add 4 for the line length offset (log2 16 bytes)
	LDR	R4, =0x3FF
	ANDS	R4, R4, R1, LSR #3	// R4 is the max number on the way size (right aligned)
	CLZ	R5, R4			// R5 is the bit position of the way size increment
	MOV	R9, R4			// R9 working copy of the max way size (right aligned)
.Loop2_invalidate:
	LDR	R6, =0x00007FFF
	ANDS	R6, R6, R1, LSR #13	// R6 is the max number of the index size (right aligned)
.Loop3_invalidate:
	ORR	R11, R10, R9, LSL R5	// factor in the way number and cache number into R11
	ORR	R11, R11, R6, LSL R2	// factor in the index number
	MCR	p15, 0, R11, c7, c14, 2	// clean and invalidate by set/way (DCCISW)
	SUBS	R6, R6, #1		// decrement the index (set)
	BGE	.Loop3_invalidate
	SUBS	R9, R9, #1		// decrement the way
	BGE	.Loop2_invalidate
.Lskip_invalidate:
	ADD	R10, R10, #2		// increment the cache number
	CMP	R3, R10
	BGT	.Loop1_invalidate

.Lfinished_invalidate:
	mov	r10, #0
	mcr	p15, 2, r10, c0, c0, 0	// select cache level 0
	dsb
	ISB

	bx	lr
| 306 | |
| 307 | #else |
| 308 | #error unhandled cpu |
| 309 | #endif |
| 310 | |
| 311 | #if ARM_CPU_ARM926 || ARM_CPU_ARM1136 || ARM_CPU_CORTEX_A8 |
| 312 | /* shared cache flush routines */ |
| 313 | |
/* void arch_clean_cache_range(addr_t start, size_t len);
 *
 * Clean (write back, do not invalidate) dcache lines covering [start, start+len)
 * to the PoC, by MVA.  The start address is rounded down to a cache-line
 * boundary; the do-while shape means at least one line is cleaned even when
 * len == 0.
 * Clobbers: r0, r2, flags.
 */
FUNCTION(arch_clean_cache_range)
	add	r2, r0, r1		// Calculate the end address
	bic	r0,#(CACHE_LINE-1)	// Align start with cache line
0:
	mcr	p15, 0, r0, c7, c10, 1	// clean cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b

	mov	r0, #0
	dsb				// ensure the cleans have completed

	bx	lr
| 328 | |
/* void arch_clean_invalidate_cache_range(addr_t start, size_t len);
 *
 * Clean and invalidate dcache lines covering [start, start+len) to the PoC,
 * by MVA.  The start address is rounded down to a cache-line boundary; the
 * do-while shape means at least one line is processed even when len == 0.
 * Clobbers: r0, r2, flags.
 */
FUNCTION(arch_clean_invalidate_cache_range)
	dsb				// complete outstanding memory accesses first
	add	r2, r0, r1		// Calculate the end address
	bic	r0,#(CACHE_LINE-1)	// Align start with cache line
0:
	mcr	p15, 0, r0, c7, c14, 1	// clean & invalidate cache to PoC by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b

	mov	r0, #0
	dsb				// ensure the maintenance ops have completed

	bx	lr
Travis Geiselbrecht | 8b99c68 | 2010-06-16 00:37:13 -0700 | [diff] [blame] | 344 | |
/* void arch_invalidate_cache_range(addr_t start, size_t len);
 *
 * Invalidate (discard, no write back) dcache lines covering [start, start+len),
 * by MVA.  The start address is rounded down to a cache-line boundary.
 * NOTE(review): because the range is widened to whole lines and this op does
 * not clean, dirty data sharing the first/last line with the range is
 * discarded — callers presumably guarantee line-aligned buffers; confirm.
 * Clobbers: r0, r2, flags.
 */
FUNCTION(arch_invalidate_cache_range)
	/* invalidate cache line */
	add	r2, r0, r1		// Calculate the end address
	bic	r0,#(CACHE_LINE-1)	// Align start with cache line
0:
	mcr	p15, 0, r0, c7, c6, 1	// invalidate dcache line by MVA
	add	r0, r0, #CACHE_LINE
	cmp	r0, r2
	blo	0b
	mov	r0, #0
	dsb				// ensure the invalidates have completed
	bx	lr
| 358 | |
/* void arch_sync_cache_range(addr_t start, size_t len);
 *
 * Synchronize the icache with the dcache for [start, start+len): clean the
 * dcache range, then invalidate the entire icache to PoU.  Used after
 * writing code (e.g. loading an image) before executing it.
 * Clobbers: r0, r2, flags.
 */
FUNCTION(arch_sync_cache_range)
	push	{ r14 }
	bl	arch_clean_cache_range	// write the range back to PoC first

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	// invalidate icache to PoU

	pop	{ pc }
| 368 | |
Travis Geiselbrecht | 1d0df69 | 2008-09-01 02:26:09 -0700 | [diff] [blame] | 369 | #else |
| 370 | #error unhandled cpu |
| 371 | #endif |
| 372 | |
| 373 | #else |
| 374 | |
/* no cache: every cache routine is a no-op so callers can be unconditional */

FUNCTION(arch_disable_cache)
	bx	lr

FUNCTION(arch_enable_cache)
	bx	lr

FUNCTION(arch_clean_cache_range)
	bx	lr

FUNCTION(arch_clean_invalidate_cache_range)
	bx	lr

FUNCTION(arch_sync_cache_range)
	bx	lr
| 391 | |
Travis Geiselbrecht | 1d0df69 | 2008-09-01 02:26:09 -0700 | [diff] [blame] | 392 | #endif // ARM_WITH_CACHE |
| 393 | |