/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define SDRC_SCRATCHPAD_SEM_V	0xfa00291c

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
				       * available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
				+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

	.text
/* Function to acquire the semaphore in scratchpad */
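/*
 * A note on the locking scheme (inferred from the code below): the lock
 * word reads 0 when free and 1 when held.  SWP atomically writes 1 and
 * returns the previous value, so reading back 0 means we took the lock;
 * anything else means another user of the scratchpad holds it and we spin
 * until it reads free again.
 */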
ENTRY(lock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
wait_sem:
	mov	r0, #1
	ldr	r1, sdrc_scratchpad_sem
wait_loop:
	ldr	r2, [r1]	@ load the lock value
	cmp	r2, r0		@ is the lock free ?
	beq	wait_loop	@ not free...
	swp	r2, r0, [r1]	@ semaphore free so lock it and proceed
	cmp	r2, r0		@ did we succeed ?
	beq	wait_sem	@ no - try again
	ldmfd	sp!, {pc}	@ restore regs and return
sdrc_scratchpad_sem:
	.word	SDRC_SCRATCHPAD_SEM_V
ENTRY(lock_scratchpad_sem_sz)
	.word	. - lock_scratchpad_sem

	.text
/* Function to release the scratchpad semaphore */
ENTRY(unlock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
	ldr	r3, sdrc_scratchpad_sem
	mov	r2, #0
	str	r2, [r3]
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(unlock_scratchpad_sem_sz)
	.word	. - unlock_scratchpad_sem

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer

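/*
 * Assumed intent (the original carries no header comment): on ES3.x silicon
 * the SDRC has to be re-programmed before SDRAM is touched after an off-mode
 * wakeup.  This routine, copied to and executed from SRAM, unblocks SDRC
 * register access, rewrites the MR/EMR2 registers of both chip-selects and
 * issues manual autorefresh commands so the memory comes back out of
 * self-refresh cleanly.
 */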
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix

/* Function to call rom code to save secure ram context */
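/*
 * Rough description (an assumption, not stated in the original comments):
 * the actual save is performed by the secure ROM/PPA service with ID 25.
 * r0 carries the SDRAM destination, which is patched into api_params; the
 * api_params pointer is then turned into a physical SRAM address (low 16
 * bits kept, SRAM physical base OR-ed in) before trapping into the SMI
 * monitor.
 */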
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

/*
 * Forces OMAP into idle state
 *
 * omap34xx_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot.  When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
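/*
 * ES3.x resume path (summary added here, taken from the code below): if the
 * CORE domain's previous power state reads OFF, es3_sdrc_fix is copied word
 * by word into SRAM and executed there, so the SDRC can be re-kicked before
 * the common restore path touches SDRAM.
 */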
restore_es3:
	/*b restore_es3*/	@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore*/	@ Enable to debug restore code
	/* Check what was the reason for mpu reset and store the reason in r9 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
	bne	logic_l1_restore
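	/*
	 * Full restore: the MPU was OFF, so L2 must be invalidated first.
	 * The device type field of CONTROL_STATUS picks the mechanism
	 * (assumption: 0x300 is the general-purpose device value): GP parts
	 * take the plain SMI at l2_inv_gp, secure parts go through the PPA
	 * services (IDs 40 and 42) below.
	 */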
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]
	mov	r12, #0x3
	.word	0xE1600070		@ Call SMI monitor (smieq)
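/*
 * From here on r3 walks the CPU context that save_context_wfi streamed to
 * SDRAM (the pointer is kept at scratchpad offset 0xBC); the CP15 registers
 * are reloaded in the same order they were saved.
 */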
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	adds	r3, r3, #8
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* User r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* User ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
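/*
 * Added summary of the trick below: build a temporary 1MB section entry that
 * maps the section containing this code one-to-one (virtual == physical,
 * taken from pc), so execution survives the moment the MMU is turned on.
 * The original first-level entry and its address are stashed in the
 * scratchpad (offsets 0xC0/0xC4) so they can be put back once the MMU is up
 * again.
 */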
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5	/* r4 = 31 to 20 bits of pc */
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has value to be written to table entry */
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	add	r2, r4	/* r2 has the location which needs to be modified */
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register, but don't enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
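/*
 * Added summary: the off-mode save path.  The CPU context (aux control
 * register, sp/spsr/lr, the CP15 registers, cpsr and the control register)
 * is streamed to the SDRAM buffer passed in r0, in the order the restore
 * path above expects, before the caches are cleaned and WFI is executed.
 */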
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0	/* Store SDRAM address in r8 */
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2	/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1	/* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0	/* Don't clean L2 */
	movne	r9, #0x1	/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	 * and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* User r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
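/*
 * The loop below is the generic ARMv7 clean-by-set/way walk (added note,
 * matching what the code does): CLIDR gives the level of coherency, and for
 * each cache level CCSIDR supplies the line size, associativity and number
 * of sets, which are combined into the operand of the c7, c10, 2
 * clean-and-invalidate operation.
 */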
clean_caches:
	/* Clean Data or unified cache to POU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1	/* Check whether L2 inval is required or not */
	bne	skip_l2_inval
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* mask off the bits for current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find maximum number of the way size */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	mov	r9, r4
	/* create working copy of max way size */
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean & invalidate by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

/* Make sure SDRC accesses are ok */
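/*
 * Added detail: spin until CM_IDLEST1_CORE shows the SDRC out of idle,
 * clear the self-refresh-on-idle bit in SDRC_POWER that was set on the way
 * into suspend, and then, depending on the DLL control mode, either return
 * straight away or wait on SDRC_DLLA_STATUS until the DLL reports locked.
 */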
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr

cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend