/*
 * Idle processing for ARMv6-based Qualcomm SoCs.
 * Work around bugs with SWFI.
 *
 * Copyright (C) 2007 Google, Inc.
 * Copyright (c) 2007-2009, Code Aurora Forum. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 */
| 18 | |
#include <linux/linkage.h>
#include <asm/assembler.h>

	.extern write_to_strongly_ordered_memory
| 23 | |
/*
 * void msm_arch_idle(void)
 *
 * Enter the low-power wait-for-interrupt (SWFI) state.  I, A and F
 * exceptions are masked for the duration of the sequence and the
 * caller's CPSR is restored before returning.  The cache maintenance
 * performed around SWFI works around SoC-specific SWFI bugs; the
 * instruction ordering here is deliberate — do not reorder.
 */
ENTRY(msm_arch_idle)
	mrs	r2, cpsr		/* save the CPSR state */
	cpsid	iaf			/* explicitly disable I, A and F */

#if defined(CONFIG_ARCH_MSM7X27)
	/*
	 * MSM7x27 path: clean the data cache and drain the AXI write
	 * buffer (by writing to strongly-ordered memory) before SWFI so
	 * no buffered writes are stranded while the core is idle.
	 */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 0	/* flush entire data cache */
	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	stmfd	sp!, {r2, lr}		/* preserve r2 (saved CPSR) and LR across the call */
	bl	write_to_strongly_ordered_memory /* flush AXI bus buffer */
	ldmfd	sp!, {r2, lr}
	mcr	p15, 0, r0, c7, c0, 4	/* wait for interrupt */
#else
	/*
	 * Generic ARMv6 path: turn the caches off, clean/invalidate
	 * them, SWFI, then restore the original cache enables.
	 * r1 holds the original control register for the restore.
	 */
	mrc	p15, 0, r1, c1, c0, 0	/* read current CR */
	bic	r0, r1, #(1 << 2)	/* clear dcache bit */
	bic	r0, r0, #(1 << 12)	/* clear icache bit */
	mcr	p15, 0, r0, c1, c0, 0	/* disable d/i cache */

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	/* invalidate icache and flush */
					/* branch target cache */
	mcr	p15, 0, r0, c7, c14, 0	/* clean and invalidate dcache */

	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	mcr	p15, 0, r0, c7, c0, 4	/* wait for interrupt */

	mcr	p15, 0, r1, c1, c0, 0	/* restore d/i cache */
	mcr	p15, 0, r0, c7, c5, 4	/* isb */
#endif

	msr	cpsr_c, r2		/* restore the CPSR state */
	mov	pc, lr
| 56 | |
/*
 * int msm_pm_collapse(void)
 *
 * Attempt full power collapse.  Saves r4-r14 and key cp15 state into
 * saved_state (layout must match the .data block and the ldmdb-based
 * restore in msm_pm_collapse_exit exactly), disables and cleans the
 * caches, then executes SWFI.
 *
 * Returns 0 if the core merely woke from SWFI (collapse failed) and
 * execution fell through here.  If collapse succeeded, the core
 * instead resumes in msm_pm_collapse_exit (warm boot), which returns
 * 1 through the restored LR to this function's caller.
 */
ENTRY(msm_pm_collapse)
	ldr	r0, =saved_state
	stmia	r0!, {r4-r14}

	cpsid	f			/* mask FIQ across the save/SWFI sequence */

	/* Save cp15 state that the boot ROM / warm boot will not preserve. */
	mrc	p15, 0, r1, c1, c0, 0	/* MMU control */
	mrc	p15, 0, r2, c2, c0, 0	/* ttb */
	mrc	p15, 0, r3, c3, c0, 0	/* dacr */
	mrc	p15, 0, ip, c13, c0, 1	/* context ID */
	stmia	r0!, {r1-r3, ip}
#if defined(CONFIG_OPROFILE)
	/* Performance monitor state, only saved when profiling is built in. */
	mrc	p15, 0, r1, c15, c12, 0	/* pmnc */
	mrc	p15, 0, r2, c15, c12, 1	/* ccnt */
	mrc	p15, 0, r3, c15, c12, 2	/* pmn0 */
	mrc	p15, 0, ip, c15, c12, 3	/* pmn1 */
	stmia	r0!, {r1-r3, ip}
#endif
	mrc	p15, 0, r1, c1, c0, 2	/* read CACR */
	stmia	r0!, {r1}

	/* Disable and clean/invalidate caches before the collapse attempt. */
	mrc	p15, 0, r1, c1, c0, 0	/* read current CR */
	bic	r0, r1, #(1 << 2)	/* clear dcache bit */
	bic	r0, r0, #(1 << 12)	/* clear icache bit */
	mcr	p15, 0, r0, c1, c0, 0	/* disable d/i cache */

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	/* invalidate icache and flush */
					/* branch target cache */
	mcr	p15, 0, r0, c7, c14, 0	/* clean and invalidate dcache */

	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	mcr	p15, 0, r0, c7, c0, 4	/* wait for interrupt */

	/* We only get here if power collapse did not happen. */
	mcr	p15, 0, r1, c1, c0, 0	/* restore d/i cache */
	mcr	p15, 0, r0, c7, c5, 4	/* isb */

	cpsie	f

	ldr	r0, =saved_state	/* restore registers */
	ldmfd	r0, {r4-r14}
	mov	r0, #0			/* return power collapse failed */
	mov	pc, lr
| 100 | |
/*
 * msm_pm_collapse_exit - warm-boot resume path after power collapse.
 *
 * Entered with the MMU off, executing from the physical address this
 * code was loaded at.  It locates the physically-addressed saved_state
 * area, restores cp15 and general registers saved by msm_pm_collapse,
 * installs a temporary 1:1 section mapping covering this code so the
 * MMU can be enabled without faulting, jumps to the virtual alias,
 * removes the temporary mapping, then returns 1 through the restored
 * LR — i.e. msm_pm_collapse's caller sees a return value of 1.
 */
ENTRY(msm_pm_collapse_exit)
#if 0 /* serial debug */
	mov	r0, #0x80000016
	mcr	p15, 0, r0, c15, c2, 4
	mov	r0, #0xA9000000
	add	r0, r0, #0x00A00000	/* UART1 */
	/*add	r0, r0, #0x00C00000*/	/* UART3 */
	mov	r1, #'A'
	str	r1, [r0, #0x00C]
#endif
	/*
	 * Compute the PHYSICAL address of saved_state_end in r1:
	 * adr gives the current (physical) address of this code while
	 * ldr= gives its link-time virtual address, so (r3 - r2) is the
	 * phys-virt delta, applied to the virtual saved_state_end.
	 */
	ldr	r1, =saved_state_end
	ldr	r2, =msm_pm_collapse_exit
	adr	r3, msm_pm_collapse_exit
	add	r1, r1, r3
	sub	r1, r1, r2

	/* Restore cp15 state by walking saved_state backwards (ldmdb). */
	ldmdb	r1!, {r2}
	mcr	p15, 0, r2, c1, c0, 2	/* restore CACR */
#if defined(CONFIG_OPROFILE)
	ldmdb	r1!, {r2-r5}		/* r2=pmnc r3=ccnt r4=pmn0 r5=pmn1 */
	mcr	p15, 0, r3, c15, c12, 1	/* ccnt */
	mcr	p15, 0, r4, c15, c12, 2	/* pmn0 */
	mcr	p15, 0, r5, c15, c12, 3	/* pmn1 */
	mcr	p15, 0, r2, c15, c12, 0	/* pmnc last: re-enables the counters */
#endif
	ldmdb	r1!, {r2-r5}		/* r2=MMU control r3=ttb r4=dacr r5=context ID */
	mcr	p15, 0, r4, c3, c0, 0	/* dacr */
	mcr	p15, 0, r3, c2, c0, 0	/* ttb */
	mcr	p15, 0, r5, c13, c0, 1	/* context ID */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	/* isb */
	ldmdb	r1!, {r4-r14}		/* general registers, incl. sp and lr */

	/* Add 1:1 map in the PMD to allow smooth switch when turning on MMU */
	and	r3, r3, #~0x7F		/* mask off lower 7 bits of TTB */
					/* (TTBR0 low bits are attribute/flag
					 * bits, not part of the table base) */
	adr	r0, msm_pm_mapped_pa	/* get address of the mapped instr */
	lsr	r1, r0, #20		/* get the addr range of addr in MB */
	lsl	r1, r1, #2		/* multiply by 4 to get to the pg index */
	add	r3, r3, r1		/* pgd + pgd_index(addr) */
	ldr	r1, [r3]		/* save current entry to r1 */
	lsr	r0, #20			/* align current addr to 1MB boundary */
	lsl	r0, #20
	/* Create new entry for this 1MB page */
	orr	r0, r0, #0x400		/* PMD_SECT_AP_WRITE */
	orr	r0, r0, #0x2		/* PMD_TYPE_SECT|PMD_DOMAIN(DOMAIN_KERNEL) */
	str	r0, [r3]		/* put new entry into the MMU table */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	mcr	p15, 0, r2, c1, c0, 0	/* MMU control: turns the MMU back on */
	mcr	p15, 0, r0, c7, c5, 4	/* isb */
msm_pm_mapped_pa:
	/* Switch to virtual: ldr= yields the virtual address of the label. */
	adr	r2, msm_pm_pa_to_va
	ldr	r0, =msm_pm_pa_to_va
	mov	pc, r0
msm_pm_pa_to_va:
	sub	r0, r0, r2		/* r0 = virt - phys delta */
	/* Restore r1 in MMU table (undo the temporary 1:1 section entry) */
	add	r3, r3, r0		/* rebase pgd slot pointer to virtual */
	str	r1, [r3]

	/* Flush everything: the temporary mapping must not linger in TLBs
	 * or predictors, and caches were off across the collapse. */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 0	/* flush entire data cache */
	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	mcr	p15, 0, r0, c7, c5, 4	/* isb */
	mcr	p15, 0, r0, c8, c7, 0	/* invalidate entire unified TLB */
	mcr	p15, 0, r0, c7, c5, 6	/* invalidate entire branch target
					 * cache */
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate both data and instruction
					 * cache */
	mcr	p15, 0, r0, c7, c10, 4	/* dsb */
	mcr	p15, 0, r0, c7, c5, 4	/* isb */

	mov	r0, #1			/* report successful power collapse */
	mov	pc, lr			/* return via LR restored from saved_state */
	nop
	nop
	nop
	nop
	nop
1:	b	1b			/* should never be reached */
| 182 | |
| 183 | |
	.data

/*
 * CPU context saved by msm_pm_collapse (forward, stmia from
 * saved_state) and restored by msm_pm_collapse_exit (backwards,
 * ldmdb from saved_state_end).  The layout below must match both
 * register-list sequences exactly — change all three together.
 */
saved_state:
	.space	4 * 11			/* r4-14 */
	.space	4 * 4			/* cp15 - MMU control, ttb, dacr, context ID */
#if defined(CONFIG_OPROFILE)
	.space	4 * 4			/* more cp15 - pmnc, ccnt, pmn0, pmn1 */
#endif
	.space	4			/* cacr */
saved_state_end:
| 194 | |