/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 13 | |
David Daney | a36d822 | 2014-05-28 23:52:04 +0200 | [diff] [blame^] | 14 | #define USE_ALTERNATE_RESUME_IMPL 1 |
| 15 | .set push |
| 16 | .set arch=mips64r2 |
| 17 | #include "r4k_switch.S" |
| 18 | .set pop |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 19 | /* |
| 20 | * task_struct *resume(task_struct *prev, task_struct *next, |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 21 | * struct thread_info *next_ti, int usedfpu) |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 22 | */ |
| 23 | .align 7 |
| 24 | LEAF(resume) |
| 25 | .set arch=octeon |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 26 | mfc0 t1, CP0_STATUS |
| 27 | LONG_S t1, THREAD_STATUS(a0) |
| 28 | cpu_save_nonscratch a0 |
| 29 | LONG_S ra, THREAD_REG31(a0) |
| 30 | |
David Daney | a36d822 | 2014-05-28 23:52:04 +0200 | [diff] [blame^] | 31 | /* |
| 32 | * check if we need to save FPU registers |
| 33 | */ |
| 34 | PTR_L t3, TASK_THREAD_INFO(a0) |
| 35 | LONG_L t0, TI_FLAGS(t3) |
| 36 | li t1, _TIF_USEDFPU |
| 37 | and t2, t0, t1 |
| 38 | beqz t2, 1f |
| 39 | nor t1, zero, t1 |
| 40 | |
| 41 | and t0, t0, t1 |
| 42 | LONG_S t0, TI_FLAGS(t3) |
| 43 | |
| 44 | /* |
| 45 | * clear saved user stack CU1 bit |
| 46 | */ |
| 47 | LONG_L t0, ST_OFF(t3) |
| 48 | li t1, ~ST0_CU1 |
| 49 | and t0, t0, t1 |
| 50 | LONG_S t0, ST_OFF(t3) |
| 51 | |
| 52 | .set push |
| 53 | .set arch=mips64r2 |
| 54 | fpu_save_double a0 t0 t1 # c0_status passed in t0 |
| 55 | # clobbers t1 |
| 56 | .set pop |
| 57 | 1: |
| 58 | |
| 59 | /* check if we need to save COP2 registers */ |
| 60 | PTR_L t2, TASK_THREAD_INFO(a0) |
| 61 | LONG_L t0, ST_OFF(t2) |
| 62 | bbit0 t0, 30, 1f |
| 63 | |
| 64 | /* Disable COP2 in the stored process state */ |
| 65 | li t1, ST0_CU2 |
| 66 | xor t0, t1 |
| 67 | LONG_S t0, ST_OFF(t2) |
| 68 | |
| 69 | /* Enable COP2 so we can save it */ |
| 70 | mfc0 t0, CP0_STATUS |
| 71 | or t0, t1 |
| 72 | mtc0 t0, CP0_STATUS |
| 73 | |
| 74 | /* Save COP2 */ |
| 75 | daddu a0, THREAD_CP2 |
| 76 | jal octeon_cop2_save |
| 77 | dsubu a0, THREAD_CP2 |
| 78 | |
| 79 | /* Disable COP2 now that we are done */ |
| 80 | mfc0 t0, CP0_STATUS |
| 81 | li t1, ST0_CU2 |
| 82 | xor t0, t1 |
| 83 | mtc0 t0, CP0_STATUS |
| 84 | |
| 85 | 1: |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 86 | #if CONFIG_CAVIUM_OCTEON_CVMSEG_SIZE > 0 |
| 87 | /* Check if we need to store CVMSEG state */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 88 | mfc0 t0, $11,7 /* CvmMemCtl */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 89 | bbit0 t0, 6, 3f /* Is user access enabled? */ |
| 90 | |
| 91 | /* Store the CVMSEG state */ |
| 92 | /* Extract the size of CVMSEG */ |
| 93 | andi t0, 0x3f |
| 94 | /* Multiply * (cache line size/sizeof(long)/2) */ |
| 95 | sll t0, 7-LONGLOG-1 |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 96 | li t1, -32768 /* Base address of CVMSEG */ |
| 97 | LONG_ADDI t2, a0, THREAD_CVMSEG /* Where to store CVMSEG to */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 98 | synciobdma |
| 99 | 2: |
| 100 | .set noreorder |
| 101 | LONG_L t8, 0(t1) /* Load from CVMSEG */ |
| 102 | subu t0, 1 /* Decrement loop var */ |
| 103 | LONG_L t9, LONGSIZE(t1)/* Load from CVMSEG */ |
| 104 | LONG_ADDU t1, LONGSIZE*2 /* Increment loc in CVMSEG */ |
| 105 | LONG_S t8, 0(t2) /* Store CVMSEG to thread storage */ |
| 106 | LONG_ADDU t2, LONGSIZE*2 /* Increment loc in thread storage */ |
| 107 | bnez t0, 2b /* Loop until we've copied it all */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 108 | LONG_S t9, -LONGSIZE(t2)/* Store CVMSEG to thread storage */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 109 | .set reorder |
| 110 | |
| 111 | /* Disable access to CVMSEG */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 112 | mfc0 t0, $11,7 /* CvmMemCtl */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 113 | xori t0, t0, 0x40 /* Bit 6 is CVMSEG user enable */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 114 | mtc0 t0, $11,7 /* CvmMemCtl */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 115 | #endif |
| 116 | 3: |
Gregory Fong | 1400eb6 | 2013-06-17 19:36:07 +0000 | [diff] [blame] | 117 | |
| 118 | #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP) |
James Hogan | 8b3c569 | 2013-10-07 12:14:26 +0100 | [diff] [blame] | 119 | PTR_LA t8, __stack_chk_guard |
Gregory Fong | 1400eb6 | 2013-06-17 19:36:07 +0000 | [diff] [blame] | 120 | LONG_L t9, TASK_STACK_CANARY(a1) |
| 121 | LONG_S t9, 0(t8) |
| 122 | #endif |
| 123 | |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 124 | /* |
| 125 | * The order of restoring the registers takes care of the race |
| 126 | * updating $28, $29 and kernelsp without disabling ints. |
| 127 | */ |
| 128 | move $28, a2 |
| 129 | cpu_restore_nonscratch a1 |
| 130 | |
David Daney | a36d822 | 2014-05-28 23:52:04 +0200 | [diff] [blame^] | 131 | PTR_ADDU t0, $28, _THREAD_SIZE - 32 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 132 | set_saved_sp t0, t1, t2 |
| 133 | |
| 134 | mfc0 t1, CP0_STATUS /* Do we really need this? */ |
| 135 | li a3, 0xff01 |
| 136 | and t1, a3 |
| 137 | LONG_L a2, THREAD_STATUS(a1) |
| 138 | nor a3, $0, a3 |
| 139 | and a2, a3 |
| 140 | or a2, t1 |
| 141 | mtc0 a2, CP0_STATUS |
| 142 | move v0, a0 |
| 143 | jr ra |
| 144 | END(resume) |
| 145 | |
| 146 | /* |
| 147 | * void octeon_cop2_save(struct octeon_cop2_state *a0) |
| 148 | */ |
| 149 | .align 7 |
| 150 | LEAF(octeon_cop2_save) |
| 151 | |
| 152 | dmfc0 t9, $9,7 /* CvmCtl register. */ |
| 153 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 154 | /* Save the COP2 CRC state */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 155 | dmfc2 t0, 0x0201 |
| 156 | dmfc2 t1, 0x0202 |
| 157 | dmfc2 t2, 0x0200 |
| 158 | sd t0, OCTEON_CP2_CRC_IV(a0) |
| 159 | sd t1, OCTEON_CP2_CRC_LENGTH(a0) |
| 160 | sd t2, OCTEON_CP2_CRC_POLY(a0) |
| 161 | /* Skip next instructions if CvmCtl[NODFA_CP2] set */ |
| 162 | bbit1 t9, 28, 1f |
| 163 | |
| 164 | /* Save the LLM state */ |
| 165 | dmfc2 t0, 0x0402 |
| 166 | dmfc2 t1, 0x040A |
| 167 | sd t0, OCTEON_CP2_LLM_DAT(a0) |
| 168 | sd t1, OCTEON_CP2_LLM_DAT+8(a0) |
| 169 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 170 | 1: bbit1 t9, 26, 3f /* done if CvmCtl[NOCRYPTO] set */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 171 | |
| 172 | /* Save the COP2 crypto state */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 173 | /* this part is mostly common to both pass 1 and later revisions */ |
| 174 | dmfc2 t0, 0x0084 |
| 175 | dmfc2 t1, 0x0080 |
| 176 | dmfc2 t2, 0x0081 |
| 177 | dmfc2 t3, 0x0082 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 178 | sd t0, OCTEON_CP2_3DES_IV(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 179 | dmfc2 t0, 0x0088 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 180 | sd t1, OCTEON_CP2_3DES_KEY(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 181 | dmfc2 t1, 0x0111 /* only necessary for pass 1 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 182 | sd t2, OCTEON_CP2_3DES_KEY+8(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 183 | dmfc2 t2, 0x0102 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 184 | sd t3, OCTEON_CP2_3DES_KEY+16(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 185 | dmfc2 t3, 0x0103 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 186 | sd t0, OCTEON_CP2_3DES_RESULT(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 187 | dmfc2 t0, 0x0104 |
| 188 | sd t1, OCTEON_CP2_AES_INP0(a0) /* only necessary for pass 1 */ |
| 189 | dmfc2 t1, 0x0105 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 190 | sd t2, OCTEON_CP2_AES_IV(a0) |
| 191 | dmfc2 t2, 0x0106 |
| 192 | sd t3, OCTEON_CP2_AES_IV+8(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 193 | dmfc2 t3, 0x0107 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 194 | sd t0, OCTEON_CP2_AES_KEY(a0) |
| 195 | dmfc2 t0, 0x0110 |
| 196 | sd t1, OCTEON_CP2_AES_KEY+8(a0) |
| 197 | dmfc2 t1, 0x0100 |
| 198 | sd t2, OCTEON_CP2_AES_KEY+16(a0) |
| 199 | dmfc2 t2, 0x0101 |
| 200 | sd t3, OCTEON_CP2_AES_KEY+24(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 201 | mfc0 t3, $15,0 /* Get the processor ID register */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 202 | sd t0, OCTEON_CP2_AES_KEYLEN(a0) |
| 203 | li t0, 0x000d0000 /* This is the processor ID of Octeon Pass1 */ |
| 204 | sd t1, OCTEON_CP2_AES_RESULT(a0) |
| 205 | sd t2, OCTEON_CP2_AES_RESULT+8(a0) |
| 206 | /* Skip to the Pass1 version of the remainder of the COP2 state */ |
| 207 | beq t3, t0, 2f |
| 208 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 209 | /* the non-pass1 state when !CvmCtl[NOCRYPTO] */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 210 | dmfc2 t1, 0x0240 |
| 211 | dmfc2 t2, 0x0241 |
| 212 | dmfc2 t3, 0x0242 |
| 213 | dmfc2 t0, 0x0243 |
| 214 | sd t1, OCTEON_CP2_HSH_DATW(a0) |
| 215 | dmfc2 t1, 0x0244 |
| 216 | sd t2, OCTEON_CP2_HSH_DATW+8(a0) |
| 217 | dmfc2 t2, 0x0245 |
| 218 | sd t3, OCTEON_CP2_HSH_DATW+16(a0) |
| 219 | dmfc2 t3, 0x0246 |
| 220 | sd t0, OCTEON_CP2_HSH_DATW+24(a0) |
| 221 | dmfc2 t0, 0x0247 |
| 222 | sd t1, OCTEON_CP2_HSH_DATW+32(a0) |
| 223 | dmfc2 t1, 0x0248 |
| 224 | sd t2, OCTEON_CP2_HSH_DATW+40(a0) |
| 225 | dmfc2 t2, 0x0249 |
| 226 | sd t3, OCTEON_CP2_HSH_DATW+48(a0) |
| 227 | dmfc2 t3, 0x024A |
| 228 | sd t0, OCTEON_CP2_HSH_DATW+56(a0) |
| 229 | dmfc2 t0, 0x024B |
| 230 | sd t1, OCTEON_CP2_HSH_DATW+64(a0) |
| 231 | dmfc2 t1, 0x024C |
| 232 | sd t2, OCTEON_CP2_HSH_DATW+72(a0) |
| 233 | dmfc2 t2, 0x024D |
| 234 | sd t3, OCTEON_CP2_HSH_DATW+80(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 235 | dmfc2 t3, 0x024E |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 236 | sd t0, OCTEON_CP2_HSH_DATW+88(a0) |
| 237 | dmfc2 t0, 0x0250 |
| 238 | sd t1, OCTEON_CP2_HSH_DATW+96(a0) |
| 239 | dmfc2 t1, 0x0251 |
| 240 | sd t2, OCTEON_CP2_HSH_DATW+104(a0) |
| 241 | dmfc2 t2, 0x0252 |
| 242 | sd t3, OCTEON_CP2_HSH_DATW+112(a0) |
| 243 | dmfc2 t3, 0x0253 |
| 244 | sd t0, OCTEON_CP2_HSH_IVW(a0) |
| 245 | dmfc2 t0, 0x0254 |
| 246 | sd t1, OCTEON_CP2_HSH_IVW+8(a0) |
| 247 | dmfc2 t1, 0x0255 |
| 248 | sd t2, OCTEON_CP2_HSH_IVW+16(a0) |
| 249 | dmfc2 t2, 0x0256 |
| 250 | sd t3, OCTEON_CP2_HSH_IVW+24(a0) |
| 251 | dmfc2 t3, 0x0257 |
| 252 | sd t0, OCTEON_CP2_HSH_IVW+32(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 253 | dmfc2 t0, 0x0258 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 254 | sd t1, OCTEON_CP2_HSH_IVW+40(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 255 | dmfc2 t1, 0x0259 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 256 | sd t2, OCTEON_CP2_HSH_IVW+48(a0) |
| 257 | dmfc2 t2, 0x025E |
| 258 | sd t3, OCTEON_CP2_HSH_IVW+56(a0) |
| 259 | dmfc2 t3, 0x025A |
| 260 | sd t0, OCTEON_CP2_GFM_MULT(a0) |
| 261 | dmfc2 t0, 0x025B |
| 262 | sd t1, OCTEON_CP2_GFM_MULT+8(a0) |
| 263 | sd t2, OCTEON_CP2_GFM_POLY(a0) |
| 264 | sd t3, OCTEON_CP2_GFM_RESULT(a0) |
| 265 | sd t0, OCTEON_CP2_GFM_RESULT+8(a0) |
| 266 | jr ra |
| 267 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 268 | 2: /* pass 1 special stuff when !CvmCtl[NOCRYPTO] */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 269 | dmfc2 t3, 0x0040 |
| 270 | dmfc2 t0, 0x0041 |
| 271 | dmfc2 t1, 0x0042 |
| 272 | dmfc2 t2, 0x0043 |
| 273 | sd t3, OCTEON_CP2_HSH_DATW(a0) |
| 274 | dmfc2 t3, 0x0044 |
| 275 | sd t0, OCTEON_CP2_HSH_DATW+8(a0) |
| 276 | dmfc2 t0, 0x0045 |
| 277 | sd t1, OCTEON_CP2_HSH_DATW+16(a0) |
| 278 | dmfc2 t1, 0x0046 |
| 279 | sd t2, OCTEON_CP2_HSH_DATW+24(a0) |
| 280 | dmfc2 t2, 0x0048 |
| 281 | sd t3, OCTEON_CP2_HSH_DATW+32(a0) |
| 282 | dmfc2 t3, 0x0049 |
| 283 | sd t0, OCTEON_CP2_HSH_DATW+40(a0) |
| 284 | dmfc2 t0, 0x004A |
| 285 | sd t1, OCTEON_CP2_HSH_DATW+48(a0) |
| 286 | sd t2, OCTEON_CP2_HSH_IVW(a0) |
| 287 | sd t3, OCTEON_CP2_HSH_IVW+8(a0) |
| 288 | sd t0, OCTEON_CP2_HSH_IVW+16(a0) |
| 289 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 290 | 3: /* pass 1 or CvmCtl[NOCRYPTO] set */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 291 | jr ra |
| 292 | END(octeon_cop2_save) |
| 293 | |
| 294 | /* |
| 295 | * void octeon_cop2_restore(struct octeon_cop2_state *a0) |
| 296 | */ |
| 297 | .align 7 |
| 298 | .set push |
| 299 | .set noreorder |
| 300 | LEAF(octeon_cop2_restore) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 301 | /* First cache line was prefetched before the call */ |
| 302 | pref 4, 128(a0) |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 303 | dmfc0 t9, $9,7 /* CvmCtl register. */ |
| 304 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 305 | pref 4, 256(a0) |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 306 | ld t0, OCTEON_CP2_CRC_IV(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 307 | pref 4, 384(a0) |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 308 | ld t1, OCTEON_CP2_CRC_LENGTH(a0) |
| 309 | ld t2, OCTEON_CP2_CRC_POLY(a0) |
| 310 | |
| 311 | /* Restore the COP2 CRC state */ |
| 312 | dmtc2 t0, 0x0201 |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 313 | dmtc2 t1, 0x1202 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 314 | bbit1 t9, 28, 2f /* Skip LLM if CvmCtl[NODFA_CP2] is set */ |
| 315 | dmtc2 t2, 0x4200 |
| 316 | |
| 317 | /* Restore the LLM state */ |
| 318 | ld t0, OCTEON_CP2_LLM_DAT(a0) |
| 319 | ld t1, OCTEON_CP2_LLM_DAT+8(a0) |
| 320 | dmtc2 t0, 0x0402 |
| 321 | dmtc2 t1, 0x040A |
| 322 | |
| 323 | 2: |
| 324 | bbit1 t9, 26, done_restore /* done if CvmCtl[NOCRYPTO] set */ |
| 325 | nop |
| 326 | |
| 327 | /* Restore the COP2 crypto state common to pass 1 and pass 2 */ |
| 328 | ld t0, OCTEON_CP2_3DES_IV(a0) |
| 329 | ld t1, OCTEON_CP2_3DES_KEY(a0) |
| 330 | ld t2, OCTEON_CP2_3DES_KEY+8(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 331 | dmtc2 t0, 0x0084 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 332 | ld t0, OCTEON_CP2_3DES_KEY+16(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 333 | dmtc2 t1, 0x0080 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 334 | ld t1, OCTEON_CP2_3DES_RESULT(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 335 | dmtc2 t2, 0x0081 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 336 | ld t2, OCTEON_CP2_AES_INP0(a0) /* only really needed for pass 1 */ |
| 337 | dmtc2 t0, 0x0082 |
| 338 | ld t0, OCTEON_CP2_AES_IV(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 339 | dmtc2 t1, 0x0098 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 340 | ld t1, OCTEON_CP2_AES_IV+8(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 341 | dmtc2 t2, 0x010A /* only really needed for pass 1 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 342 | ld t2, OCTEON_CP2_AES_KEY(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 343 | dmtc2 t0, 0x0102 |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 344 | ld t0, OCTEON_CP2_AES_KEY+8(a0) |
| 345 | dmtc2 t1, 0x0103 |
| 346 | ld t1, OCTEON_CP2_AES_KEY+16(a0) |
| 347 | dmtc2 t2, 0x0104 |
| 348 | ld t2, OCTEON_CP2_AES_KEY+24(a0) |
| 349 | dmtc2 t0, 0x0105 |
| 350 | ld t0, OCTEON_CP2_AES_KEYLEN(a0) |
| 351 | dmtc2 t1, 0x0106 |
| 352 | ld t1, OCTEON_CP2_AES_RESULT(a0) |
| 353 | dmtc2 t2, 0x0107 |
| 354 | ld t2, OCTEON_CP2_AES_RESULT+8(a0) |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 355 | mfc0 t3, $15,0 /* Get the processor ID register */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 356 | dmtc2 t0, 0x0110 |
| 357 | li t0, 0x000d0000 /* This is the processor ID of Octeon Pass1 */ |
| 358 | dmtc2 t1, 0x0100 |
| 359 | bne t0, t3, 3f /* Skip the next stuff for non-pass1 */ |
| 360 | dmtc2 t2, 0x0101 |
| 361 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 362 | /* this code is specific for pass 1 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 363 | ld t0, OCTEON_CP2_HSH_DATW(a0) |
| 364 | ld t1, OCTEON_CP2_HSH_DATW+8(a0) |
| 365 | ld t2, OCTEON_CP2_HSH_DATW+16(a0) |
| 366 | dmtc2 t0, 0x0040 |
| 367 | ld t0, OCTEON_CP2_HSH_DATW+24(a0) |
| 368 | dmtc2 t1, 0x0041 |
| 369 | ld t1, OCTEON_CP2_HSH_DATW+32(a0) |
| 370 | dmtc2 t2, 0x0042 |
| 371 | ld t2, OCTEON_CP2_HSH_DATW+40(a0) |
| 372 | dmtc2 t0, 0x0043 |
| 373 | ld t0, OCTEON_CP2_HSH_DATW+48(a0) |
| 374 | dmtc2 t1, 0x0044 |
| 375 | ld t1, OCTEON_CP2_HSH_IVW(a0) |
| 376 | dmtc2 t2, 0x0045 |
| 377 | ld t2, OCTEON_CP2_HSH_IVW+8(a0) |
| 378 | dmtc2 t0, 0x0046 |
| 379 | ld t0, OCTEON_CP2_HSH_IVW+16(a0) |
| 380 | dmtc2 t1, 0x0048 |
| 381 | dmtc2 t2, 0x0049 |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 382 | b done_restore /* unconditional branch */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 383 | dmtc2 t0, 0x004A |
| 384 | |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 385 | 3: /* this is post-pass1 code */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 386 | ld t2, OCTEON_CP2_HSH_DATW(a0) |
| 387 | ld t0, OCTEON_CP2_HSH_DATW+8(a0) |
| 388 | ld t1, OCTEON_CP2_HSH_DATW+16(a0) |
| 389 | dmtc2 t2, 0x0240 |
| 390 | ld t2, OCTEON_CP2_HSH_DATW+24(a0) |
| 391 | dmtc2 t0, 0x0241 |
| 392 | ld t0, OCTEON_CP2_HSH_DATW+32(a0) |
| 393 | dmtc2 t1, 0x0242 |
| 394 | ld t1, OCTEON_CP2_HSH_DATW+40(a0) |
| 395 | dmtc2 t2, 0x0243 |
| 396 | ld t2, OCTEON_CP2_HSH_DATW+48(a0) |
| 397 | dmtc2 t0, 0x0244 |
| 398 | ld t0, OCTEON_CP2_HSH_DATW+56(a0) |
| 399 | dmtc2 t1, 0x0245 |
| 400 | ld t1, OCTEON_CP2_HSH_DATW+64(a0) |
| 401 | dmtc2 t2, 0x0246 |
| 402 | ld t2, OCTEON_CP2_HSH_DATW+72(a0) |
| 403 | dmtc2 t0, 0x0247 |
| 404 | ld t0, OCTEON_CP2_HSH_DATW+80(a0) |
| 405 | dmtc2 t1, 0x0248 |
| 406 | ld t1, OCTEON_CP2_HSH_DATW+88(a0) |
| 407 | dmtc2 t2, 0x0249 |
| 408 | ld t2, OCTEON_CP2_HSH_DATW+96(a0) |
| 409 | dmtc2 t0, 0x024A |
| 410 | ld t0, OCTEON_CP2_HSH_DATW+104(a0) |
| 411 | dmtc2 t1, 0x024B |
| 412 | ld t1, OCTEON_CP2_HSH_DATW+112(a0) |
| 413 | dmtc2 t2, 0x024C |
| 414 | ld t2, OCTEON_CP2_HSH_IVW(a0) |
| 415 | dmtc2 t0, 0x024D |
| 416 | ld t0, OCTEON_CP2_HSH_IVW+8(a0) |
| 417 | dmtc2 t1, 0x024E |
| 418 | ld t1, OCTEON_CP2_HSH_IVW+16(a0) |
| 419 | dmtc2 t2, 0x0250 |
| 420 | ld t2, OCTEON_CP2_HSH_IVW+24(a0) |
| 421 | dmtc2 t0, 0x0251 |
| 422 | ld t0, OCTEON_CP2_HSH_IVW+32(a0) |
| 423 | dmtc2 t1, 0x0252 |
| 424 | ld t1, OCTEON_CP2_HSH_IVW+40(a0) |
| 425 | dmtc2 t2, 0x0253 |
| 426 | ld t2, OCTEON_CP2_HSH_IVW+48(a0) |
| 427 | dmtc2 t0, 0x0254 |
| 428 | ld t0, OCTEON_CP2_HSH_IVW+56(a0) |
| 429 | dmtc2 t1, 0x0255 |
| 430 | ld t1, OCTEON_CP2_GFM_MULT(a0) |
| 431 | dmtc2 t2, 0x0256 |
| 432 | ld t2, OCTEON_CP2_GFM_MULT+8(a0) |
| 433 | dmtc2 t0, 0x0257 |
| 434 | ld t0, OCTEON_CP2_GFM_POLY(a0) |
| 435 | dmtc2 t1, 0x0258 |
| 436 | ld t1, OCTEON_CP2_GFM_RESULT(a0) |
| 437 | dmtc2 t2, 0x0259 |
| 438 | ld t2, OCTEON_CP2_GFM_RESULT+8(a0) |
| 439 | dmtc2 t0, 0x025E |
| 440 | dmtc2 t1, 0x025A |
| 441 | dmtc2 t2, 0x025B |
| 442 | |
| 443 | done_restore: |
| 444 | jr ra |
| 445 | nop |
| 446 | END(octeon_cop2_restore) |
| 447 | .set pop |
| 448 | |
| 449 | /* |
| 450 | * void octeon_mult_save() |
| 451 | * sp is assumed to point to a struct pt_regs |
| 452 | * |
| 453 | * NOTE: This is called in SAVE_SOME in stackframe.h. It can only |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 454 | * safely modify k0 and k1. |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 455 | */ |
| 456 | .align 7 |
| 457 | .set push |
| 458 | .set noreorder |
| 459 | LEAF(octeon_mult_save) |
| 460 | dmfc0 k0, $9,7 /* CvmCtl register. */ |
| 461 | bbit1 k0, 27, 1f /* Skip CvmCtl[NOMUL] */ |
| 462 | nop |
| 463 | |
| 464 | /* Save the multiplier state */ |
| 465 | v3mulu k0, $0, $0 |
| 466 | v3mulu k1, $0, $0 |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 467 | sd k0, PT_MTP(sp) /* PT_MTP has P0 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 468 | v3mulu k0, $0, $0 |
| 469 | sd k1, PT_MTP+8(sp) /* PT_MTP+8 has P1 */ |
| 470 | ori k1, $0, 1 |
| 471 | v3mulu k1, k1, $0 |
| 472 | sd k0, PT_MTP+16(sp) /* PT_MTP+16 has P2 */ |
| 473 | v3mulu k0, $0, $0 |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 474 | sd k1, PT_MPL(sp) /* PT_MPL has MPL0 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 475 | v3mulu k1, $0, $0 |
| 476 | sd k0, PT_MPL+8(sp) /* PT_MPL+8 has MPL1 */ |
| 477 | jr ra |
| 478 | sd k1, PT_MPL+16(sp) /* PT_MPL+16 has MPL2 */ |
| 479 | |
| 480 | 1: /* Resume here if CvmCtl[NOMUL] */ |
| 481 | jr ra |
| 482 | END(octeon_mult_save) |
| 483 | .set pop |
| 484 | |
| 485 | /* |
| 486 | * void octeon_mult_restore() |
| 487 | * sp is assumed to point to a struct pt_regs |
| 488 | * |
| 489 | * NOTE: This is called in RESTORE_SOME in stackframe.h. |
| 490 | */ |
| 491 | .align 7 |
| 492 | .set push |
| 493 | .set noreorder |
| 494 | LEAF(octeon_mult_restore) |
| 495 | dmfc0 k1, $9,7 /* CvmCtl register. */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 496 | ld v0, PT_MPL(sp) /* MPL0 */ |
| 497 | ld v1, PT_MPL+8(sp) /* MPL1 */ |
| 498 | ld k0, PT_MPL+16(sp) /* MPL2 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 499 | bbit1 k1, 27, 1f /* Skip CvmCtl[NOMUL] */ |
| 500 | /* Normally falls through, so no time wasted here */ |
| 501 | nop |
| 502 | |
| 503 | /* Restore the multiplier state */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 504 | ld k1, PT_MTP+16(sp) /* P2 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 505 | MTM0 v0 /* MPL0 */ |
| 506 | ld v0, PT_MTP+8(sp) /* P1 */ |
| 507 | MTM1 v1 /* MPL1 */ |
Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 508 | ld v1, PT_MTP(sp) /* P0 */ |
David Daney | 5b3b168 | 2009-01-08 16:46:40 -0800 | [diff] [blame] | 509 | MTM2 k0 /* MPL2 */ |
| 510 | MTP2 k1 /* P2 */ |
| 511 | MTP1 v0 /* P1 */ |
| 512 | jr ra |
| 513 | MTP0 v1 /* P0 */ |
| 514 | |
| 515 | 1: /* Resume here if CvmCtl[NOMUL] */ |
| 516 | jr ra |
| 517 | nop |
| 518 | END(octeon_mult_restore) |
| 519 | .set pop |