//===-------------------- UnwindRegistersRestore.S ------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
| 9 | |
| 10 | #include "assembly.h" |
| 11 | |
| 12 | .text |
| 13 | |
#if defined(__i386__)
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_x866jumptoEv)
#
# void libunwind::Registers_x86::jumpto()
#
# Restores the saved i386 register state and jumps to the saved eip.
# Never returns to its caller.
#
# On entry:
#  +                       +
#  +-----------------------+
#  + thread_state pointer  +
#  +-----------------------+
#  + return address        +
#  +-----------------------+   <-- SP
#  +                       +
  movl   4(%esp), %eax          # eax = Registers_x86* (4-byte slots:
                                # 0=eax,4=ebx,8=ecx,12=edx,16=edi,20=esi,
                                # 24=ebp,28=esp,40=eip)
  # Stage the saved eax and eip on the *target* stack, since both registers
  # are still needed right up to the final jump.
  # set up eax and ret on new stack location
  movl  28(%eax), %edx          # edx holds new stack pointer
  subl  $8,%edx                 # make room for the staged eip/eax pair
  movl  %edx, 28(%eax)          # remember the adjusted sp for the reload below
  movl  0(%eax), %ebx           # saved eax ...
  movl  %ebx, 0(%edx)           # ... placed at what will become the new esp
  movl  40(%eax), %ebx          # saved eip ...
  movl  %ebx, 4(%edx)           # ... placed just above it
  # we now have ret and eax pushed onto where new stack will be
  # restore all registers
  movl  4(%eax), %ebx
  movl  8(%eax), %ecx
  movl  12(%eax), %edx
  movl  16(%eax), %edi
  movl  20(%eax), %esi
  movl  24(%eax), %ebp
  movl  28(%eax), %esp          # switch to the (already adjusted) target stack
  # skip ss
  # skip eflags
  pop   %eax                    # eax was already pushed on new stack
  ret                           # eip was already pushed on new stack
  # skip cs
  # skip ds
  # skip es
  # skip fs
  # skip gs

#elif defined(__x86_64__)

DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind16Registers_x86_646jumptoEv)
#
# void libunwind::Registers_x86_64::jumpto()
#
# Restores the saved x86-64 register state and jumps to the saved rip.
# Never returns to its caller.
#
# On entry, thread_state pointer is in rdi
# (8-byte slots: 0=rax,8=rbx,16=rcx,24=rdx,32=rdi,40=rsi,48=rbp,56=rsp,
#  64..120=r8..r15, 128=rip)

  # Stage the saved rdi and rip on the *target* stack: rdi is the base for
  # every load below and cannot be restored until the very end.
  movq  56(%rdi), %rax          # rax holds new stack pointer
  subq  $16, %rax               # make room for the staged rdi/rip pair
  movq  %rax, 56(%rdi)          # remember the adjusted sp for the reload below
  movq  32(%rdi), %rbx          # store new rdi on new stack
  movq  %rbx, 0(%rax)
  movq  128(%rdi), %rbx         # store new rip on new stack
  movq  %rbx, 8(%rax)
  # restore all registers
  movq  0(%rdi), %rax
  movq  8(%rdi), %rbx
  movq  16(%rdi), %rcx
  movq  24(%rdi), %rdx
  # restore rdi later (still the base pointer for these loads)
  movq  40(%rdi), %rsi
  movq  48(%rdi), %rbp
  # restore rsp later (stack must not move until staged values are in place)
  movq  64(%rdi), %r8
  movq  72(%rdi), %r9
  movq  80(%rdi), %r10
  movq  88(%rdi), %r11
  movq  96(%rdi), %r12
  movq  104(%rdi), %r13
  movq  112(%rdi), %r14
  movq  120(%rdi), %r15
  # skip rflags
  # skip cs
  # skip fs
  # skip gs
  movq  56(%rdi), %rsp          # cut back rsp to new location
  pop   %rdi                    # rdi was saved here earlier
  ret                           # rip was saved here


#elif defined(__ppc__)

DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_ppc6jumptoEv)
;
; void libunwind::Registers_ppc::jumpto()
;
; Restores the saved ppc32 register state and jumps to the saved srr0 (pc).
; Never returns to its caller.
;
; On entry:
;  thread_state pointer is in r3
; (layout: 0=srr0, 4=srr1, 8+4*n=r[n], 136=cr, 148=ctr, 156=vrsave,
;  160+8*n=f[n], 424+16*n=v[n] -- srr0/cr/ctr offsets are used below)
;

; restore integral registers
; skip r0 for now (r0 is the scratch register for cr/ctr/pc below)
; skip r1 for now (stack pointer; switched at the very end)
  lwz   r2, 16(r3)
; skip r3 for now (base pointer for all of these loads)
; skip r4 for now (scratch for the unaligned vector copies)
; skip r5 for now (holds VRsave below)
  lwz   r6, 32(r3)
  lwz   r7, 36(r3)
  lwz   r8, 40(r3)
  lwz   r9, 44(r3)
  lwz   r10, 48(r3)
  lwz   r11, 52(r3)
  lwz   r12, 56(r3)
  lwz   r13, 60(r3)
  lwz   r14, 64(r3)
  lwz   r15, 68(r3)
  lwz   r16, 72(r3)
  lwz   r17, 76(r3)
  lwz   r18, 80(r3)
  lwz   r19, 84(r3)
  lwz   r20, 88(r3)
  lwz   r21, 92(r3)
  lwz   r22, 96(r3)
  lwz   r23,100(r3)
  lwz   r24,104(r3)
  lwz   r25,108(r3)
  lwz   r26,112(r3)
  lwz   r27,116(r3)
  lwz   r28,120(r3)
  lwz   r29,124(r3)
  lwz   r30,128(r3)
  lwz   r31,132(r3)

; restore float registers
  lfd   f0, 160(r3)
  lfd   f1, 168(r3)
  lfd   f2, 176(r3)
  lfd   f3, 184(r3)
  lfd   f4, 192(r3)
  lfd   f5, 200(r3)
  lfd   f6, 208(r3)
  lfd   f7, 216(r3)
  lfd   f8, 224(r3)
  lfd   f9, 232(r3)
  lfd   f10,240(r3)
  lfd   f11,248(r3)
  lfd   f12,256(r3)
  lfd   f13,264(r3)
  lfd   f14,272(r3)
  lfd   f15,280(r3)
  lfd   f16,288(r3)
  lfd   f17,296(r3)
  lfd   f18,304(r3)
  lfd   f19,312(r3)
  lfd   f20,320(r3)
  lfd   f21,328(r3)
  lfd   f22,336(r3)
  lfd   f23,344(r3)
  lfd   f24,352(r3)
  lfd   f25,360(r3)
  lfd   f26,368(r3)
  lfd   f27,376(r3)
  lfd   f28,384(r3)
  lfd   f29,392(r3)
  lfd   f30,400(r3)
  lfd   f31,408(r3)

; restore vector registers if any are in use
; (each VRsave bit says whether the corresponding vN is live)
  lwz   r5,156(r3)        ; test VRsave
  cmpwi r5,0
  beq   Lnovec

  subi  r4,r1,16
  rlwinm r4,r4,0,0,27     ; mask low 4-bits
; r4 is now a 16-byte aligned pointer into the red zone
; the _vectorRegisters may not be 16-byte aligned so copy via red zone temp buffer


; LOAD_VECTOR_UNALIGNEDl: restore v0-v15 (bit tested via the *high* half of
; VRsave, hence andis.); copies the 16 bytes word-by-word through the aligned
; red-zone buffer, then lvx loads the vector register.
#define LOAD_VECTOR_UNALIGNEDl(_index) \
  andis. r0,r5,(1<<(15-_index)) @\
  beq    Ldone ## _index       @\
  lwz    r0, 424+_index*16(r3) @\
  stw    r0, 0(r4)             @\
  lwz    r0, 424+_index*16+4(r3) @\
  stw    r0, 4(r4)             @\
  lwz    r0, 424+_index*16+8(r3) @\
  stw    r0, 8(r4)             @\
  lwz    r0, 424+_index*16+12(r3)@\
  stw    r0, 12(r4)            @\
  lvx    v ## _index,0,r4      @\
Ldone ## _index:

; LOAD_VECTOR_UNALIGNEDh: same, for v16-v31 (bit in the low half of VRsave,
; hence andi.).
#define LOAD_VECTOR_UNALIGNEDh(_index) \
  andi.  r0,r5,(1<<(31-_index)) @\
  beq    Ldone ## _index       @\
  lwz    r0, 424+_index*16(r3) @\
  stw    r0, 0(r4)             @\
  lwz    r0, 424+_index*16+4(r3) @\
  stw    r0, 4(r4)             @\
  lwz    r0, 424+_index*16+8(r3) @\
  stw    r0, 8(r4)             @\
  lwz    r0, 424+_index*16+12(r3)@\
  stw    r0, 12(r4)            @\
  lvx    v ## _index,0,r4      @\
Ldone ## _index:


  LOAD_VECTOR_UNALIGNEDl(0)
  LOAD_VECTOR_UNALIGNEDl(1)
  LOAD_VECTOR_UNALIGNEDl(2)
  LOAD_VECTOR_UNALIGNEDl(3)
  LOAD_VECTOR_UNALIGNEDl(4)
  LOAD_VECTOR_UNALIGNEDl(5)
  LOAD_VECTOR_UNALIGNEDl(6)
  LOAD_VECTOR_UNALIGNEDl(7)
  LOAD_VECTOR_UNALIGNEDl(8)
  LOAD_VECTOR_UNALIGNEDl(9)
  LOAD_VECTOR_UNALIGNEDl(10)
  LOAD_VECTOR_UNALIGNEDl(11)
  LOAD_VECTOR_UNALIGNEDl(12)
  LOAD_VECTOR_UNALIGNEDl(13)
  LOAD_VECTOR_UNALIGNEDl(14)
  LOAD_VECTOR_UNALIGNEDl(15)
  LOAD_VECTOR_UNALIGNEDh(16)
  LOAD_VECTOR_UNALIGNEDh(17)
  LOAD_VECTOR_UNALIGNEDh(18)
  LOAD_VECTOR_UNALIGNEDh(19)
  LOAD_VECTOR_UNALIGNEDh(20)
  LOAD_VECTOR_UNALIGNEDh(21)
  LOAD_VECTOR_UNALIGNEDh(22)
  LOAD_VECTOR_UNALIGNEDh(23)
  LOAD_VECTOR_UNALIGNEDh(24)
  LOAD_VECTOR_UNALIGNEDh(25)
  LOAD_VECTOR_UNALIGNEDh(26)
  LOAD_VECTOR_UNALIGNEDh(27)
  LOAD_VECTOR_UNALIGNEDh(28)
  LOAD_VECTOR_UNALIGNEDh(29)
  LOAD_VECTOR_UNALIGNEDh(30)
  LOAD_VECTOR_UNALIGNEDh(31)

Lnovec:
  lwz   r0, 136(r3)       ; __cr
  mtcrf 255,r0            ; FIX: was mtocrf 255 -- mtocrf is architecturally
                          ; defined only for a single-field FXM mask; the
                          ; multi-field form is mtcrf
  lwz   r0, 148(r3)       ; __ctr
  mtctr r0
  lwz   r0, 0(r3)         ; __ssr0 (the pc to resume at)
  mtlr  r0                ; FIX: was mtctr, which clobbered the __ctr value
                          ; restored two instructions earlier; route the pc
                          ; through LR instead (LR was not otherwise restored)
  lwz   r0, 8(r3)         ; do r0 now
  lwz   r5,28(r3)         ; do r5 now
  lwz   r4,24(r3)         ; do r4 now
  lwz   r1,12(r3)         ; do sp now
  lwz   r3,20(r3)         ; do r3 last
  blr                     ; FIX: was bctr -- jump to pc via LR so CTR keeps
                          ; its restored value

#elif defined(__arm64__) || defined(__aarch64__)

//
// void libunwind::Registers_arm64::jumpto()
//
// Restores the saved AArch64 register state and returns to the saved pc.
// Never returns to its caller.
//
// On entry:
//  thread_state pointer is in x0
// (16-byte-spaced pairs: x0..x29 at #0x000-#0x0E8, pc at #0x100, sp at #0x0F8,
//  d0..d31 at #0x110-#0x208)
//
  .p2align 2
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind15Registers_arm646jumptoEv)
  // skip restore of x0,x1 for now (x0 is the base for every load; x1 is
  // scratch for the sp switch)
  ldp    x2, x3,  [x0, #0x010]
  ldp    x4, x5,  [x0, #0x020]
  ldp    x6, x7,  [x0, #0x030]
  ldp    x8, x9,  [x0, #0x040]
  ldp    x10,x11, [x0, #0x050]
  ldp    x12,x13, [x0, #0x060]
  ldp    x14,x15, [x0, #0x070]
  ldp    x16,x17, [x0, #0x080]
  ldp    x18,x19, [x0, #0x090]
  ldp    x20,x21, [x0, #0x0A0]
  ldp    x22,x23, [x0, #0x0B0]
  ldp    x24,x25, [x0, #0x0C0]
  ldp    x26,x27, [x0, #0x0D0]
  ldp    x28,x29, [x0, #0x0E0]
  ldr    x30,     [x0, #0x100]  // restore pc into lr
                                // (note: nothing is reloaded from #0x0F0)
  ldr    x1,      [x0, #0x0F8]
  mov    sp,x1                  // restore sp

  ldp    d0, d1,  [x0, #0x110]
  ldp    d2, d3,  [x0, #0x120]
  ldp    d4, d5,  [x0, #0x130]
  ldp    d6, d7,  [x0, #0x140]
  ldp    d8, d9,  [x0, #0x150]
  ldp    d10,d11, [x0, #0x160]
  ldp    d12,d13, [x0, #0x170]
  ldp    d14,d15, [x0, #0x180]
  ldp    d16,d17, [x0, #0x190]
  ldp    d18,d19, [x0, #0x1A0]
  ldp    d20,d21, [x0, #0x1B0]
  ldp    d22,d23, [x0, #0x1C0]
  ldp    d24,d25, [x0, #0x1D0]
  ldp    d26,d27, [x0, #0x1E0]
  ldp    d28,d29, [x0, #0x1F0]
  ldr    d30,     [x0, #0x200]
  ldr    d31,     [x0, #0x208]

  ldp    x0, x1,  [x0, #0x000]  // restore x0,x1 last (x0 was the base)
  ret    x30                    // jump to pc

#elif defined(__arm__) && !defined(__APPLE__)

#if !defined(__ARM_ARCH_ISA_ARM)
  .thumb
#endif

@
@ void libunwind::Registers_arm::restoreCoreAndJumpTo()
@
@ Restores the saved core (integer) registers and branches to the saved pc
@ (staged in lr).  Never returns to its caller.
@
@ On entry:
@  thread_state pointer is in r0
@
  .p2align 2
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm20restoreCoreAndJumpToEv)
#if !defined(__ARM_ARCH_ISA_ARM) && __ARM_ARCH_ISA_THUMB == 1
  @ Thumb-1 ldm cannot target high registers, so bounce them through low ones:
  @ r8-r11: ldm into r1-r4, then mov to r8-r11
  adds r0, #0x20                @ advance r0 to the r8 slot
  ldm  r0!, {r1-r4}
  subs r0, #0x30                @ rewind r0 back to the start of the array
  mov  r8, r1
  mov  r9, r2
  mov  r10, r3
  mov  r11, r4
  @ r12 does not need loading, it is the intra-procedure-call scratch register
  ldr  r2, [r0, #0x34]
  ldr  r3, [r0, #0x3c]
  mov  sp, r2
  mov  lr, r3                   @ restore pc into lr
  ldm  r0, {r0-r7}
#else
  @ Use lr as base so that r0 can be restored.
  mov  lr, r0
  @ 32bit thumb-2 restrictions for ldm:
  @ . the sp (r13) cannot be in the list
  @ . the pc (r15) and lr (r14) cannot both be in the list in an LDM instruction
  ldm  lr, {r0-r12}
  ldr  sp, [lr, #52]
  ldr  lr, [lr, #60]            @ restore pc into lr
#endif
  JMP(lr)

@
@ static void libunwind::Registers_arm::restoreVFPWithFLDMD(unw_fpreg_t* values)
@
@ Reloads VFP registers d0-d15 from the given buffer (FLDMIAD format).
@
@ On entry:
@  values pointer is in r0
@
  .p2align 2
  .fpu vfpv3-d16
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm19restoreVFPWithFLDMDEPy)
  @ VFP and iwMMX instructions are only available when compiling with the flags
  @ that enable them. We do not want to do that in the library (because we do not
  @ want the compiler to generate instructions that access those) but this is
  @ only accessed if the personality routine needs these registers. Use of
  @ these registers implies they are, actually, available on the target, so
  @ it's ok to execute.
  @ So, generate the instruction using the corresponding coprocessor mnemonic.
  vldmia r0, {d0-d15}
  JMP(lr)

@
@ static void libunwind::Registers_arm::restoreVFPWithFLDMX(unw_fpreg_t* values)
@
@ Reloads VFP registers d0-d15 from a buffer saved in FSTMX format.
@
@ On entry:
@  values pointer is in r0
@
  .p2align 2
  .fpu vfpv3-d16
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm19restoreVFPWithFLDMXEPy)
  vldmia r0, {d0-d15} @ fldmiax is deprecated in ARMv7+ and now behaves like vldmia
  JMP(lr)

@
@ static void libunwind::Registers_arm::restoreVFPv3(unw_fpreg_t* values)
@
@ Reloads the upper VFPv3 registers d16-d31 from the given buffer.
@
@ On entry:
@  values pointer is in r0
@
  .p2align 2
  .fpu vfpv3
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm12restoreVFPv3EPy)
  vldmia r0, {d16-d31}
  JMP(lr)

#if defined(__ARM_WMMX)

@
@ static void libunwind::Registers_arm::restoreiWMMX(unw_fpreg_t* values)
@
@ Reloads the 16 iwMMX data registers (wR0-wR15, 8 bytes each) from the
@ given buffer.
@
@ On entry:
@  values pointer is in r0
@
  .p2align 2
  .arch armv5te
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm12restoreiWMMXEPy)
  @ iwMMX mnemonics are only accepted when assembling for an iwMMX target, so
  @ emit the equivalent raw coprocessor-1 loads (the intended wldrd form is
  @ shown in each trailing comment).  This path only runs when the
  @ personality routine actually uses these registers.
  ldcl p1, cr0, [r0], #8 @ wldrd wR0, [r0], #8
  ldcl p1, cr1, [r0], #8 @ wldrd wR1, [r0], #8
  ldcl p1, cr2, [r0], #8 @ wldrd wR2, [r0], #8
  ldcl p1, cr3, [r0], #8 @ wldrd wR3, [r0], #8
  ldcl p1, cr4, [r0], #8 @ wldrd wR4, [r0], #8
  ldcl p1, cr5, [r0], #8 @ wldrd wR5, [r0], #8
  ldcl p1, cr6, [r0], #8 @ wldrd wR6, [r0], #8
  ldcl p1, cr7, [r0], #8 @ wldrd wR7, [r0], #8
  ldcl p1, cr8, [r0], #8 @ wldrd wR8, [r0], #8
  ldcl p1, cr9, [r0], #8 @ wldrd wR9, [r0], #8
  ldcl p1, cr10, [r0], #8 @ wldrd wR10, [r0], #8
  ldcl p1, cr11, [r0], #8 @ wldrd wR11, [r0], #8
  ldcl p1, cr12, [r0], #8 @ wldrd wR12, [r0], #8
  ldcl p1, cr13, [r0], #8 @ wldrd wR13, [r0], #8
  ldcl p1, cr14, [r0], #8 @ wldrd wR14, [r0], #8
  ldcl p1, cr15, [r0], #8 @ wldrd wR15, [r0], #8
  JMP(lr)

@
@ static void libunwind::Registers_arm::restoreiWMMXControl(unw_uint32_t* values)
@
@ Reloads the 4 iwMMX control registers (wCGR0-wCGR3, 4 bytes each) from the
@ given buffer.
@
@ On entry:
@  values pointer is in r0
@
  .p2align 2
  .arch armv5te
DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind13Registers_arm19restoreiWMMXControlEPj)
  @ Raw coprocessor-1 encodings (intended wldrw form in trailing comments);
  @ see restoreiWMMX for why the mnemonics are not used directly.
  ldc2 p1, cr8, [r0], #4 @ wldrw wCGR0, [r0], #4
  ldc2 p1, cr9, [r0], #4 @ wldrw wCGR1, [r0], #4
  ldc2 p1, cr10, [r0], #4 @ wldrw wCGR2, [r0], #4
  ldc2 p1, cr11, [r0], #4 @ wldrw wCGR3, [r0], #4
  JMP(lr)

#endif

#elif defined(__or1k__)

DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind14Registers_or1k6jumptoEv)
#
# void libunwind::Registers_or1k::jumpto()
#
# Restores the saved OpenRISC 1000 register state and jumps to the saved
# return address.  Never returns to its caller.
#
# On entry:
#  thread_state pointer is in r3
# (4-byte slots: offset 4*n holds r[n])
#

# restore integral registers
  l.lwz r0, 0(r3)
  l.lwz r1, 4(r3)
  l.lwz r2, 8(r3)
  # skip r3 for now (base pointer for all of these loads)
  l.lwz r4, 16(r3)
  l.lwz r5, 20(r3)
  l.lwz r6, 24(r3)
  l.lwz r7, 28(r3)
  l.lwz r8, 32(r3)
  l.lwz r9, 36(r3)
  l.lwz r10, 40(r3)
  l.lwz r11, 44(r3)
  l.lwz r12, 48(r3)
  l.lwz r13, 52(r3)
  l.lwz r14, 56(r3)
  l.lwz r15, 60(r3)
  l.lwz r16, 64(r3)
  l.lwz r17, 68(r3)
  l.lwz r18, 72(r3)
  l.lwz r19, 76(r3)
  l.lwz r20, 80(r3)
  l.lwz r21, 84(r3)
  l.lwz r22, 88(r3)
  l.lwz r23, 92(r3)
  l.lwz r24, 96(r3)
  l.lwz r25,100(r3)
  l.lwz r26,104(r3)
  l.lwz r27,108(r3)
  l.lwz r28,112(r3)
  l.lwz r29,116(r3)
  l.lwz r30,120(r3)
  l.lwz r31,124(r3)

  # at last, restore r3
  l.lwz r3, 12(r3)

  # jump to pc
  # NOTE(review): the target is the restored r9 (the or1k link register),
  # i.e. the saved return address is used as the resume pc -- confirm this
  # matches Registers_or1k's expectations.
  l.jr  r9
  l.nop                 # delay slot

#endif

  NO_EXEC_STACK_DIRECTIVE
