Unbreak Win64 CC. Step one: honour the register save area, fix stack alignment, and provide a different set of call-clobbered registers.
llvm-svn: 77962
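
For reference, a minimal sketch (not part of this change) of the Win64 register
save ("home") area convention the callback now honours: the caller reserves 32
bytes immediately above its return address, and the callee is free to spill its
four integer argument registers (rcx, rdx, r8, r9) there. The procedure names
below are illustrative only.

        .code
caller_sketch proc
        sub     rsp, 40          ; 32-byte spill area + 8 so rsp stays 16-byte aligned at the call
        mov     rcx, 1           ; first integer argument
        call    callee_sketch
        add     rsp, 40
        ret
caller_sketch endp

callee_sketch proc
        ; Return address is at [rsp]; the caller-provided spill area starts at [rsp+8].
        mov     [rsp+ 8], rcx
        mov     [rsp+16], rdx
        mov     [rsp+24], r8
        mov     [rsp+32], r9
        ret
callee_sketch endp
        end
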
diff --git a/llvm/lib/Target/X86/X86CompilationCallback_Win64.asm b/llvm/lib/Target/X86/X86CompilationCallback_Win64.asm
index 8002f98..a11c5c3 100644
--- a/llvm/lib/Target/X86/X86CompilationCallback_Win64.asm
+++ b/llvm/lib/Target/X86/X86CompilationCallback_Win64.asm
@@ -15,52 +15,52 @@
.code
X86CompilationCallback proc
+ ; Save all int arg registers into register spill area.
+ mov [rsp+ 8], rcx
+ mov [rsp+16], rdx
+ mov [rsp+24], r8
+ mov [rsp+32], r9
+
push rbp
- ; Save RSP
+ ; Save RSP.
mov rbp, rsp
- ; Save all int arg registers
- push rcx
- push rdx
- push r8
- push r9
-
; Align stack on 16-byte boundary.
and rsp, -16
- ; Save all XMM arg registers
- sub rsp, 64
- movaps [rsp], xmm0
- movaps [rsp+16], xmm1
- movaps [rsp+32], xmm2
- movaps [rsp+48], xmm3
+ ; Save all XMM arg registers. Also allocate reg spill area.
+ sub rsp, 96
+ movaps [rsp +32], xmm0
+ movaps [rsp+16+32], xmm1
+ movaps [rsp+32+32], xmm2
+ movaps [rsp+48+32], xmm3
; JIT callee
- ; Pass prev frame and return address
+ ; Pass prev frame and return address.
mov rcx, rbp
mov rdx, qword ptr [rbp+8]
call X86CompilationCallback2
- ; Restore all XMM arg registers
- movaps xmm3, [rsp+48]
- movaps xmm2, [rsp+32]
- movaps xmm1, [rsp+16]
- movaps xmm0, [rsp]
+ ; Restore all XMM arg registers.
+ movaps xmm3, [rsp+48+32]
+ movaps xmm2, [rsp+32+32]
+ movaps xmm1, [rsp+16+32]
+ movaps xmm0, [rsp +32]
- ; Restore RSP
+ ; Restore RSP.
mov rsp, rbp
- ; Restore all int arg registers
- sub rsp, 32
- pop r9
- pop r8
- pop rdx
- pop rcx
-
- ; Restore RBP
+ ; Restore RBP.
pop rbp
+
+ ; Restore all int arg registers.
+ mov r9, [rsp+32]
+ mov r8, [rsp+24]
+ mov rdx, [rsp+16]
+ mov rcx, [rsp+ 8]
+
ret
X86CompilationCallback endp