/*

 x86 function call convention, 64-bit:
 -------------------------------------
  arguments           | callee-saved       | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls. (gcc can 'merge'
   functions when it sees tail-call optimization possibilities.) rflags is
   clobbered. Leftover arguments are passed on the stack. )

 [*] In the frame-pointers case rbp is fixed to the stack frame.

 [**] For struct return values wider than 64 bits the return convention is a
      bit more complex: structures up to 128 bits wide are returned
      straight in rax, rdx. For structures larger than that (3 words or
      more) the caller puts a pointer to an on-stack return struct
      [allocated in the caller's stack frame] into the first argument - i.e.
      into rdi. All other arguments shift up by one in this case.
      Fortunately this case is rare in the kernel.
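
      As an illustrative sketch (the function below is hypothetical, not
      taken from the kernel), a 3-word struct return such as:

          struct wide { long a, b, c; };
          struct wide f(long x, long y);

      is effectively compiled as if it were:

          void f(struct wide *ret, long x, long y);

      with ret in rdi, x in rsi and y in rdx.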

For 32-bit we have the following conventions - kernel is built with
-mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
  arguments         | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed on the stack. )

 [*] In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means semantics
      similar to 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      get done in the caller's frame and the pointer to the return struct goes
      into regparm0, i.e. eax - the other arguments shift up and the
      function's register parameters degenerate to regparm=2 in essence.
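
      As a hypothetical illustration of -mregparm=3: for

          long g(long a, long b, long c, long d);

      a, b and c are passed in eax, edx and ecx, and d on the stack. If g
      instead returned a 3-word struct, the hidden return pointer would
      occupy eax, leaving only edx and ecx for register arguments - the
      regparm=2 degeneration described above.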

*/

#include <asm/dwarf2.h>

#ifdef CONFIG_X86_64

/*
 * 64-bit system call stack frame layout defines and helpers,
 * for assembly code:
 */

#define R15		  0
#define R14		  8
#define R13		 16
#define R12		 24
#define RBP		 32
#define RBX		 40

/* arguments: interrupts/non tracing syscalls only save up to here: */
#define R11		 48
#define R10		 56
#define R9		 64
#define R8		 72
#define RAX		 80
#define RCX		 88
#define RDX		 96
#define RSI		104
#define RDI		112
#define ORIG_RAX	120	/* + error_code */
/* end of arguments */

/* cpu exception frame or undefined in case of fast syscall: */
#define RIP		128
#define CS		136
#define EFLAGS		144
#define RSP		152
#define SS		160
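
/*
 * Together these offsets describe the (partial) pt_regs frame the
 * macros below build, lowest address first:
 *
 *	R15 ... RBX			saved by SAVE_REST
 *	R11 ... RDI			saved by SAVE_ARGS
 *	ORIG_RAX			syscall number or error code
 *	RIP, CS, EFLAGS, RSP, SS	CPU exception frame, if any
 */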

#define ARGOFFSET	R11

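/*
 * Allocate the 9 argument-register slots (plus \addskip extra bytes) and
 * save the caller-clobbered registers into them.  \save_rcx and
 * \save_r891011 let fast paths skip slots they don't need; with
 * \rax_enosys the rax slot is filled with -ENOSYS instead of %rax, so
 * a syscall that is never dispatched reads back -ENOSYS as its return
 * value.
 */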
.macro SAVE_ARGS addskip=0, save_rcx=1, save_r891011=1, rax_enosys=0
subq $9*8+\addskip, %rsp
CFI_ADJUST_CFA_OFFSET 9*8+\addskip
movq_cfi rdi, 8*8
movq_cfi rsi, 7*8
movq_cfi rdx, 6*8

.if \save_rcx
movq_cfi rcx, 5*8
.endif

.if \rax_enosys
movq $-ENOSYS, 4*8(%rsp)
.else
movq_cfi rax, 4*8
.endif

.if \save_r891011
movq_cfi r8,  3*8
movq_cfi r9,  2*8
movq_cfi r10, 1*8
movq_cfi r11, 0*8
.endif

.endm

#define ARG_SKIP	(9*8)

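/*
 * Counterpart to SAVE_ARGS; the rstor_* flags mirror the save flags
 * above.  Note that the stack slots of registers that are not restored
 * are still freed by the final addq.
 */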
.macro RESTORE_ARGS rstor_rax=1, addskip=0, rstor_rcx=1, rstor_r11=1, \
		    rstor_r8910=1, rstor_rdx=1
.if \rstor_r11
movq_cfi_restore 0*8, r11
.endif

.if \rstor_r8910
movq_cfi_restore 1*8, r10
movq_cfi_restore 2*8, r9
movq_cfi_restore 3*8, r8
.endif

.if \rstor_rax
movq_cfi_restore 4*8, rax
.endif

.if \rstor_rcx
movq_cfi_restore 5*8, rcx
.endif

.if \rstor_rdx
movq_cfi_restore 6*8, rdx
.endif

movq_cfi_restore 7*8, rsi
movq_cfi_restore 8*8, rdi

.if ARG_SKIP+\addskip > 0
addq $ARG_SKIP+\addskip, %rsp
CFI_ADJUST_CFA_OFFSET -(ARG_SKIP+\addskip)
.endif
.endm

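/*
 * Reload the argument registers from a frame saved by SAVE_ARGS, e.g.
 * after a ptrace stop may have modified the saved values.  Note the
 * offsets: %rax is deliberately not reloaded from its own slot
 * (\offset+32) but from \offset+72, which for the usual
 * \offset = ARGOFFSET is the orig_rax slot - i.e. the original syscall
 * number, restored for syscall dispatch/restart.
 */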
.macro LOAD_ARGS offset, skiprax=0
movq \offset(%rsp),    %r11
movq \offset+8(%rsp),  %r10
movq \offset+16(%rsp), %r9
movq \offset+24(%rsp), %r8
movq \offset+40(%rsp), %rcx
movq \offset+48(%rsp), %rdx
movq \offset+56(%rsp), %rsi
movq \offset+64(%rsp), %rdi
.if \skiprax
.else
movq \offset+72(%rsp), %rax
.endif
.endm

#define REST_SKIP	(6*8)

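/* Save the callee-saved registers, completing the pt_regs layout above. */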
.macro SAVE_REST
subq $REST_SKIP, %rsp
CFI_ADJUST_CFA_OFFSET REST_SKIP
movq_cfi rbx, 5*8
movq_cfi rbp, 4*8
movq_cfi r12, 3*8
movq_cfi r13, 2*8
movq_cfi r14, 1*8
movq_cfi r15, 0*8
.endm

.macro RESTORE_REST
movq_cfi_restore 0*8, r15
movq_cfi_restore 1*8, r14
movq_cfi_restore 2*8, r13
movq_cfi_restore 3*8, r12
movq_cfi_restore 4*8, rbp
movq_cfi_restore 5*8, rbx
addq $REST_SKIP, %rsp
CFI_ADJUST_CFA_OFFSET -(REST_SKIP)
.endm

.macro SAVE_ALL
SAVE_ARGS
SAVE_REST
.endm

.macro RESTORE_ALL addskip=0
RESTORE_REST
RESTORE_ARGS 1, \addskip
.endm

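/* 0xf1 is the undocumented ICEBP/INT1 opcode; it raises a debug exception. */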
.macro icebp
.byte 0xf1
.endm

#else /* CONFIG_X86_64 */

/*
 * For 32-bit, these are simplified versions of SAVE_ALL/RESTORE_ALL.
 * They differ from the entry_32.S versions in that they do not change
 * the segment registers, so they are only suitable for in-kernel use,
 * not when transitioning from or to user space.  The resulting stack
 * frame is not a standard pt_regs frame.  The main use case is calling
 * C code from assembler when all the registers need to be preserved.
 */

.macro SAVE_ALL
pushl_cfi_reg eax
pushl_cfi_reg ebp
pushl_cfi_reg edi
pushl_cfi_reg esi
pushl_cfi_reg edx
pushl_cfi_reg ecx
pushl_cfi_reg ebx
.endm

.macro RESTORE_ALL
popl_cfi_reg ebx
popl_cfi_reg ecx
popl_cfi_reg edx
popl_cfi_reg esi
popl_cfi_reg edi
popl_cfi_reg ebp
popl_cfi_reg eax
.endm
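
/*
 * Typical (hypothetical) use is to wrap a C call made from assembly:
 * SAVE_ALL, then a "call" to the C function, then RESTORE_ALL, which
 * preserves all seven general-purpose registers across the call.
 */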

#endif /* CONFIG_X86_64 */