/*

 x86 function call convention, 64-bit:
 -------------------------------------
  arguments           | callee-saved       | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls. (gcc can 'merge'
   functions when it sees tail-call optimization possibilities) rflags is
   clobbered. Leftover arguments are passed over the stack frame.)
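
 For example (an illustrative sketch, not part of the original notes): a
 call to a hypothetical foo(a, b, c, d, e, f, g) with integer/pointer
 arguments passes a in rdi, b in rsi, c in rdx, d in rcx, e in r8, f in r9
 and g on the stack; an integer result comes back in rax.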

 [*]  In the frame-pointers case rbp is fixed to the stack frame.

 [**] For struct return values wider than 64 bits the return convention is a
      bit more complex: up to 128 bits width we return small structures
      straight in rax, rdx. For structures larger than that (3 words or
      larger) the caller puts a pointer to an on-stack return struct
      [allocated in the caller's stack frame] into the first argument - i.e.
      into rdi. All other arguments shift up by one in this case.
      Fortunately this case is rare in the kernel.
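
      As a sketch of that larger-struct case (illustration only, hypothetical
      prototype): for

	struct foo { long a, b, c; } bar(long x, long y);

      the call is effectively lowered to bar(&result, x, y) - the hidden
      pointer to the caller-allocated result goes into rdi, x moves to rsi,
      y to rdx, and the callee hands the same pointer back in rax.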

 For 32-bit we have the following conventions - kernel is built with
 -mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
  arguments         | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed over the stack frame. )
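
 For example (illustration only, hypothetical call): with -mregparm=3 a
 call to foo(a, b, c, d) passes a in eax, b in edx, c in ecx and d on the
 stack; an integer result comes back in eax.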

 [*] In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means semantics
      similar to those on 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      get done in the caller's frame and the pointer to the return struct goes
      into regparm0, i.e. eax - the other arguments shift up and the
      function's register parameters degenerate to regparm=2 in essence.
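
      Sketching that degenerate case (illustration only): for a 3-word
      struct returned from a hypothetical bar(x, y), the pointer to the
      caller-allocated struct is passed in eax, x moves to edx, y to ecx,
      and any further arguments go on the stack.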

*/

#include "dwarf2.h"

/*
 * 64-bit system call stack frame layout defines and helpers, for
 * assembly code (note that the seemingly unnecessary parentheses
 * are to prevent cpp from inserting spaces in expressions that get
 * passed to macros):
 */

#define R15		(0)
#define R14		(8)
#define R13		(16)
#define R12		(24)
#define RBP		(32)
#define RBX		(40)

/* arguments: interrupts/non-tracing syscalls only save up to here: */
#define R11		(48)
#define R10		(56)
#define R9		(64)
#define R8		(72)
#define RAX		(80)
#define RCX		(88)
#define RDX		(96)
#define RSI		(104)
#define RDI		(112)
#define ORIG_RAX	(120)	/* + error_code */
/* end of arguments */

/* cpu exception frame or undefined in case of fast syscall: */
#define RIP		(128)
#define CS		(136)
#define EFLAGS		(144)
#define RSP		(152)
#define SS		(160)

#define ARGOFFSET	R11
#define SWFRAME		ORIG_RAX

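/*
 * Usage sketch (illustrative, not part of the original file): after just
 * SAVE_ARGS, %rsp points at the R11 slot, so the frame offsets above are
 * applied relative to ARGOFFSET - e.g., assuming the entry stub stored the
 * original %rax/error code and a hardware frame above the save area:
 *
 *	movq ORIG_RAX-ARGOFFSET(%rsp), %rax	# original syscall nr/error code
 *	movq RIP-ARGOFFSET(%rsp), %rcx		# interrupted instruction pointer
 */

/*
 * Save the caller-clobbered registers (rdi, rsi, rdx, rcx, rax, r8-r11)
 * below the current stack pointer; \addskip reserves extra room, and the
 * save_rcx/save_r891011 switches let callers skip registers they do not
 * need preserved.
 */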
	.macro SAVE_ARGS addskip=0, save_rcx=1, save_r891011=1
	subq  $9*8+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq_cfi rdi, 8*8
	movq_cfi rsi, 7*8
	movq_cfi rdx, 6*8

	.if \save_rcx
	movq_cfi rcx, 5*8
	.endif

	movq_cfi rax, 4*8

	.if \save_r891011
	movq_cfi r8,  3*8
	movq_cfi r9,  2*8
	movq_cfi r10, 1*8
	movq_cfi r11, 0*8
	.endif

	.endm

#define ARG_SKIP	(9*8)

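/*
 * Counterpart of SAVE_ARGS: reload rdi..r11 from the frame and undo the
 * stack adjustment.  \addskip should match the value given to SAVE_ARGS;
 * the rstor_* switches let callers skip registers that were never saved
 * or are about to be overwritten anyway.
 */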
	.macro RESTORE_ARGS rstor_rax=1, addskip=0, rstor_rcx=1, rstor_r11=1, \
			    rstor_r8910=1, rstor_rdx=1
	.if \rstor_r11
	movq_cfi_restore 0*8, r11
	.endif

	.if \rstor_r8910
	movq_cfi_restore 1*8, r10
	movq_cfi_restore 2*8, r9
	movq_cfi_restore 3*8, r8
	.endif

	.if \rstor_rax
	movq_cfi_restore 4*8, rax
	.endif

	.if \rstor_rcx
	movq_cfi_restore 5*8, rcx
	.endif

	.if \rstor_rdx
	movq_cfi_restore 6*8, rdx
	.endif

	movq_cfi_restore 7*8, rsi
	movq_cfi_restore 8*8, rdi

	.if ARG_SKIP+\addskip > 0
	addq $ARG_SKIP+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
	.endm

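/*
 * Reload the argument registers from a full save frame at \offset(%rsp)
 * without adjusting the stack (typically \offset = ARGOFFSET), e.g. after
 * a tracer may have modified the saved values.  Note that %rax is
 * refetched from the ORIG_RAX slot (the saved syscall number) rather than
 * from the RAX slot, unless skiprax is set.
 */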
	.macro LOAD_ARGS offset, skiprax=0
	movq \offset(%rsp),    %r11
	movq \offset+8(%rsp),  %r10
	movq \offset+16(%rsp), %r9
	movq \offset+24(%rsp), %r8
	movq \offset+40(%rsp), %rcx
	movq \offset+48(%rsp), %rdx
	movq \offset+56(%rsp), %rsi
	movq \offset+64(%rsp), %rdi
	.if \skiprax
	.else
	movq \offset+72(%rsp), %rax
	.endif
	.endm

#define REST_SKIP	(6*8)

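/* Save the callee-saved registers (rbx, rbp, r12-r15) below the argument frame. */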
	.macro SAVE_REST
	subq $REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq_cfi rbx, 5*8
	movq_cfi rbp, 4*8
	movq_cfi r12, 3*8
	movq_cfi r13, 2*8
	movq_cfi r14, 1*8
	movq_cfi r15, 0*8
	.endm

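/* Counterpart of SAVE_REST: reload the callee-saved registers and pop the frame. */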
	.macro RESTORE_REST
	movq_cfi_restore 0*8, r15
	movq_cfi_restore 1*8, r14
	movq_cfi_restore 2*8, r13
	movq_cfi_restore 3*8, r12
	movq_cfi_restore 4*8, rbp
	movq_cfi_restore 5*8, rbx
	addq $REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
	.endm

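/* Convenience wrappers for a complete frame: arguments plus callee-saved registers. */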
	.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
	.endm

	.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 1, \addskip
	.endm

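/*
 * 0xf1 is the ICEBP/int1 opcode: a one-byte instruction that raises a
 * debug exception, historically used for in-circuit-emulation breakpoints.
 */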
	.macro icebp
	.byte 0xf1
	.endm