/*
 * atomic64_t for 586+
 *
 * Copied from arch/x86/lib/atomic64_cx8_32.S
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>

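/*
 * SAVE/RESTORE push and pop a callee-saved register while keeping the
 * DWARF CFI annotations in sync with the stack adjustment.
 */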
.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm

.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm

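/*
 * read64 loads the 64-bit value at (\reg) into %edx:%eax.  Priming
 * %eax/%edx from %ebx/%ecx means that if the compare happens to succeed,
 * cmpxchg8b stores back the value that is already in memory, so the read
 * never changes the counter.
 */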
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm

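/* In: %ecx = pointer to the atomic64_t.  Out: %edx:%eax = current value. */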
ENTRY(atomic64_read_cx8)
	CFI_STARTPROC

	read64 %ecx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_read_cx8)

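/*
 * In: %esi = pointer, %ebx:%ecx = new value (low:high).
 * cmpxchg8b is retried until the compare succeeds and %ecx:%ebx is
 * stored; a failed compare reloads %edx:%eax for the next attempt.
 */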
ENTRY(atomic64_set_cx8)
	CFI_STARTPROC

1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_set_cx8)

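/*
 * In: %esi = pointer, %ebx:%ecx = new value (low:high).
 * Out: %edx:%eax = value that was previously in memory.
 */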
ENTRY(atomic64_xchg_cx8)
	CFI_STARTPROC

	movl %ebx, %eax
	movl %ecx, %edx
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_xchg_cx8)

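/*
 * addsub_return expands to atomic64_add_return_cx8/atomic64_sub_return_cx8.
 * In: %edx:%eax = operand, %ecx = pointer.  Out: %edx:%eax = new value.
 * The operand is parked in %edi:%esi and the pointer in %ebp so that
 * %edx:%eax and %ecx:%ebx stay free for the cmpxchg8b loop.
 */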
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ebp
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb

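/*
 * incdec_return expands to atomic64_inc_return_cx8/atomic64_dec_return_cx8.
 * In: %esi = pointer.  Out: %edx:%eax = new value.
 */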
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb

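/*
 * In: %esi = pointer.  Out: %edx:%eax = decremented value.
 * The decrement is only stored back when the result is not negative;
 * a negative result in %edx:%eax means the counter was left untouched.
 */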
ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbbl $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)

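/*
 * In: %edx:%eax = value to add, %ecx = pointer, %esi:%edi = "unless"
 * value (low:high).  Out: %eax = 1 if the add was performed, 0 if the
 * counter already equalled the "unless" value.
 */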
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these just push these two parameters on the stack */
	SAVE edi
	SAVE esi

	movl %ecx, %ebp
	movl %eax, %esi
	movl %edx, %edi

	read64 %ebp
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %esi, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)

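/*
 * In: %esi = pointer.  Out: %eax = 1 if the value was non-zero and has
 * been incremented, %eax = 0 if it was zero (nothing stored).
 */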
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	testl %eax, %eax
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl $1, %ebx
	adcl $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	RESTORE ebx
	ret
4:
	testl %edx, %edx
	jne 2b
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)