/* Copyright 2002 Andi Kleen */

#include <linux/linkage.h>
#include <asm/errno.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
#include <asm/export.h>

/*
 * We build a jump to memcpy_orig by default, which gets NOPped out on
 * the majority of x86 CPUs, which set REP_GOOD. In addition, on CPUs
 * that have the enhanced REP MOVSB/STOSB feature (ERMS), those NOPs
 * are changed to a jmp to memcpy_erms, which does the REP; MOVSB
 * memory copy.
 */
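
/*
 * A hedged C sketch of the resulting dispatch (illustration only; the
 * real selection happens once, at boot, when apply_alternatives()
 * patches the ALTERNATIVE_2 site below, not per call):
 *
 *	void *memcpy(void *dst, const void *src, size_t len)
 *	{
 *		if (cpu_has(ERMS))		// hypothetical helper
 *			return memcpy_erms(dst, src, len);
 *		if (cpu_has(REP_GOOD))
 *			return memcpy_rep_movsq(dst, src, len);	// body below
 *		return memcpy_orig(dst, src, len);
 *	}
 */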

.weak memcpy

/*
 * memcpy - Copy a memory block.
 *
 * Input:
 *  rdi destination
 *  rsi source
 *  rdx count
 *
 * Output:
 * rax original destination
 */
ENTRY(__memcpy)
ENTRY(memcpy)
	ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \
		      "jmp memcpy_erms", X86_FEATURE_ERMS

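	/*
	 * REP_GOOD path: copy len/8 qwords with REP MOVSQ, then the
	 * remaining len%8 bytes with REP MOVSB.
	 */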
	movq %rdi, %rax
	movq %rdx, %rcx
	shrq $3, %rcx
	andl $7, %edx
	rep movsq
	movl %edx, %ecx
	rep movsb
	ret
ENDPROC(memcpy)
ENDPROC(__memcpy)
EXPORT_SYMBOL(memcpy)
EXPORT_SYMBOL(__memcpy)

/*
 * memcpy_erms() - enhanced fast string memcpy. This is faster and
 * simpler than memcpy. Use memcpy_erms when possible.
 */
ENTRY(memcpy_erms)
	movq %rdi, %rax
	movq %rdx, %rcx
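	/* On ERMS parts a single REP MOVSB is fast at any size/alignment */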
	rep movsb
	ret
ENDPROC(memcpy_erms)

ENTRY(memcpy_orig)
	movq %rdi, %rax

	cmpq $0x20, %rdx
	jb .Lhandle_tail

	/*
	 * Check whether a false memory dependence could occur, then
	 * jump to the corresponding copy mode.
	 */
	cmp %dil, %sil
	jl .Lcopy_backward
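	/*
	 * Only the low 8 bits of each pointer are compared above: a
	 * cheap heuristic. Roughly, if the low byte of the source is
	 * (signed) below that of the destination, a forward copy risks
	 * stores to the destination aliasing with later loads from the
	 * source (a store-forwarding false dependence), so the backward
	 * copy is used instead.
	 */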
	subq $0x20, %rdx
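	/*
	 * rdx is pre-biased by 0x20 so that the subq at the top of the
	 * loop sets CF once fewer than 32 bytes remain; jae then falls
	 * through, and the addl after the loop restores the true tail
	 * count for .Lhandle_tail.
	 */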
.Lcopy_forward_loop:
	subq $0x20, %rdx

	/*
	 * Move in blocks of 4x8 bytes:
	 */
	movq 0*8(%rsi), %r8
	movq 1*8(%rsi), %r9
	movq 2*8(%rsi), %r10
	movq 3*8(%rsi), %r11
	leaq 4*8(%rsi), %rsi

	movq %r8, 0*8(%rdi)
	movq %r9, 1*8(%rdi)
	movq %r10, 2*8(%rdi)
	movq %r11, 3*8(%rdi)
	leaq 4*8(%rdi), %rdi
	jae .Lcopy_forward_loop
	addl $0x20, %edx
	jmp .Lhandle_tail

.Lcopy_backward:
	/*
	 * Calculate copy position to tail.
	 */
	addq %rdx, %rsi
	addq %rdx, %rdi
	subq $0x20, %rdx
	/*
	 * At most 3 ALU operations can issue in one cycle, so the
	 * .p2align below appends NOPs to align the loop to a 16-byte
	 * chunk.
	 */
	.p2align 4
.Lcopy_backward_loop:
	subq $0x20, %rdx
	movq -1*8(%rsi), %r8
	movq -2*8(%rsi), %r9
	movq -3*8(%rsi), %r10
	movq -4*8(%rsi), %r11
	leaq -4*8(%rsi), %rsi
	movq %r8, -1*8(%rdi)
	movq %r9, -2*8(%rdi)
	movq %r10, -3*8(%rdi)
	movq %r11, -4*8(%rdi)
	leaq -4*8(%rdi), %rdi
	jae .Lcopy_backward_loop

	/*
	 * Calculate copy position to head.
	 */
	addl $0x20, %edx
	subq %rdx, %rsi
	subq %rdx, %rdi
.Lhandle_tail:
	cmpl $16, %edx
	jb .Lless_16bytes

	/*
	 * Move data from 16 bytes to 31 bytes.
	 */
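	/*
	 * Two possibly overlapping 16-byte windows: the first two
	 * qwords cover the head, the last two (addressed via rsi+rdx)
	 * cover the tail. All loads are issued before any store, so
	 * bytes in the overlap are merely written twice.
	 */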
	movq 0*8(%rsi), %r8
	movq 1*8(%rsi), %r9
	movq -2*8(%rsi, %rdx), %r10
	movq -1*8(%rsi, %rdx), %r11
	movq %r8, 0*8(%rdi)
	movq %r9, 1*8(%rdi)
	movq %r10, -2*8(%rdi, %rdx)
	movq %r11, -1*8(%rdi, %rdx)
	retq
	.p2align 4
.Lless_16bytes:
	cmpl $8, %edx
	jb .Lless_8bytes
	/*
	 * Move data from 8 bytes to 15 bytes.
	 */
	movq 0*8(%rsi), %r8
	movq -1*8(%rsi, %rdx), %r9
	movq %r8, 0*8(%rdi)
	movq %r9, -1*8(%rdi, %rdx)
	retq
	.p2align 4
.Lless_8bytes:
	cmpl $4, %edx
	jb .Lless_3bytes

	/*
	 * Move data from 4 bytes to 7 bytes.
	 */
	movl (%rsi), %ecx
	movl -4(%rsi, %rdx), %r8d
	movl %ecx, (%rdi)
	movl %r8d, -4(%rdi, %rdx)
	retq
	.p2align 4
.Lless_3bytes:
	subl $1, %edx
	jb .Lend
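	/*
	 * The subl above set CF when rdx was 0 (caught by the jb) and
	 * ZF when rdx was exactly 1; movzbl below preserves the flags,
	 * so the later jz still dispatches on that ZF.
	 */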
	/*
	 * Move data from 1 byte to 3 bytes.
	 */
	movzbl (%rsi), %ecx
	jz .Lstore_1byte
	movzbq 1(%rsi), %r8
	movzbq (%rsi, %rdx), %r9
	movb %r8b, 1(%rdi)
	movb %r9b, (%rdi, %rdx)
.Lstore_1byte:
	movb %cl, (%rdi)

.Lend:
	retq
ENDPROC(memcpy_orig)

#ifndef CONFIG_UML
/*
 * memcpy_mcsafe_unrolled - memory copy with machine check exception handling
 * Note that we only catch machine checks when reading the source addresses.
 * Writes to target are posted and don't generate machine checks.
 */
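/*
 * Hedged usage sketch (return convention only; the authoritative
 * prototype lives in the kernel headers):
 *
 *	int rc = memcpy_mcsafe_unrolled(dst, src, cnt);
 *	if (rc)				// -EFAULT: machine check on a source read
 *		handle_poisoned_source();	// hypothetical caller-side handler
 *	// rc == 0: all cnt bytes were copied
 */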
ENTRY(memcpy_mcsafe_unrolled)
	cmpl $8, %edx
	/* Less than 8 bytes? Go to byte copy loop */
	jb .L_no_whole_words

	/* Check for bad alignment of source */
	testl $7, %esi
	/* Already aligned */
	jz .L_8byte_aligned

	/* Copy one byte at a time until source is 8-byte aligned */
	movl %esi, %ecx
	andl $7, %ecx
	subl $8, %ecx
	negl %ecx
	subl %ecx, %edx
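	/* ecx is now 8 - (src & 7): the number of leading bytes needed
	 * to reach 8-byte source alignment, already deducted from edx */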
.L_copy_leading_bytes:
	movb (%rsi), %al
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_copy_leading_bytes

.L_8byte_aligned:
	/* Figure out how many whole cache lines (64-bytes) to copy */
	movl %edx, %ecx
	andl $63, %edx
	shrl $6, %ecx
	jz .L_no_whole_cache_lines

	/* Loop copying whole cache lines */
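	/*
	 * Each load below carries its own .L_cache_w* label so that the
	 * exception table at the end of this file can fix up a machine
	 * check on any individual read. Only the loads are protected;
	 * the stores are posted and need no fixup.
	 */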
.L_cache_w0: movq (%rsi), %r8
.L_cache_w1: movq 1*8(%rsi), %r9
.L_cache_w2: movq 2*8(%rsi), %r10
.L_cache_w3: movq 3*8(%rsi), %r11
	movq %r8, (%rdi)
	movq %r9, 1*8(%rdi)
	movq %r10, 2*8(%rdi)
	movq %r11, 3*8(%rdi)
.L_cache_w4: movq 4*8(%rsi), %r8
.L_cache_w5: movq 5*8(%rsi), %r9
.L_cache_w6: movq 6*8(%rsi), %r10
.L_cache_w7: movq 7*8(%rsi), %r11
	movq %r8, 4*8(%rdi)
	movq %r9, 5*8(%rdi)
	movq %r10, 6*8(%rdi)
	movq %r11, 7*8(%rdi)
	leaq 64(%rsi), %rsi
	leaq 64(%rdi), %rdi
	decl %ecx
	jnz .L_cache_w0

	/* Are there any trailing 8-byte words? */
.L_no_whole_cache_lines:
	movl %edx, %ecx
	andl $7, %edx
	shrl $3, %ecx
	jz .L_no_whole_words

	/* Copy trailing words */
.L_copy_trailing_words:
	movq (%rsi), %r8
	movq %r8, (%rdi)
	leaq 8(%rsi), %rsi
	leaq 8(%rdi), %rdi
	decl %ecx
	jnz .L_copy_trailing_words

	/* Any trailing bytes? */
.L_no_whole_words:
	andl %edx, %edx
	jz .L_done_memcpy_trap

	/* Copy trailing bytes */
	movl %edx, %ecx
.L_copy_trailing_bytes:
	movb (%rsi), %al
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_copy_trailing_bytes

	/* Copy successful. Return zero */
.L_done_memcpy_trap:
	xorq %rax, %rax
	ret
ENDPROC(memcpy_mcsafe_unrolled)
EXPORT_SYMBOL_GPL(memcpy_mcsafe_unrolled)

	.section .fixup, "ax"
	/* Return -EFAULT for any failure */
.L_memcpy_mcsafe_fail:
	mov $-EFAULT, %rax
	ret

	.previous

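/*
 * Each entry below maps a labelled load to the fixup above: if a
 * machine check (or fault) is taken at that instruction, execution
 * resumes at .L_memcpy_mcsafe_fail and the caller sees -EFAULT.
 */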
	_ASM_EXTABLE_FAULT(.L_copy_leading_bytes, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w0, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w1, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w2, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w3, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w4, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w5, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w6, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_cache_w7, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_copy_trailing_words, .L_memcpy_mcsafe_fail)
	_ASM_EXTABLE_FAULT(.L_copy_trailing_bytes, .L_memcpy_mcsafe_fail)
#endif