/* Copyright 2002 Andi Kleen, SuSE Labs */

#include <linux/linkage.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
#include <asm/export.h>

.weak memset
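/*
 * memset is declared weak, presumably so that an instrumented
 * implementation (such as KASAN's) can override it at link time;
 * __memset below always refers to this architecture version.
 */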

/*
 * ISO C memset - set a memory block to a byte value. This function uses fast
 * string instructions to get better performance than the original function.
 * The code is simpler and shorter than the original function as well.
 *
 * rdi   destination
 * rsi   value (char)
 * rdx   count (bytes)
 *
 * rax   original destination
 */
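/*
 * For reference, the register use matches the ISO C prototype under the
 * x86-64 SysV calling convention:
 *
 *	void *memset(void *dest, int c, size_t count);
 *
 * i.e. %rdi = dest, %esi = c, %rdx = count, and the original dest pointer
 * comes back in %rax.
 */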
ENTRY(memset)
ENTRY(__memset)
	/*
	 * Some CPUs support the enhanced REP MOVSB/STOSB (ERMS) feature. It is
	 * recommended to use it when available. If it is not, use the fast
	 * string instructions instead.
	 *
	 * Otherwise, fall back to the original memset function.
	 */
	ALTERNATIVE_2 "jmp memset_orig", "", X86_FEATURE_REP_GOOD, \
		      "jmp memset_erms", X86_FEATURE_ERMS
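	/*
	 * The ALTERNATIVE_2 above is patched at boot: the default path jumps
	 * to memset_orig; on CPUs with X86_FEATURE_REP_GOOD the jump is
	 * replaced with NOPs so execution falls through to the rep stosq code
	 * below; on CPUs with X86_FEATURE_ERMS it becomes a jump to
	 * memset_erms.
	 */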

	movq %rdi,%r9
	movq %rdx,%rcx
	andl $7,%edx
	shrq $3,%rcx
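	/* %rcx = number of qwords to store, %edx = trailing bytes (count % 8) */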
	/* expand byte value */
	movzbl %sil,%esi
	movabs $0x0101010101010101,%rax
	imulq %rsi,%rax
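	/*
	 * Multiplying the zero-extended byte by 0x0101010101010101 replicates
	 * it into every byte of %rax, e.g. 0x5a * 0x0101010101010101 =
	 * 0x5a5a5a5a5a5a5a5a.
	 */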
	rep stosq			/* store %rcx qwords of the pattern */
	movl %edx,%ecx
	rep stosb			/* store the trailing 0..7 bytes */
	movq %r9,%rax
	ret
ENDPROC(memset)
ENDPROC(__memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)

/*
 * ISO C memset - set a memory block to a byte value. This function uses the
 * enhanced rep stosb feature in place of the fast string function above.
 * The code is simpler and shorter than the fast string function as well.
 *
 * rdi   destination
 * rsi   value (char)
 * rdx   count (bytes)
 *
 * rax   original destination
 */
ENTRY(memset_erms)
	movq %rdi,%r9
	movb %sil,%al
	movq %rdx,%rcx
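	/*
	 * With ERMS, a single rep stosb over the full byte count is expected
	 * to perform at least as well as the rep stosq + tail sequence, even
	 * for unaligned or short buffers.
	 */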
	rep stosb
	movq %r9,%rax
	ret
ENDPROC(memset_erms)

ENTRY(memset_orig)
	movq %rdi,%r10

	/* expand byte value */
	movzbl %sil,%ecx
	movabs $0x0101010101010101,%rax
	imulq %rcx,%rax
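	/* %rax now holds the fill byte replicated into all eight byte lanes */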

	/* align dst */
	movl %edi,%r9d
	andl $7,%r9d
	jnz .Lbad_alignment
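	/*
	 * .Lbad_alignment (below) stores one unaligned qword, then advances
	 * %rdi to the next 8-byte boundary and jumps back here.
	 */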
.Lafter_bad_alignment:

	movq %rdx,%rcx
	shrq $6,%rcx
	jz .Lhandle_tail

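	/* Main loop: each iteration stores 64 bytes using eight qword stores. */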
	.p2align 4
.Lloop_64:
	decq %rcx
	movq %rax,(%rdi)
	movq %rax,8(%rdi)
	movq %rax,16(%rdi)
	movq %rax,24(%rdi)
	movq %rax,32(%rdi)
	movq %rax,40(%rdi)
	movq %rax,48(%rdi)
	movq %rax,56(%rdi)
	leaq 64(%rdi),%rdi
	jnz .Lloop_64

	/*
	 * Handle the tail in loops. The loops should be faster than
	 * hard-to-predict jump tables.
	 */
	.p2align 4
.Lhandle_tail:
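	/*
	 * The low 6 bits of %edx give the bytes left after the 64-byte loop:
	 * store the multiple-of-8 part with qword stores, then the final
	 * 0..7 bytes one at a time.
	 */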
	movl %edx,%ecx
	andl $63&(~7),%ecx
	jz .Lhandle_7
	shrl $3,%ecx
	.p2align 4
.Lloop_8:
	decl %ecx
	movq %rax,(%rdi)
	leaq 8(%rdi),%rdi
	jnz .Lloop_8

.Lhandle_7:
	andl $7,%edx
	jz .Lende
	.p2align 4
.Lloop_1:
	decl %edx
	movb %al,(%rdi)
	leaq 1(%rdi),%rdi
	jnz .Lloop_1

.Lende:
	movq %r10,%rax
	ret

.Lbad_alignment:
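	/*
	 * Destination is not 8-byte aligned. With 7 bytes or fewer the byte
	 * loop handles everything; otherwise store one unaligned qword,
	 * advance %rdi to the next 8-byte boundary and reduce %rdx
	 * accordingly.
	 */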
	cmpq $7,%rdx
	jbe .Lhandle_7
	movq %rax,(%rdi)	/* unaligned store */
	movq $8,%r8
	subq %r9,%r8
	addq %r8,%rdi
	subq %r8,%rdx
	jmp .Lafter_bad_alignment
.Lfinal:
ENDPROC(memset_orig)