/* Copyright 2002 Andi Kleen, SuSE Labs */

#include <linux/linkage.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
#include <asm/export.h>

.weak memset

/*
 * ISO C memset - set a memory block to a byte value. This function uses
 * fast string instructions to get better performance than the original
 * function. The code is simpler and shorter than the original function
 * as well.
 *
 * rdi	destination
 * rsi	value (char)
 * rdx	count (bytes)
 *
 * rax	original destination
 */
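/*
 * For reference, the register assignments above follow the SysV AMD64
 * calling convention for the ISO C prototype
 * void *memset(void *s, int c, size_t n):
 * s in %rdi, c in %rsi, n in %rdx, return value (s) in %rax.
 */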
ENTRY(memset)
ENTRY(__memset)
	/*
	 * Some CPUs support the enhanced REP MOVSB/STOSB (ERMS) feature.
	 * It is recommended to use it when available. If it is not, use the
	 * fast string instructions. Otherwise, fall back to the original
	 * memset function.
	 */
	ALTERNATIVE_2 "jmp memset_orig", "", X86_FEATURE_REP_GOOD, \
		      "jmp memset_erms", X86_FEATURE_ERMS
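	/*
	 * ALTERNATIVE_2 patches this site at boot: by default it jumps to
	 * memset_orig; on CPUs with X86_FEATURE_REP_GOOD the jump is patched
	 * out, so execution falls through to the rep stosq path below; on
	 * CPUs with X86_FEATURE_ERMS it becomes a jump to memset_erms.
	 */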

	movq %rdi,%r9
	movq %rdx,%rcx
	andl $7,%edx
	shrq $3,%rcx
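	/* %rcx now holds the number of whole qwords, %edx the 0..7 trailing bytes */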
	/* expand byte value */
	movzbl %sil,%esi
	movabs $0x0101010101010101,%rax
	imulq %rsi,%rax
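	/*
	 * Multiplying the zero-extended byte by 0x0101010101010101 replicates
	 * it into every byte of %rax, e.g. 0xab becomes 0xabababababababab.
	 */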
	rep stosq
	movl %edx,%ecx
	rep stosb
	movq %r9,%rax
	ret
ENDPROC(memset)
ENDPROC(__memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)

/*
 * ISO C memset - set a memory block to a byte value. This variant uses
 * the enhanced REP STOSB (ERMS) feature in place of the fast string
 * version. The code is simpler and shorter than the fast string version
 * as well.
 *
 * rdi	destination
 * rsi	value (char)
 * rdx	count (bytes)
 *
 * rax	original destination
 */
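/*
 * Note: on ERMS-capable CPUs the microcoded REP STOSB is documented to
 * handle alignment and large counts efficiently on its own, which is why
 * this variant needs no explicit byte expansion or alignment code.
 */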
ENTRY(memset_erms)
	movq %rdi,%r9
	movb %sil,%al
	movq %rdx,%rcx
	rep stosb
	movq %r9,%rax
	ret
ENDPROC(memset_erms)

ENTRY(memset_orig)
	movq %rdi,%r10

	/* expand byte value */
	movzbl %sil,%ecx
	movabs $0x0101010101010101,%rax
	imulq %rcx,%rax
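	/* same byte-replication multiply as in the fast string path above */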

	/* align dst */
	movl %edi,%r9d
	andl $7,%r9d
	jnz .Lbad_alignment
.Lafter_bad_alignment:
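	/* here %rdi is 8-byte aligned and %rdx holds the remaining count */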

	movq %rdx,%rcx
	shrq $6,%rcx
	jz .Lhandle_tail
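	/* %rcx = number of 64-byte blocks; each loop iteration below stores 64 bytes */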

	.p2align 4
.Lloop_64:
	decq %rcx
	movq %rax,(%rdi)
	movq %rax,8(%rdi)
	movq %rax,16(%rdi)
	movq %rax,24(%rdi)
	movq %rax,32(%rdi)
	movq %rax,40(%rdi)
	movq %rax,48(%rdi)
	movq %rax,56(%rdi)
	leaq 64(%rdi),%rdi
	jnz .Lloop_64

	/* Handle tail in loops. The loops should be faster than
	   hard-to-predict jump tables. */
	.p2align 4
.Lhandle_tail:
	movl %edx,%ecx
	andl $63&(~7),%ecx
	jz .Lhandle_7
	shrl $3,%ecx
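	/* %ecx = number of whole qwords in the 0..63-byte tail */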
	.p2align 4
.Lloop_8:
	decl %ecx
	movq %rax,(%rdi)
	leaq 8(%rdi),%rdi
	jnz .Lloop_8

.Lhandle_7:
	andl $7,%edx
	jz .Lende
	.p2align 4
.Lloop_1:
	decl %edx
	movb %al,(%rdi)
	leaq 1(%rdi),%rdi
	jnz .Lloop_1

.Lende:
	movq %r10,%rax
	ret

.Lbad_alignment:
	cmpq $7,%rdx
	jbe .Lhandle_7
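	/*
	 * Misaligned destination with more than 7 bytes to go: do one
	 * unaligned 8-byte store, then advance %rdi to the next 8-byte
	 * boundary and drop the %r8 = 8 - (dst & 7) bytes just covered
	 * from the count before rejoining the aligned path.
	 */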
	movq %rax,(%rdi)	/* unaligned store */
	movq $8,%r8
	subq %r9,%r8
	addq %r8,%rdi
	subq %r8,%rdx
	jmp .Lafter_bad_alignment
.Lfinal:
ENDPROC(memset_orig)