blob: 4be3c415b3e984e9507113849a238faf63200cce [file] [log] [blame]
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */
Jan Beulich8d379da2006-09-26 10:52:32 +02009#include <linux/linkage.h>
10#include <asm/dwarf2.h>
11
Andi Kleen7bcd3f32006-02-03 21:51:02 +010012#define FIX_ALIGNMENT 1
13
Andi Kleen3022d732006-09-26 10:52:39 +020014#include <asm/current.h>
15#include <asm/asm-offsets.h>
16#include <asm/thread_info.h>
17#include <asm/cpufeature.h>
18
/*
 * ALTERNATIVE_JUMP feature,orig,alt
 *
 * Emit a 5-byte near jump to \orig and record a same-size replacement
 * (a near jump to \alt) in .altinstructions / .altinstr_replacement,
 * so the alternatives patching code can rewrite the jump at runtime
 * when the CPU has \feature set.
 *
 * The jump is hand-encoded (0xe9 opcode + rel32) so that both the
 * original and the replacement are exactly 5 bytes long, matching the
 * two hardcoded length bytes in the record below.
 */
	.macro ALTERNATIVE_JUMP feature,orig,alt
0:
	.byte 0xe9			/* 32bit (rel32) near jump opcode */
	.long \orig-1f			/* by default jump to orig */
1:
	.section .altinstr_replacement,"ax"
2:	.byte 0xe9			/* near jump with 32bit immediate */
	.long \alt-1b			/* offset */ /* or alternatively to alt */
	.previous
	.section .altinstructions,"a"
	.align 8
	.quad 0b			/* address of original instruction */
	.quad 2b			/* address of replacement */
	.byte \feature			/* patch in replacement when feature is set */
	.byte 5				/* length of original (jmp rel32) */
	.byte 5				/* length of replacement */
	.previous
	.endm
Linus Torvalds1da177e2005-04-16 15:20:36 -070037
/*
 * ALIGN_DESTINATION
 *
 * Byte-copy from %rsi to %rdi until %rdi is 8-byte aligned, so the
 * main copy loop that follows can use aligned qword stores.
 *
 * In/out: rdi = destination, rsi = source, rdx = remaining byte count
 *         (decremented here by the number of alignment bytes copied).
 * Clobbers: al, ecx, flags.
 *
 * On a fault, the fixup adds the not-yet-copied alignment bytes back
 * into edx and jumps to copy_user_handle_tail.
 */
	.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx			/* ecx = dest & 7 */
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx			/* ecx = 8 - (dest & 7): bytes to align */
	subl %ecx,%edx			/* remove them from the main count */
100:	movb (%rsi),%al
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	.section __ex_table,"a"
	.align 8
	.quad 100b,103b			/* fault on load -> fixup */
	.quad 101b,103b			/* fault on store -> fixup */
	.previous
#endif
	.endm
66
/*
 * Standard copy_to_user with segment limit checking.
 *
 * In:  rdi = destination (user), rsi = source, rdx = count.
 * Out: eax = number of bytes NOT copied (0 on success), via the
 *      generic copy routine this tail-jumps to.
 *
 * Rejects ranges that wrap the address space or whose end lies at or
 * beyond the task's addr_limit, branching to bad_to_user.
 */
ENTRY(copy_to_user)
	CFI_STARTPROC
	GET_THREAD_INFO(%rax)
	movq %rdi,%rcx
	addq %rdx,%rcx			/* rcx = dest + count */
	jc bad_to_user			/* wrapped around the address space */
	cmpq TI_addr_limit(%rax),%rcx	/* end past the task's limit? */
	jae bad_to_user
	ALTERNATIVE_JUMP X86_FEATURE_REP_GOOD,copy_user_generic_unrolled,copy_user_generic_string
	CFI_ENDPROC
ENDPROC(copy_to_user)
Andi Kleen7bcd3f32006-02-03 21:51:02 +010079
/*
 * Standard copy_from_user with segment limit checking.
 *
 * In:  rdi = destination, rsi = source (user), rdx = count.
 * Out: eax = number of bytes NOT copied (0 on success), via the
 *      generic copy routine this tail-jumps to.
 *
 * Rejects ranges that wrap the address space or whose end lies at or
 * beyond the task's addr_limit, branching to bad_from_user (which
 * also zeroes the destination buffer).
 */
ENTRY(_copy_from_user)
	CFI_STARTPROC
	GET_THREAD_INFO(%rax)
	movq %rsi,%rcx
	addq %rdx,%rcx			/* rcx = src + count */
	jc bad_from_user		/* wrapped around the address space */
	cmpq TI_addr_limit(%rax),%rcx	/* end past the task's limit? */
	jae bad_from_user
	ALTERNATIVE_JUMP X86_FEATURE_REP_GOOD,copy_user_generic_unrolled,copy_user_generic_string
	CFI_ENDPROC
ENDPROC(_copy_from_user)
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020092
/*
 * copy_user_generic - copy with fault handling, no limit check here
 * (callers are expected to have validated the range).
 *
 * In:  rdi = destination, rsi = source, rdx = count.
 * Out: eax = uncopied bytes or 0 if successful.
 *
 * Dispatches to the unrolled or rep-string variant depending on
 * X86_FEATURE_REP_GOOD.
 */
ENTRY(copy_user_generic)
	CFI_STARTPROC
	ALTERNATIVE_JUMP X86_FEATURE_REP_GOOD,copy_user_generic_unrolled,copy_user_generic_string
	CFI_ENDPROC
ENDPROC(copy_user_generic)
98
/*
 * __copy_from_user_inatomic - copy with fault handling, no limit
 * check here (callers are expected to have validated the range).
 *
 * In:  rdi = destination, rsi = source (user), rdx = count.
 * Out: eax = uncopied bytes or 0 if successful.
 *
 * Same dispatch as copy_user_generic; the distinct symbol exists for
 * callers in atomic context.
 */
ENTRY(__copy_from_user_inatomic)
	CFI_STARTPROC
	ALTERNATIVE_JUMP X86_FEATURE_REP_GOOD,copy_user_generic_unrolled,copy_user_generic_string
	CFI_ENDPROC
ENDPROC(__copy_from_user_inatomic)
104
	.section .fixup,"ax"
	/* must zero dest */
/*
 * Error exits for the failed limit checks above.
 *
 * bad_from_user: a rejected copy_from_user must not leave the kernel
 * destination buffer uninitialized, so zero all rdx bytes at rdi
 * first, then fall through.
 * bad_to_user: report the full count as uncopied.
 *
 * In:  rdi = destination, rdx = count.
 * Out: eax = rdx (no bytes were copied).
 */
ENTRY(bad_from_user)
bad_from_user:
	CFI_STARTPROC
	movl %edx,%ecx			/* rep count = whole length */
	xorl %eax,%eax			/* fill byte = 0 */
	rep
	stosb				/* memset(dest, 0, count) */
bad_to_user:
	movl %edx,%eax			/* return all bytes as uncopied */
	ret
	CFI_ENDPROC
ENDPROC(bad_from_user)
	.previous
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +0200120
/*
 * copy_user_generic_unrolled - memory copy with exception handling.
 * This version is for CPUs like P4 that don't have efficient micro
 * code for rep movsq
 *
 * Input:
 * rdi destination
 * rsi source
 * rdx count
 *
 * Output:
 * eax uncopied bytes or 0 if successful.
 *
 * Structure: align the destination, move 64 bytes per iteration
 * through r8-r11, then a qword tail loop, then a byte tail loop.
 * Faults land in the .fixup code, which reconstructs the number of
 * bytes still uncopied into edx and calls copy_user_handle_tail.
 */
ENTRY(copy_user_generic_unrolled)
	CFI_STARTPROC
	cmpl $8,%edx
	jb 20f				/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx			/* edx = count % 64 (tail bytes) */
	shrl $6,%ecx			/* ecx = number of 64-byte chunks */
	jz 17f
	/* main loop: 64 bytes per iteration, staged through r8-r11 */
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movq %r8,(%rdi)
6:	movq %r9,1*8(%rdi)
7:	movq %r10,2*8(%rdi)
8:	movq %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movq %r8,4*8(%rdi)
14:	movq %r9,5*8(%rdi)
15:	movq %r10,6*8(%rdi)
16:	movq %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
17:	movl %edx,%ecx			/* qword tail: ecx = remaining / 8 */
	andl $7,%edx			/* edx = remaining % 8 */
	shrl $3,%ecx
	jz 20f
18:	movq (%rsi),%r8
19:	movq %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
20:	andl %edx,%edx			/* any byte tail left? */
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
23:	xor %eax,%eax			/* success: 0 bytes uncopied */
	ret

	.section .fixup,"ax"
	/*
	 * Fault recovery: rebuild the uncopied byte count in edx from
	 * the loop counters, then hand off to copy_user_handle_tail.
	 */
30:	shll $6,%ecx			/* 64-byte loop fault: 64*chunks left... */
	addl %ecx,%edx			/* ...plus the tail already in edx */
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx		/* qword loop fault: 8*ecx + edx */
	jmp 60f
50:	movl %ecx,%edx			/* byte loop fault: ecx bytes left */
60:	jmp copy_user_handle_tail /* ecx is zerorest also */
	.previous

	.section __ex_table,"a"
	.align 8
	.quad 1b,30b
	.quad 2b,30b
	.quad 3b,30b
	.quad 4b,30b
	.quad 5b,30b
	.quad 6b,30b
	.quad 7b,30b
	.quad 8b,30b
	.quad 9b,30b
	.quad 10b,30b
	.quad 11b,30b
	.quad 12b,30b
	.quad 13b,30b
	.quad 14b,30b
	.quad 15b,30b
	.quad 16b,30b
	.quad 18b,40b
	.quad 19b,40b
	.quad 21b,50b
	.quad 22b,50b
	.previous
	CFI_ENDPROC
ENDPROC(copy_user_generic_unrolled)
Jan Beulich8d379da2006-09-26 10:52:32 +0200220
/* Some CPUs run faster using the string copy instructions.
 * This is also a lot simpler. Use them when possible.
 *
 * Only 4GB of copy is supported. This shouldn't be a problem
 * because the kernel normally only writes from/to page sized chunks
 * even if user space passed a longer buffer.
 * And more would be dangerous because both Intel and AMD have
 * errata with rep movsq > 4GB. If someone feels the need to fix
 * this please consider this.
 *
 * Input:
 * rdi destination
 * rsi source
 * rdx count
 *
 * Output:
 * eax uncopied bytes or 0 if successful.
 */
ENTRY(copy_user_generic_string)
	CFI_STARTPROC
	andl %edx,%edx
	jz 4f				/* zero-length copy: nothing to do */
	cmpl $8,%edx
	jb 2f				/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	shrl $3,%ecx			/* ecx = qwords to copy */
	andl $7,%edx			/* edx = trailing bytes */
1:	rep
	movsq				/* bulk copy, 8 bytes at a time */
2:	movl %edx,%ecx
3:	rep
	movsb				/* copy the < 8 remaining bytes */
4:	xorl %eax,%eax			/* success: 0 bytes uncopied */
	ret

	.section .fixup,"ax"
	/* movsq fault: bytes left = 8 * remaining qwords + tail in edx */
11:	lea (%rdx,%rcx,8),%rcx
12:	movl %ecx,%edx		/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	.section __ex_table,"a"
	.align 8
	.quad 1b,11b			/* fault in rep movsq */
	.quad 3b,12b			/* fault in rep movsb */
	.previous
	CFI_ENDPROC
ENDPROC(copy_user_generic_string)