/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020019 .macro ALIGN_DESTINATION
Andi Kleen0812a572007-02-13 13:26:19 +010020#ifdef FIX_ALIGNMENT
21 /* check for bad alignment of destination */
22 movl %edi,%ecx
23 andl $7,%ecx
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020024 jz 102f /* already aligned */
25 subl $8,%ecx
26 negl %ecx
27 subl %ecx,%edx
28100: movb (%rsi),%al
29101: movb %al,(%rdi)
Andi Kleen0812a572007-02-13 13:26:19 +010030 incq %rsi
31 incq %rdi
32 decl %ecx
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020033 jnz 100b
34102:
35 .section .fixup,"ax"
Vitaly Mayatskikhafd962a2008-07-30 13:30:14 +020036103: addl %ecx,%edx /* ecx is zerorest also */
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020037 jmp copy_user_handle_tail
Andi Kleen0812a572007-02-13 13:26:19 +010038 .previous
39
H. Peter Anvin0d8559f2012-04-20 12:19:51 -070040 _ASM_EXTABLE(100b,103b)
41 _ASM_EXTABLE(101b,103b)
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020042#endif
43 .endm
Andi Kleen0812a572007-02-13 13:26:19 +010044
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020045/*
46 * copy_user_nocache - Uncached memory copy with exception handling
47 * This will force destination/source out of cache for more performance.
48 */
49ENTRY(__copy_user_nocache)
50 CFI_STARTPROC
51 cmpl $8,%edx
52 jb 20f /* less then 8 bytes, go to byte copy loop */
53 ALIGN_DESTINATION
54 movl %edx,%ecx
55 andl $63,%edx
56 shrl $6,%ecx
57 jz 17f
581: movq (%rsi),%r8
592: movq 1*8(%rsi),%r9
603: movq 2*8(%rsi),%r10
614: movq 3*8(%rsi),%r11
625: movnti %r8,(%rdi)
636: movnti %r9,1*8(%rdi)
647: movnti %r10,2*8(%rdi)
658: movnti %r11,3*8(%rdi)
669: movq 4*8(%rsi),%r8
6710: movq 5*8(%rsi),%r9
6811: movq 6*8(%rsi),%r10
6912: movq 7*8(%rsi),%r11
7013: movnti %r8,4*8(%rdi)
7114: movnti %r9,5*8(%rdi)
7215: movnti %r10,6*8(%rdi)
7316: movnti %r11,7*8(%rdi)
74 leaq 64(%rsi),%rsi
75 leaq 64(%rdi),%rdi
76 decl %ecx
77 jnz 1b
7817: movl %edx,%ecx
Andi Kleen0812a572007-02-13 13:26:19 +010079 andl $7,%edx
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +020080 shrl $3,%ecx
81 jz 20f
8218: movq (%rsi),%r8
8319: movnti %r8,(%rdi)
84 leaq 8(%rsi),%rsi
85 leaq 8(%rdi),%rdi
86 decl %ecx
87 jnz 18b
8820: andl %edx,%edx
89 jz 23f
90 movl %edx,%ecx
9121: movb (%rsi),%al
9222: movb %al,(%rdi)
93 incq %rsi
94 incq %rdi
95 decl %ecx
96 jnz 21b
9723: xorl %eax,%eax
98 sfence
99 ret
100
101 .section .fixup,"ax"
10230: shll $6,%ecx
Andi Kleen0812a572007-02-13 13:26:19 +0100103 addl %ecx,%edx
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +0200104 jmp 60f
Jeremy Fitzhardinge27cb0a72008-07-10 12:52:52 -070010540: lea (%rdx,%rcx,8),%rdx
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +0200106 jmp 60f
10750: movl %ecx,%edx
10860: sfence
Vitaly Mayatskikhad2fc2c2008-07-02 15:53:13 +0200109 jmp copy_user_handle_tail
110 .previous
111
H. Peter Anvin0d8559f2012-04-20 12:19:51 -0700112 _ASM_EXTABLE(1b,30b)
113 _ASM_EXTABLE(2b,30b)
114 _ASM_EXTABLE(3b,30b)
115 _ASM_EXTABLE(4b,30b)
116 _ASM_EXTABLE(5b,30b)
117 _ASM_EXTABLE(6b,30b)
118 _ASM_EXTABLE(7b,30b)
119 _ASM_EXTABLE(8b,30b)
120 _ASM_EXTABLE(9b,30b)
121 _ASM_EXTABLE(10b,30b)
122 _ASM_EXTABLE(11b,30b)
123 _ASM_EXTABLE(12b,30b)
124 _ASM_EXTABLE(13b,30b)
125 _ASM_EXTABLE(14b,30b)
126 _ASM_EXTABLE(15b,30b)
127 _ASM_EXTABLE(16b,30b)
128 _ASM_EXTABLE(18b,40b)
129 _ASM_EXTABLE(19b,40b)
130 _ASM_EXTABLE(21b,50b)
131 _ASM_EXTABLE(22b,50b)
Andi Kleen0812a572007-02-13 13:26:19 +0100132 CFI_ENDPROC
133ENDPROC(__copy_user_nocache)