/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>

/*
 * ALIGN_DESTINATION: byte-copy forward until the destination pointer
 * is 8-byte aligned, so the following bulk loop can use qword stores.
 *
 * In:    rdi = destination, rsi = source, edx = total byte count
 * Out:   rdi/rsi advanced past the alignment bytes, edx reduced by
 *        the number of bytes copied here
 * Clobb: eax, ecx, flags
 *
 * On a fault in the byte copy, the .fixup entry restores edx to the
 * full number of not-yet-copied bytes (the uncopied alignment bytes
 * in ecx plus the remainder already subtracted from edx) and jumps to
 * copy_user_handle_tail to finish/account the partial copy.
 */
.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx			/* ecx = bytes up to next 8-byte boundary */
	subl %ecx,%edx			/* take them out of the main count up front */
100:	movb (%rsi),%al			/* one byte at a time until aligned */
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	_ASM_EXTABLE(100b,103b)
	_ASM_EXTABLE(101b,103b)
#endif
.endm
Andi Kleen | 0812a57 | 2007-02-13 13:26:19 +0100 | [diff] [blame] | 45 | |
/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination/source out of cache for more performance.
 *
 * In:    rdi = destination, rsi = source, edx = byte count
 * Out:   eax = 0 on full success; on a fault the .fixup code computes
 *        edx = bytes not yet copied and tails into copy_user_handle_tail
 *        (which presumably zero-fills/accounts the rest — defined elsewhere).
 * Uses:  rcx, r8-r11 as scratch; non-temporal (movnti) stores bypass
 *        the cache, hence the sfence before every exit path.
 */
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	ASM_STAC			/* open user-space access window (SMAP) */
	cmpl $8,%edx
	jb 20f				/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx			/* edx = remainder after 64-byte chunks */
	shrl $6,%ecx			/* ecx = number of 64-byte chunks */
	jz 17f
	/*
	 * Main loop: one 64-byte cache line per iteration,
	 * 8 qword loads followed by 8 non-temporal qword stores.
	 * Every access has a numeric label so the exception table
	 * below can map a fault to fixup 30.
	 */
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movnti %r8,(%rdi)
6:	movnti %r9,1*8(%rdi)
7:	movnti %r10,2*8(%rdi)
8:	movnti %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movnti %r8,4*8(%rdi)
14:	movnti %r9,5*8(%rdi)
15:	movnti %r10,6*8(%rdi)
16:	movnti %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
	/* Qword loop for the 8..63 byte tail. */
17:	movl %edx,%ecx
	andl $7,%edx			/* edx = final sub-qword remainder */
	shrl $3,%ecx			/* ecx = remaining full qwords */
	jz 20f
18:	movq (%rsi),%r8
19:	movnti %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
	/* Byte loop for the last 0..7 bytes. */
20:	andl %edx,%edx
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
23:	xorl %eax,%eax			/* success: return 0 */
	ASM_CLAC			/* close user-space access window */
	sfence				/* drain non-temporal stores before return */
	ret

	.section .fixup,"ax"
	/*
	 * Fault fixups: reconstruct edx = bytes still uncopied for the
	 * stage that faulted, then hand off to the common tail handler.
	 */
30:	shll $6,%ecx			/* chunks left * 64 ... */
	addl %ecx,%edx			/* ... plus the already-split-off remainder */
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx		/* qwords left * 8 + byte remainder */
	jmp 60f
50:	movl %ecx,%edx			/* bytes left in the byte loop */
60:	sfence				/* NT stores must be globally visible first */
	jmp copy_user_handle_tail
	.previous

	/* Map every load/store above to its stage's fixup. */
	_ASM_EXTABLE(1b,30b)
	_ASM_EXTABLE(2b,30b)
	_ASM_EXTABLE(3b,30b)
	_ASM_EXTABLE(4b,30b)
	_ASM_EXTABLE(5b,30b)
	_ASM_EXTABLE(6b,30b)
	_ASM_EXTABLE(7b,30b)
	_ASM_EXTABLE(8b,30b)
	_ASM_EXTABLE(9b,30b)
	_ASM_EXTABLE(10b,30b)
	_ASM_EXTABLE(11b,30b)
	_ASM_EXTABLE(12b,30b)
	_ASM_EXTABLE(13b,30b)
	_ASM_EXTABLE(14b,30b)
	_ASM_EXTABLE(15b,30b)
	_ASM_EXTABLE(16b,30b)
	_ASM_EXTABLE(18b,40b)
	_ASM_EXTABLE(19b,40b)
	_ASM_EXTABLE(21b,50b)
	_ASM_EXTABLE(22b,50b)
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)