Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | /* |
| 2 | * User address space access functions. |
| 3 | * |
| 4 | * Copyright 1997 Andi Kleen <ak@muc.de> |
| 5 | * Copyright 1997 Linus Torvalds |
| 6 | * Copyright 2002 Andi Kleen <ak@suse.de> |
| 7 | */ |
Andi Kleen | 2ee60e17 | 2006-06-26 13:59:44 +0200 | [diff] [blame] | 8 | #include <linux/module.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 9 | #include <asm/uaccess.h> |
| 10 | |
| 11 | /* |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 12 | * Zero Userspace |
| 13 | */ |
| 14 | |
/*
 * __clear_user - zero a range of user memory, WITHOUT an access_ok() check.
 * @addr: destination address in user space
 * @size: number of bytes to zero
 *
 * Returns the number of bytes that could NOT be cleared (0 on full success).
 * Callers are responsible for having validated the range (see clear_user()).
 */
unsigned long __clear_user(void __user *addr, unsigned long size)
{
	long __d0;
	might_fault();
	/* no memory constraint because it doesn't change any memory gcc knows
	   about */
	stac();		/* open the SMAP window for the user-space stores below */
	asm volatile(
		/* quadword loop: rcx preloaded with size/8 via the [size8] constraint */
		" testq %[size8],%[size8]\n"
		" jz 4f\n"
		"0: movq %[zero],(%[dst])\n"
		" addq %[eight],%[dst]\n"
		" decl %%ecx ; jnz 0b\n"
		/* byte loop for the remaining size%7 tail bytes */
		"4: movq %[size1],%%rcx\n"
		" testl %%ecx,%%ecx\n"
		" jz 2f\n"
		"1: movb %b[zero],(%[dst])\n"
		" incq %[dst]\n"
		" decl %%ecx ; jnz 1b\n"
		"2:\n"
		/* fault fixup: fold unfinished quads + tail bytes back into the
		 * return count: size8*8 + size1 */
		".section .fixup,\"ax\"\n"
		"3: lea 0(%[size1],%[size8],8),%[size8]\n"
		" jmp 2b\n"
		".previous\n"
		_ASM_EXTABLE(0b,3b)
		_ASM_EXTABLE(1b,2b)
		: [size8] "=&c"(size), [dst] "=&D" (__d0)
		: [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr),
		  [zero] "r" (0UL), [eight] "r" (8UL));
	clac();		/* close the SMAP window */
	return size;	/* bytes left unzeroed, as computed by the fixup */
}
EXPORT_SYMBOL(__clear_user);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 48 | |
| 49 | unsigned long clear_user(void __user *to, unsigned long n) |
| 50 | { |
| 51 | if (access_ok(VERIFY_WRITE, to, n)) |
| 52 | return __clear_user(to, n); |
| 53 | return n; |
| 54 | } |
Andi Kleen | 2ee60e17 | 2006-06-26 13:59:44 +0200 | [diff] [blame] | 55 | EXPORT_SYMBOL(clear_user); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 56 | |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 57 | unsigned long copy_in_user(void __user *to, const void __user *from, unsigned len) |
| 58 | { |
| 59 | if (access_ok(VERIFY_WRITE, to, len) && access_ok(VERIFY_READ, from, len)) { |
| 60 | return copy_user_generic((__force void *)to, (__force void *)from, len); |
| 61 | } |
| 62 | return len; |
| 63 | } |
Andi Kleen | 2ee60e17 | 2006-06-26 13:59:44 +0200 | [diff] [blame] | 64 | EXPORT_SYMBOL(copy_in_user); |
| 65 | |
/*
 * Try to copy last bytes and clear the rest if needed.
 * Since protection fault in copy_from/to_user is not a normal situation,
 * it is not necessary to optimize tail handling.
 */
/*
 * copy_user_handle_tail - slow-path tail handler after a copy_user fault.
 * @to:       destination (kernel or user pointer, per the faulting caller)
 * @from:     source pointer
 * @len:      bytes still outstanding when the fast-path copy faulted
 * @zerorest: non-zero to zero-fill the uncopied destination tail
 *
 * Returns the number of bytes that remain uncopied.
 */
__visible unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest)
{
	char c;
	unsigned zero_len;

	/* Copy byte-by-byte until either side faults or len is exhausted.
	 * 'to' is advanced only after a successful store (loop increment),
	 * so on a put fault the zero pass below starts at the failed byte. */
	for (; len; --len, to++) {
		if (__get_user_nocheck(c, from++, sizeof(char)))
			break;
		if (__put_user_nocheck(c, to, sizeof(char)))
			break;
	}

	/* If requested, zero the destination tail that was never copied;
	 * stop quietly if the destination itself faults. */
	for (c = 0, zero_len = len; zerorest && zero_len; --zero_len)
		if (__put_user_nocheck(c, to++, sizeof(char)))
			break;
	/* NOTE(review): no matching stac() in this function — presumably the
	 * faulting copy_user fast path enters here with EFLAGS.AC already
	 * set, and this clac() closes that window; confirm against callers. */
	clac();
	return len;
}