/* m68k user-space memory access definitions (<asm/uaccess.h>) */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/sched.h>
#include <asm/segment.h>

/* Access-check modes passed as the 'type' argument of access_ok() */
#define VERIFY_READ	0
#define VERIFY_WRITE	1
15
/* We let the MMU do all checking */
static inline int access_ok(int type, const void __user *addr,
			    unsigned long size)
{
	/*
	 * Always report success: bad user pointers fault at access time
	 * and are recovered through the __ex_table fixups below, so no
	 * up-front range check is performed.  'type' (VERIFY_READ or
	 * VERIFY_WRITE), 'addr' and 'size' are deliberately ignored.
	 */
	return 1;
}
Linus Torvalds1da177e2005-04-16 15:20:36 -070022
/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue. No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path. This means when everything is well,
 * we don't even have to jump over them. Further, they do not intrude
 * on our cache or tlb entries.
 */

struct exception_table_entry
{
	/* insn: address allowed to fault; fixup: address to resume at */
	unsigned long insn, fixup;
};
40
/*
 * Referenced only from the unreachable 'default:' branches of
 * __put_user()/__get_user(); presumably left without a definition so
 * that using an unsupported access size fails at link time rather than
 * silently at run time -- TODO confirm no definition exists elsewhere.
 */
extern int __put_user_bad(void);
extern int __get_user_bad(void);
43
/*
 * __put_user_asm(res, x, ptr, bwl, reg, err)
 *
 * Store one value of size #bwl (b/w/l) to user space with the 'moves'
 * instruction.  On a fault, the __ex_table entry sends control to the
 * fixup at 10:, which loads 'err' into 'res' and resumes at 2:.
 *
 * res: int error accumulator; left untouched (0) on success
 * x:   value to store; 'reg' is its constraint letter ("d" or "r")
 * ptr: user-space destination
 * err: value placed in 'res' on fault; moveq takes an 8-bit immediate,
 *      so 'err' must be in -128..127 (-EFAULT fits)
 *
 * NOTE(review): the instruction *after* the store (label 2) is also in
 * the exception table -- presumably to catch a late-reported bus error;
 * verify against the m68k fault handler.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err)	\
asm volatile ("\n"					\
	"1:	moves."#bwl"	%2,%1\n"		\
	"2:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.even\n"				\
	"10:	moveq.l	%3,%0\n"			\
	"	jra	2b\n"				\
	"	.previous\n"				\
	"\n"						\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.long	1b,10b\n"			\
	"	.long	2b,10b\n"			\
	"	.previous"				\
	: "+d" (res), "=m" (*(ptr))			\
	: #reg (x), "i" (err))
Linus Torvalds1da177e2005-04-16 15:20:36 -070061
/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

/*
 * __put_user(x, ptr) -- store 'x' at user address 'ptr'.
 *
 * Evaluates to 0 on success or -EFAULT if the store faulted.  The size
 * is dispatched at compile time from sizeof(*(ptr)): 1/2/4 bytes use
 * __put_user_asm(); 8 bytes are open-coded as two consecutive long
 * 'moves' through an incrementing address register; any other size
 * triggers a link error via __put_user_bad().
 */
#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		/* %R2 is the second register of the 64-bit pair */	\
		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1:	moves.l	%2,(%1)+\n"			\
			"2:	moves.l	%R2,(%1)\n"			\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	movel	%3,%0\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.long	3b,10b\n"			\
			"	.previous"				\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
/* access_ok() always succeeds, so put_user() needs no extra checking */
#define put_user(x, ptr)	__put_user(x, ptr)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700113
Linus Torvalds1da177e2005-04-16 15:20:36 -0700114
/*
 * __get_user_asm(res, x, ptr, type, bwl, reg, err)
 *
 * Load one value of size #bwl (b/w/l) from user space with 'moves' and
 * assign it to 'x'.  On a fault, the fixup at 10: stores 'err' into
 * 'res', zeroes the temporary (sub of a register with itself), and
 * resumes at 2: -- so 'x' ends up 0 on failure.
 *
 * res:  int error accumulator; left untouched (0) on success
 * type: unsigned temporary type matching the access width (u8/u16/u32)
 * reg:  constraint letter for the temporary ("d" or "r")
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;						\
	asm volatile ("\n"					\
		"1:	moves."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub."#bwl"	%1,%1\n"		\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.previous"				\
		: "+d" (res), "=&" #reg (__gu_val)		\
		: "m" (*(ptr)), "i" (err));			\
	/* widen via unsigned long to avoid sign-extension surprises */	\
	(x) = (typeof(*(ptr)))(unsigned long)__gu_val;		\
})
Linus Torvalds1da177e2005-04-16 15:20:36 -0700135
/*
 * __get_user(x, ptr) -- fetch the value at user address 'ptr' into 'x'.
 *
 * Evaluates to 0 on success or -EFAULT if the load faulted ('x' is then
 * zeroed by the __get_user_asm() fixup).  Size is dispatched at compile
 * time; only 1/2/4-byte accesses are supported -- the 8-byte variant is
 * kept below, disabled, because of a gcc-4.1 typeof bug -- and any other
 * size triggers a link error via __get_user_bad().
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
/*	case 8:	disabled because gcc-4.1 has a broken typeof		\
	    {								\
		const void *__gu_ptr = (ptr);				\
		u64 __gu_val;						\
		asm volatile ("\n"					\
			"1:	moves.l	(%2)+,%1\n"			\
			"2:	moves.l	(%2),%R1\n"			\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	move.l	%3,%0\n"			\
			"	sub.l	%1,%1\n"			\
			"	sub.l	%R1,%R1\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.previous"				\
			: "+d" (__gu_err), "=&r" (__gu_val),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = (typeof(*(ptr)))__gu_val;				\
		break;							\
	    }	*/							\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
/* access_ok() always succeeds, so get_user() needs no extra checking */
#define get_user(x, ptr) __get_user(x, ptr)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700185
/*
 * Out-of-line bulk copies for non-constant (or large) lengths; defined
 * in the arch library code, not visible in this header.  By kernel
 * convention they return the number of bytes that could not be copied
 * (0 on success) -- TODO confirm against the m68k implementation.
 */
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700188
/*
 * __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)
 *
 * Copy a constant n bytes from user space as up to three moves of
 * sizes #s1, #s2 and (optionally) #s3; pass an empty s3 to emit only
 * two (the .ifnc directives drop the third move at assembly time).
 * Each user-space load ('moves') is paired with a plain 'move' store
 * to the kernel buffer.
 *
 * On a fault, the fixup entered at 10/20/30 clears the destination
 * bytes not yet written and sets 'res' to the full constant n -- even
 * if some leading bytes were copied successfully.  'n' must fit the
 * 8-bit moveq immediate.  'tmp' is a caller-supplied data-register
 * scratch variable.
 */
#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	moves."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"2:	moves."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	moves."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	clr."#s1"	(%1)+\n"			\
		"20:	clr."#s2"	(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	clr."#s3"	(%1)+\n"			\
		"	.endif\n"					\
		"	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
221
/*
 * Inline copy_from_user() for compile-time-constant lengths.
 *
 * Sizes 1, 2 and 4 use a single user load via __get_user_asm() (whose
 * fixup zeroes the destination on fault); 3, 5-10 and 12 are unrolled
 * into at most three moves; anything else -- including 11 -- falls back
 * to the out-of-line __generic_copy_from_user().
 *
 * Returns 0 on success.  On a fault the inlined variants return the
 * full constant n (even if some leading bytes were copied) and the
 * remaining destination bytes are cleared.
 */
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
		break;
	case 2:
		__get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, d, 2);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}
268
/*
 * __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)
 *
 * Mirror of __constant_copy_from_user_asm(): copy a constant n bytes
 * *to* user space as up to three moves of sizes #s1, #s2 and optional
 * #s3.  Kernel-side loads use plain 'move'; only the user-space stores
 * ('moves', labels 11/21/31) and the instructions following them
 * (12/22/32) appear in the exception table.
 *
 * Any fault jumps to 5:, which just sets 'res' to the full constant n
 * (bytes already stored are not subtracted) and resumes at 4:.  'n'
 * must fit the 8-bit moveq immediate; 'tmp' is a data-register scratch.
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	moves."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	moves."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	moves."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
302
/*
 * Inline copy_to_user() for compile-time-constant lengths.
 *
 * Sizes 1, 2 and 4 use a single user store via __put_user_asm();
 * 3, 5-10 and 12 are unrolled into at most three moves; anything else
 * -- including 11 -- falls back to __generic_copy_to_user().
 *
 * Returns 0 on success, or the full constant n if any store faulted
 * (bytes already written to user space are not subtracted).
 */
static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
		break;
	case 2:
		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, d, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}
349
/*
 * __copy_{from,to}_user(): pick the unrolled inline variant when the
 * length is a compile-time constant, otherwise go out of line.  Both
 * evaluate to the number of bytes left uncopied (0 on success).
 */
#define __copy_from_user(to, from, n)		\
(__builtin_constant_p(n) ?			\
 __constant_copy_from_user(to, from, n) :	\
 __generic_copy_from_user(to, from, n))

#define __copy_to_user(to, from, n)		\
(__builtin_constant_p(n) ?			\
 __constant_copy_to_user(to, from, n) :		\
 __generic_copy_to_user(to, from, n))

/* The _inatomic variants are plain aliases on m68k */
#define __copy_to_user_inatomic __copy_to_user
#define __copy_from_user_inatomic __copy_from_user

/* access_ok() always succeeds, so checking == non-checking versions */
#define copy_from_user(to, from, n)	__copy_from_user(to, from, n)
#define copy_to_user(to, from, n)	__copy_to_user(to, from, n)

/*
 * Out-of-line user-string helpers; defined in arch library code --
 * not visible in this header.
 */
long strncpy_from_user(char *dst, const char __user *src, long count);
long strnlen_user(const char __user *src, long n);
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user	__clear_user

/* Bound the otherwise-unbounded strlen_user() at 32767 bytes */
#define strlen_user(str) strnlen_user(str, 32767)
373
Linus Torvalds1da177e2005-04-16 15:20:36 -0700374#endif /* _M68K_UACCESS_H */