author     Roman Zippel <zippel@linux-m68k.org>        2006-06-23 05:05:00 -0400
committer  Linus Torvalds <torvalds@g5.osdl.org>       2006-06-23 10:43:01 -0400
commit     d94af931af42152e34539dd4782b1724084a89fb (patch)
tree       7cf8c1cce891ef6b87635a643fe62ea9d231b474 /include
parent     1a23989475846547e5b7ce14e77f072894aaff54 (diff)
[PATCH] m68k: clean up uaccess.h
This uninlines a few large functions in uaccess.h and cleans up the rest.
It includes a (hopefully temporary) workaround for the broken typeof of
gcc-4.1.

Signed-off-by: Roman Zippel <zippel@linux-m68k.org>
Cc: Geert Uytterhoeven <geert@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'include')
-rw-r--r--   include/asm-m68k/uaccess.h   1084
1 file changed, 271 insertions(+), 813 deletions(-)
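
The reworked __get_user()/__put_user() in this patch keep the dispatch-on-sizeof
pattern but pass an explicit fixed-width type (u8/u16/u32) into __get_user_asm()
for the temporary, instead of deriving it with typeof(*(ptr)); that is the
gcc-4.1 typeof workaround the log message refers to, and it is also why the
8-byte __get_user() case is commented out in the diff below. The following
stand-alone C sketch is illustrative only and not part of the commit: the names
DEMO_GET_ASM and demo_get_user are invented, and an ordinary assignment stands
in for the moves instruction plus its __ex_table fixup.

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for __get_user_asm(): read *ptr through a temporary of an
     * explicitly named fixed-width type, then cast back to the pointer's
     * type.  The real header does this with a moves instruction and an
     * exception-table entry. */
    #define DEMO_GET_ASM(err, x, ptr, type)                           \
    do {                                                              \
            type __gu_val = *(ptr);   /* stands in for "moves" */     \
            (err) = 0;                                                \
            (x) = (__typeof__(*(ptr)))__gu_val;                       \
    } while (0)

    /* Stand-in for __get_user(): pick the access width from sizeof(*(ptr)). */
    #define demo_get_user(x, ptr)                                     \
    ({                                                                \
            int __gu_err = -1;   /* unsupported size, like __get_user_bad() */ \
            switch (sizeof(*(ptr))) {                                 \
            case 1: DEMO_GET_ASM(__gu_err, x, ptr, uint8_t);  break;  \
            case 2: DEMO_GET_ASM(__gu_err, x, ptr, uint16_t); break;  \
            case 4: DEMO_GET_ASM(__gu_err, x, ptr, uint32_t); break;  \
            }                                                         \
            __gu_err;                                                 \
    })

    int main(void)
    {
            short src = 1234, dst = 0;
            int err = demo_get_user(dst, &src);   /* uses the 2-byte case */
            printf("err=%d dst=%d\n", err, dst);
            return 0;
    }
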
diff --git a/include/asm-m68k/uaccess.h b/include/asm-m68k/uaccess.h
index 2ffd87b0a769..b761ef218cea 100644
--- a/include/asm-m68k/uaccess.h
+++ b/include/asm-m68k/uaccess.h
@@ -4,8 +4,9 @@
4/* 4/*
5 * User space memory access functions 5 * User space memory access functions
6 */ 6 */
7#include <linux/compiler.h>
7#include <linux/errno.h> 8#include <linux/errno.h>
8#include <linux/sched.h> 9#include <linux/types.h>
9#include <asm/segment.h> 10#include <asm/segment.h>
10 11
11#define VERIFY_READ 0 12#define VERIFY_READ 0
@@ -32,858 +33,315 @@ struct exception_table_entry
32 unsigned long insn, fixup; 33 unsigned long insn, fixup;
33}; 34};
34 35
36extern int __put_user_bad(void);
37extern int __get_user_bad(void);
38
39#define __put_user_asm(res, x, ptr, bwl, reg, err) \
40asm volatile ("\n" \
41 "1: moves."#bwl" %2,%1\n" \
42 "2:\n" \
43 " .section .fixup,\"ax\"\n" \
44 " .even\n" \
45 "10: moveq.l %3,%0\n" \
46 " jra 2b\n" \
47 " .previous\n" \
48 "\n" \
49 " .section __ex_table,\"a\"\n" \
50 " .align 4\n" \
51 " .long 1b,10b\n" \
52 " .long 2b,10b\n" \
53 " .previous" \
54 : "+d" (res), "=m" (*(ptr)) \
55 : #reg (x), "i" (err))
35 56
36/* 57/*
37 * These are the main single-value transfer routines. They automatically 58 * These are the main single-value transfer routines. They automatically
38 * use the right size if we just have the right pointer type. 59 * use the right size if we just have the right pointer type.
39 */ 60 */
40 61
41#define put_user(x, ptr) \ 62#define __put_user(x, ptr) \
42({ \ 63({ \
43 int __pu_err; \ 64 typeof(*(ptr)) __pu_val = (x); \
44 typeof(*(ptr)) __pu_val = (x); \ 65 int __pu_err = 0; \
45 __chk_user_ptr(ptr); \ 66 __chk_user_ptr(ptr); \
46 switch (sizeof (*(ptr))) { \ 67 switch (sizeof (*(ptr))) { \
47 case 1: \ 68 case 1: \
48 __put_user_asm(__pu_err, __pu_val, ptr, b); \ 69 __put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
49 break; \ 70 break; \
50 case 2: \ 71 case 2: \
51 __put_user_asm(__pu_err, __pu_val, ptr, w); \ 72 __put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT); \
52 break; \ 73 break; \
53 case 4: \ 74 case 4: \
54 __put_user_asm(__pu_err, __pu_val, ptr, l); \ 75 __put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
55 break; \ 76 break; \
56 case 8: \ 77 case 8: \
57 __pu_err = __constant_copy_to_user(ptr, &__pu_val, 8); \ 78 { \
58 break; \ 79 const void *__pu_ptr = (ptr); \
59 default: \ 80 asm volatile ("\n" \
60 __pu_err = __put_user_bad(); \ 81 "1: moves.l %2,(%1)+\n" \
61 break; \ 82 "2: moves.l %R2,(%1)\n" \
62 } \ 83 "3:\n" \
63 __pu_err; \ 84 " .section .fixup,\"ax\"\n" \
85 " .even\n" \
86 "10: movel %3,%0\n" \
87 " jra 3b\n" \
88 " .previous\n" \
89 "\n" \
90 " .section __ex_table,\"a\"\n" \
91 " .align 4\n" \
92 " .long 1b,10b\n" \
93 " .long 2b,10b\n" \
94 " .long 3b,10b\n" \
95 " .previous" \
96 : "+d" (__pu_err), "+a" (__pu_ptr) \
97 : "r" (__pu_val), "i" (-EFAULT) \
98 : "memory"); \
99 break; \
100 } \
101 default: \
102 __pu_err = __put_user_bad(); \
103 break; \
104 } \
105 __pu_err; \
64}) 106})
65#define __put_user(x, ptr) put_user(x, ptr) 107#define put_user(x, ptr) __put_user(x, ptr)
66 108
67extern int __put_user_bad(void);
68 109
69/* 110#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({ \
70 * Tell gcc we read from memory instead of writing: this is because 111 type __gu_val; \
71 * we do not write to any memory gcc knows about, so there are no 112 asm volatile ("\n" \
72 * aliasing issues. 113 "1: moves."#bwl" %2,%1\n" \
73 */ 114 "2:\n" \
74#define __put_user_asm(err,x,ptr,bwl) \ 115 " .section .fixup,\"ax\"\n" \
75__asm__ __volatile__ \ 116 " .even\n" \
76 ("21:moves" #bwl " %2,%1\n" \ 117 "10: move.l %3,%0\n" \
77 "1:\n" \ 118 " sub."#bwl" %1,%1\n" \
78 ".section .fixup,\"ax\"\n" \ 119 " jra 2b\n" \
79 " .even\n" \ 120 " .previous\n" \
80 "2: movel %3,%0\n" \ 121 "\n" \
81 " jra 1b\n" \ 122 " .section __ex_table,\"a\"\n" \
82 ".previous\n" \ 123 " .align 4\n" \
83 ".section __ex_table,\"a\"\n" \ 124 " .long 1b,10b\n" \
84 " .align 4\n" \ 125 " .previous" \
85 " .long 21b,2b\n" \ 126 : "+d" (res), "=&" #reg (__gu_val) \
86 " .long 1b,2b\n" \ 127 : "m" (*(ptr)), "i" (err)); \
87 ".previous" \ 128 (x) = (typeof(*(ptr)))(long)__gu_val; \
88 : "=d"(err) \
89 : "m"(*(ptr)), "r"(x), "i"(-EFAULT), "0"(0))
90
91#define get_user(x, ptr) \
92({ \
93 int __gu_err; \
94 typeof(*(ptr)) __gu_val; \
95 __chk_user_ptr(ptr); \
96 switch (sizeof(*(ptr))) { \
97 case 1: \
98 __get_user_asm(__gu_err, __gu_val, ptr, b, "=d"); \
99 break; \
100 case 2: \
101 __get_user_asm(__gu_err, __gu_val, ptr, w, "=r"); \
102 break; \
103 case 4: \
104 __get_user_asm(__gu_err, __gu_val, ptr, l, "=r"); \
105 break; \
106 case 8: \
107 __gu_err = __constant_copy_from_user(&__gu_val, ptr, 8); \
108 break; \
109 default: \
110 __gu_val = (typeof(*(ptr)))0; \
111 __gu_err = __get_user_bad(); \
112 break; \
113 } \
114 (x) = __gu_val; \
115 __gu_err; \
116}) 129})
117#define __get_user(x, ptr) get_user(x, ptr)
118 130
119extern int __get_user_bad(void); 131#define __get_user(x, ptr) \
132({ \
133 int __gu_err = 0; \
134 __chk_user_ptr(ptr); \
135 switch (sizeof(*(ptr))) { \
136 case 1: \
137 __get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT); \
138 break; \
139 case 2: \
140 __get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT); \
141 break; \
142 case 4: \
143 __get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT); \
144 break; \
145/* case 8: disabled because gcc-4.1 has a broken typeof \
146 { \
147 const void *__gu_ptr = (ptr); \
148 u64 __gu_val; \
149 asm volatile ("\n" \
150 "1: moves.l (%2)+,%1\n" \
151 "2: moves.l (%2),%R1\n" \
152 "3:\n" \
153 " .section .fixup,\"ax\"\n" \
154 " .even\n" \
155 "10: move.l %3,%0\n" \
156 " sub.l %1,%1\n" \
157 " sub.l %R1,%R1\n" \
158 " jra 3b\n" \
159 " .previous\n" \
160 "\n" \
161 " .section __ex_table,\"a\"\n" \
162 " .align 4\n" \
163 " .long 1b,10b\n" \
164 " .long 2b,10b\n" \
165 " .previous" \
166 : "+d" (__gu_err), "=&r" (__gu_val), \
167 "+a" (__gu_ptr) \
168 : "i" (-EFAULT) \
169 : "memory"); \
170 (x) = (typeof(*(ptr)))__gu_val; \
171 break; \
172 } */ \
173 default: \
174 __gu_err = __get_user_bad(); \
175 break; \
176 } \
177 __gu_err; \
178})
179#define get_user(x, ptr) __get_user(x, ptr)
120 180
121#define __get_user_asm(err,x,ptr,bwl,reg) \ 181unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
122__asm__ __volatile__ \ 182unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
123 ("1: moves" #bwl " %2,%1\n" \
124 "2:\n" \
125 ".section .fixup,\"ax\"\n" \
126 " .even\n" \
127 "3: movel %3,%0\n" \
128 " sub" #bwl " %1,%1\n" \
129 " jra 2b\n" \
130 ".previous\n" \
131 ".section __ex_table,\"a\"\n" \
132 " .align 4\n" \
133 " .long 1b,3b\n" \
134 ".previous" \
135 : "=d"(err), reg(x) \
136 : "m"(*(ptr)), "i" (-EFAULT), "0"(0))
137 183
138static inline unsigned long 184static __always_inline unsigned long
139__generic_copy_from_user(void *to, const void __user *from, unsigned long n) 185__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
140{
141 unsigned long tmp;
142 __asm__ __volatile__
143 (" tstl %2\n"
144 " jeq 2f\n"
145 "1: movesl (%1)+,%3\n"
146 " movel %3,(%0)+\n"
147 " subql #1,%2\n"
148 " jne 1b\n"
149 "2: movel %4,%2\n"
150 " bclr #1,%2\n"
151 " jeq 4f\n"
152 "3: movesw (%1)+,%3\n"
153 " movew %3,(%0)+\n"
154 "4: bclr #0,%2\n"
155 " jeq 6f\n"
156 "5: movesb (%1)+,%3\n"
157 " moveb %3,(%0)+\n"
158 "6:\n"
159 ".section .fixup,\"ax\"\n"
160 " .even\n"
161 "7: movel %2,%%d0\n"
162 "71:clrl (%0)+\n"
163 " subql #1,%%d0\n"
164 " jne 71b\n"
165 " lsll #2,%2\n"
166 " addl %4,%2\n"
167 " btst #1,%4\n"
168 " jne 81f\n"
169 " btst #0,%4\n"
170 " jne 91f\n"
171 " jra 6b\n"
172 "8: addql #2,%2\n"
173 "81:clrw (%0)+\n"
174 " btst #0,%4\n"
175 " jne 91f\n"
176 " jra 6b\n"
177 "9: addql #1,%2\n"
178 "91:clrb (%0)+\n"
179 " jra 6b\n"
180 ".previous\n"
181 ".section __ex_table,\"a\"\n"
182 " .align 4\n"
183 " .long 1b,7b\n"
184 " .long 3b,8b\n"
185 " .long 5b,9b\n"
186 ".previous"
187 : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
188 : "d"(n & 3), "0"(to), "1"(from), "2"(n/4)
189 : "d0", "memory");
190 return n;
191}
192
193static inline unsigned long
194__generic_copy_to_user(void __user *to, const void *from, unsigned long n)
195{ 186{
196 unsigned long tmp; 187 unsigned long res = 0, tmp;
197 __asm__ __volatile__
198 (" tstl %2\n"
199 " jeq 3f\n"
200 "1: movel (%1)+,%3\n"
201 "22:movesl %3,(%0)+\n"
202 "2: subql #1,%2\n"
203 " jne 1b\n"
204 "3: movel %4,%2\n"
205 " bclr #1,%2\n"
206 " jeq 4f\n"
207 " movew (%1)+,%3\n"
208 "24:movesw %3,(%0)+\n"
209 "4: bclr #0,%2\n"
210 " jeq 5f\n"
211 " moveb (%1)+,%3\n"
212 "25:movesb %3,(%0)+\n"
213 "5:\n"
214 ".section .fixup,\"ax\"\n"
215 " .even\n"
216 "60:addql #1,%2\n"
217 "6: lsll #2,%2\n"
218 " addl %4,%2\n"
219 " jra 5b\n"
220 "7: addql #2,%2\n"
221 " jra 5b\n"
222 "8: addql #1,%2\n"
223 " jra 5b\n"
224 ".previous\n"
225 ".section __ex_table,\"a\"\n"
226 " .align 4\n"
227 " .long 1b,60b\n"
228 " .long 22b,6b\n"
229 " .long 2b,6b\n"
230 " .long 24b,7b\n"
231 " .long 3b,60b\n"
232 " .long 4b,7b\n"
233 " .long 25b,8b\n"
234 " .long 5b,8b\n"
235 ".previous"
236 : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
237 : "r"(n & 3), "0"(to), "1"(from), "2"(n / 4)
238 : "memory");
239 return n;
240}
241 188
242#define __copy_from_user_big(to, from, n, fixup, copy) \ 189 /* limit the inlined version to 3 moves */
243 __asm__ __volatile__ \ 190 if (n == 11 || n > 12)
244 ("10: movesl (%1)+,%%d0\n" \ 191 return __generic_copy_from_user(to, from, n);
245 " movel %%d0,(%0)+\n" \
246 " subql #1,%2\n" \
247 " jne 10b\n" \
248 ".section .fixup,\"ax\"\n" \
249 " .even\n" \
250 "11: movel %2,%%d0\n" \
251 "13: clrl (%0)+\n" \
252 " subql #1,%%d0\n" \
253 " jne 13b\n" \
254 " lsll #2,%2\n" \
255 fixup "\n" \
256 " jra 12f\n" \
257 ".previous\n" \
258 ".section __ex_table,\"a\"\n" \
259 " .align 4\n" \
260 " .long 10b,11b\n" \
261 ".previous\n" \
262 copy "\n" \
263 "12:" \
264 : "=a"(to), "=a"(from), "=d"(n) \
265 : "0"(to), "1"(from), "2"(n/4) \
266 : "d0", "memory")
267 192
268static inline unsigned long 193 switch (n) {
269__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
270{
271 switch (n) {
272 case 0:
273 break;
274 case 1:
275 __asm__ __volatile__
276 ("1: movesb (%1)+,%%d0\n"
277 " moveb %%d0,(%0)+\n"
278 "2:\n"
279 ".section .fixup,\"ax\"\n"
280 " .even\n"
281 "3: addql #1,%2\n"
282 " clrb (%0)+\n"
283 " jra 2b\n"
284 ".previous\n"
285 ".section __ex_table,\"a\"\n"
286 " .align 4\n"
287 " .long 1b,3b\n"
288 ".previous"
289 : "=a"(to), "=a"(from), "=d"(n)
290 : "0"(to), "1"(from), "2"(0)
291 : "d0", "memory");
292 break;
293 case 2:
294 __asm__ __volatile__
295 ("1: movesw (%1)+,%%d0\n"
296 " movew %%d0,(%0)+\n"
297 "2:\n"
298 ".section .fixup,\"ax\"\n"
299 " .even\n"
300 "3: addql #2,%2\n"
301 " clrw (%0)+\n"
302 " jra 2b\n"
303 ".previous\n"
304 ".section __ex_table,\"a\"\n"
305 " .align 4\n"
306 " .long 1b,3b\n"
307 ".previous"
308 : "=a"(to), "=a"(from), "=d"(n)
309 : "0"(to), "1"(from), "2"(0)
310 : "d0", "memory");
311 break;
312 case 3:
313 __asm__ __volatile__
314 ("1: movesw (%1)+,%%d0\n"
315 " movew %%d0,(%0)+\n"
316 "2: movesb (%1)+,%%d0\n"
317 " moveb %%d0,(%0)+\n"
318 "3:"
319 ".section .fixup,\"ax\"\n"
320 " .even\n"
321 "4: addql #2,%2\n"
322 " clrw (%0)+\n"
323 "5: addql #1,%2\n"
324 " clrb (%0)+\n"
325 " jra 3b\n"
326 ".previous\n"
327 ".section __ex_table,\"a\"\n"
328 " .align 4\n"
329 " .long 1b,4b\n"
330 " .long 2b,5b\n"
331 ".previous"
332 : "=a"(to), "=a"(from), "=d"(n)
333 : "0"(to), "1"(from), "2"(0)
334 : "d0", "memory");
335 break;
336 case 4:
337 __asm__ __volatile__
338 ("1: movesl (%1)+,%%d0\n"
339 " movel %%d0,(%0)+\n"
340 "2:"
341 ".section .fixup,\"ax\"\n"
342 " .even\n"
343 "3: addql #4,%2\n"
344 " clrl (%0)+\n"
345 " jra 2b\n"
346 ".previous\n"
347 ".section __ex_table,\"a\"\n"
348 " .align 4\n"
349 " .long 1b,3b\n"
350 ".previous"
351 : "=a"(to), "=a"(from), "=d"(n)
352 : "0"(to), "1"(from), "2"(0)
353 : "d0", "memory");
354 break;
355 case 8:
356 __asm__ __volatile__
357 ("1: movesl (%1)+,%%d0\n"
358 " movel %%d0,(%0)+\n"
359 "2: movesl (%1)+,%%d0\n"
360 " movel %%d0,(%0)+\n"
361 "3:"
362 ".section .fixup,\"ax\"\n"
363 " .even\n"
364 "4: addql #4,%2\n"
365 " clrl (%0)+\n"
366 "5: addql #4,%2\n"
367 " clrl (%0)+\n"
368 " jra 3b\n"
369 ".previous\n"
370 ".section __ex_table,\"a\"\n"
371 " .align 4\n"
372 " .long 1b,4b\n"
373 " .long 2b,5b\n"
374 ".previous"
375 : "=a"(to), "=a"(from), "=d"(n)
376 : "0"(to), "1"(from), "2"(0)
377 : "d0", "memory");
378 break;
379 case 12:
380 __asm__ __volatile__
381 ("1: movesl (%1)+,%%d0\n"
382 " movel %%d0,(%0)+\n"
383 "2: movesl (%1)+,%%d0\n"
384 " movel %%d0,(%0)+\n"
385 "3: movesl (%1)+,%%d0\n"
386 " movel %%d0,(%0)+\n"
387 "4:"
388 ".section .fixup,\"ax\"\n"
389 " .even\n"
390 "5: addql #4,%2\n"
391 " clrl (%0)+\n"
392 "6: addql #4,%2\n"
393 " clrl (%0)+\n"
394 "7: addql #4,%2\n"
395 " clrl (%0)+\n"
396 " jra 4b\n"
397 ".previous\n"
398 ".section __ex_table,\"a\"\n"
399 " .align 4\n"
400 " .long 1b,5b\n"
401 " .long 2b,6b\n"
402 " .long 3b,7b\n"
403 ".previous"
404 : "=a"(to), "=a"(from), "=d"(n)
405 : "0"(to), "1"(from), "2"(0)
406 : "d0", "memory");
407 break;
408 case 16:
409 __asm__ __volatile__
410 ("1: movesl (%1)+,%%d0\n"
411 " movel %%d0,(%0)+\n"
412 "2: movesl (%1)+,%%d0\n"
413 " movel %%d0,(%0)+\n"
414 "3: movesl (%1)+,%%d0\n"
415 " movel %%d0,(%0)+\n"
416 "4: movesl (%1)+,%%d0\n"
417 " movel %%d0,(%0)+\n"
418 "5:"
419 ".section .fixup,\"ax\"\n"
420 " .even\n"
421 "6: addql #4,%2\n"
422 " clrl (%0)+\n"
423 "7: addql #4,%2\n"
424 " clrl (%0)+\n"
425 "8: addql #4,%2\n"
426 " clrl (%0)+\n"
427 "9: addql #4,%2\n"
428 " clrl (%0)+\n"
429 " jra 5b\n"
430 ".previous\n"
431 ".section __ex_table,\"a\"\n"
432 " .align 4\n"
433 " .long 1b,6b\n"
434 " .long 2b,7b\n"
435 " .long 3b,8b\n"
436 " .long 4b,9b\n"
437 ".previous"
438 : "=a"(to), "=a"(from), "=d"(n)
439 : "0"(to), "1"(from), "2"(0)
440 : "d0", "memory");
441 break;
442 default:
443 switch (n & 3) {
444 case 0:
445 __copy_from_user_big(to, from, n, "", "");
446 break;
447 case 1: 194 case 1:
448 __copy_from_user_big(to, from, n, 195 __get_user_asm(res, *(u8 *)to, (u8 *)from, u8, b, d, 1);
449 /* fixup */ 196 return res;
450 "1: addql #1,%2\n"
451 " clrb (%0)+",
452 /* copy */
453 "2: movesb (%1)+,%%d0\n"
454 " moveb %%d0,(%0)+\n"
455 ".section __ex_table,\"a\"\n"
456 " .long 2b,1b\n"
457 ".previous");
458 break;
459 case 2: 197 case 2:
460 __copy_from_user_big(to, from, n, 198 __get_user_asm(res, *(u16 *)to, (u16 *)from, u16, w, d, 2);
461 /* fixup */ 199 return res;
462 "1: addql #2,%2\n" 200 case 4:
463 " clrw (%0)+", 201 __get_user_asm(res, *(u32 *)to, (u32 *)from, u32, l, r, 4);
464 /* copy */ 202 return res;
465 "2: movesw (%1)+,%%d0\n"
466 " movew %%d0,(%0)+\n"
467 ".section __ex_table,\"a\"\n"
468 " .long 2b,1b\n"
469 ".previous");
470 break;
471 case 3:
472 __copy_from_user_big(to, from, n,
473 /* fixup */
474 "1: addql #2,%2\n"
475 " clrw (%0)+\n"
476 "2: addql #1,%2\n"
477 " clrb (%0)+",
478 /* copy */
479 "3: movesw (%1)+,%%d0\n"
480 " movew %%d0,(%0)+\n"
481 "4: movesb (%1)+,%%d0\n"
482 " moveb %%d0,(%0)+\n"
483 ".section __ex_table,\"a\"\n"
484 " .long 3b,1b\n"
485 " .long 4b,2b\n"
486 ".previous");
487 break;
488 } 203 }
489 break;
490 }
491 return n;
492}
493 204
494#define __copy_to_user_big(to, from, n, fixup, copy) \ 205 asm volatile ("\n"
495 __asm__ __volatile__ \ 206 " .ifndef .Lfrom_user\n"
496 ("10: movel (%1)+,%%d0\n" \ 207 " .set .Lfrom_user,1\n"
497 "31: movesl %%d0,(%0)+\n" \ 208 " .macro copy_from_user to,from,tmp\n"
498 "11: subql #1,%2\n" \ 209 " .if .Lcnt >= 4\n"
499 " jne 10b\n" \ 210 "1: moves.l (\\from)+,\\tmp\n"
500 "41:\n" \ 211 " move.l \\tmp,(\\to)+\n"
501 ".section .fixup,\"ax\"\n" \ 212 " .set .Lcnt,.Lcnt-4\n"
502 " .even\n" \ 213 " .elseif .Lcnt & 2\n"
503 "22: addql #1,%2\n" \ 214 "1: moves.w (\\from)+,\\tmp\n"
504 "12: lsll #2,%2\n" \ 215 " move.w \\tmp,(\\to)+\n"
505 fixup "\n" \ 216 " .set .Lcnt,.Lcnt-2\n"
506 " jra 13f\n" \ 217 " .elseif .Lcnt & 1\n"
507 ".previous\n" \ 218 "1: moves.b (\\from)+,\\tmp\n"
508 ".section __ex_table,\"a\"\n" \ 219 " move.b \\tmp,(\\to)+\n"
509 " .align 4\n" \ 220 " .set .Lcnt,.Lcnt-1\n"
510 " .long 10b,22b\n" \ 221 " .else\n"
511 " .long 31b,12b\n" \ 222 " .exitm\n"
512 " .long 11b,12b\n" \ 223 " .endif\n"
513 " .long 41b,22b\n" \ 224 "\n"
514 ".previous\n" \ 225 " .section __ex_table,\"a\"\n"
515 copy "\n" \ 226 " .align 4\n"
516 "13:" \ 227 " .long 1b,3f\n"
517 : "=a"(to), "=a"(from), "=d"(n) \ 228 " .previous\n"
518 : "0"(to), "1"(from), "2"(n/4) \ 229 " .endm\n"
519 : "d0", "memory") 230 " .endif\n"
231 "\n"
232 " .set .Lcnt,%c4\n"
233 " copy_from_user %1,%2,%3\n"
234 " copy_from_user %1,%2,%3\n"
235 " copy_from_user %1,%2,%3\n"
236 "2:\n"
237 " .section .fixup,\"ax\"\n"
238 " .even\n"
239 "3: moveq.l %4,%0\n"
240 " move.l %5,%1\n"
241 " .rept %c4 / 4\n"
242 " clr.l (%1)+\n"
243 " .endr\n"
244 " .if %c4 & 2\n"
245 " clr.w (%1)+\n"
246 " .endif\n"
247 " .if %c4 & 1\n"
248 " clr.b (%1)+\n"
249 " .endif\n"
250 " jra 2b\n"
251 " .previous\n"
252 : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
253 : "i" (n), "g" (to)
254 : "memory");
520 255
521#define __copy_to_user_inatomic __copy_to_user 256 return res;
522#define __copy_from_user_inatomic __copy_from_user 257}
523 258
524static inline unsigned long 259static __always_inline unsigned long
525__constant_copy_to_user(void __user *to, const void *from, unsigned long n) 260__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
526{ 261{
527 switch (n) { 262 unsigned long res = 0, tmp;
528 case 0: 263
529 break; 264 /* limit the inlined version to 3 moves */
530 case 1: 265 if (n == 11 || n > 12)
531 __asm__ __volatile__ 266 return __generic_copy_to_user(to, from, n);
532 (" moveb (%1)+,%%d0\n" 267
533 "21:movesb %%d0,(%0)+\n" 268 switch (n) {
534 "1:\n"
535 ".section .fixup,\"ax\"\n"
536 " .even\n"
537 "2: addql #1,%2\n"
538 " jra 1b\n"
539 ".previous\n"
540 ".section __ex_table,\"a\"\n"
541 " .align 4\n "
542 " .long 21b,2b\n"
543 " .long 1b,2b\n"
544 ".previous"
545 : "=a"(to), "=a"(from), "=d"(n)
546 : "0"(to), "1"(from), "2"(0)
547 : "d0", "memory");
548 break;
549 case 2:
550 __asm__ __volatile__
551 (" movew (%1)+,%%d0\n"
552 "21:movesw %%d0,(%0)+\n"
553 "1:\n"
554 ".section .fixup,\"ax\"\n"
555 " .even\n"
556 "2: addql #2,%2\n"
557 " jra 1b\n"
558 ".previous\n"
559 ".section __ex_table,\"a\"\n"
560 " .align 4\n"
561 " .long 21b,2b\n"
562 " .long 1b,2b\n"
563 ".previous"
564 : "=a"(to), "=a"(from), "=d"(n)
565 : "0"(to), "1"(from), "2"(0)
566 : "d0", "memory");
567 break;
568 case 3:
569 __asm__ __volatile__
570 (" movew (%1)+,%%d0\n"
571 "21:movesw %%d0,(%0)+\n"
572 "1: moveb (%1)+,%%d0\n"
573 "22:movesb %%d0,(%0)+\n"
574 "2:\n"
575 ".section .fixup,\"ax\"\n"
576 " .even\n"
577 "3: addql #2,%2\n"
578 "4: addql #1,%2\n"
579 " jra 2b\n"
580 ".previous\n"
581 ".section __ex_table,\"a\"\n"
582 " .align 4\n"
583 " .long 21b,3b\n"
584 " .long 1b,3b\n"
585 " .long 22b,4b\n"
586 " .long 2b,4b\n"
587 ".previous"
588 : "=a"(to), "=a"(from), "=d"(n)
589 : "0"(to), "1"(from), "2"(0)
590 : "d0", "memory");
591 break;
592 case 4:
593 __asm__ __volatile__
594 (" movel (%1)+,%%d0\n"
595 "21:movesl %%d0,(%0)+\n"
596 "1:\n"
597 ".section .fixup,\"ax\"\n"
598 " .even\n"
599 "2: addql #4,%2\n"
600 " jra 1b\n"
601 ".previous\n"
602 ".section __ex_table,\"a\"\n"
603 " .align 4\n"
604 " .long 21b,2b\n"
605 " .long 1b,2b\n"
606 ".previous"
607 : "=a"(to), "=a"(from), "=d"(n)
608 : "0"(to), "1"(from), "2"(0)
609 : "d0", "memory");
610 break;
611 case 8:
612 __asm__ __volatile__
613 (" movel (%1)+,%%d0\n"
614 "21:movesl %%d0,(%0)+\n"
615 "1: movel (%1)+,%%d0\n"
616 "22:movesl %%d0,(%0)+\n"
617 "2:\n"
618 ".section .fixup,\"ax\"\n"
619 " .even\n"
620 "3: addql #4,%2\n"
621 "4: addql #4,%2\n"
622 " jra 2b\n"
623 ".previous\n"
624 ".section __ex_table,\"a\"\n"
625 " .align 4\n"
626 " .long 21b,3b\n"
627 " .long 1b,3b\n"
628 " .long 22b,4b\n"
629 " .long 2b,4b\n"
630 ".previous"
631 : "=a"(to), "=a"(from), "=d"(n)
632 : "0"(to), "1"(from), "2"(0)
633 : "d0", "memory");
634 break;
635 case 12:
636 __asm__ __volatile__
637 (" movel (%1)+,%%d0\n"
638 "21:movesl %%d0,(%0)+\n"
639 "1: movel (%1)+,%%d0\n"
640 "22:movesl %%d0,(%0)+\n"
641 "2: movel (%1)+,%%d0\n"
642 "23:movesl %%d0,(%0)+\n"
643 "3:\n"
644 ".section .fixup,\"ax\"\n"
645 " .even\n"
646 "4: addql #4,%2\n"
647 "5: addql #4,%2\n"
648 "6: addql #4,%2\n"
649 " jra 3b\n"
650 ".previous\n"
651 ".section __ex_table,\"a\"\n"
652 " .align 4\n"
653 " .long 21b,4b\n"
654 " .long 1b,4b\n"
655 " .long 22b,5b\n"
656 " .long 2b,5b\n"
657 " .long 23b,6b\n"
658 " .long 3b,6b\n"
659 ".previous"
660 : "=a"(to), "=a"(from), "=d"(n)
661 : "0"(to), "1"(from), "2"(0)
662 : "d0", "memory");
663 break;
664 case 16:
665 __asm__ __volatile__
666 (" movel (%1)+,%%d0\n"
667 "21:movesl %%d0,(%0)+\n"
668 "1: movel (%1)+,%%d0\n"
669 "22:movesl %%d0,(%0)+\n"
670 "2: movel (%1)+,%%d0\n"
671 "23:movesl %%d0,(%0)+\n"
672 "3: movel (%1)+,%%d0\n"
673 "24:movesl %%d0,(%0)+\n"
674 "4:"
675 ".section .fixup,\"ax\"\n"
676 " .even\n"
677 "5: addql #4,%2\n"
678 "6: addql #4,%2\n"
679 "7: addql #4,%2\n"
680 "8: addql #4,%2\n"
681 " jra 4b\n"
682 ".previous\n"
683 ".section __ex_table,\"a\"\n"
684 " .align 4\n"
685 " .long 21b,5b\n"
686 " .long 1b,5b\n"
687 " .long 22b,6b\n"
688 " .long 2b,6b\n"
689 " .long 23b,7b\n"
690 " .long 3b,7b\n"
691 " .long 24b,8b\n"
692 " .long 4b,8b\n"
693 ".previous"
694 : "=a"(to), "=a"(from), "=d"(n)
695 : "0"(to), "1"(from), "2"(0)
696 : "d0", "memory");
697 break;
698 default:
699 switch (n & 3) {
700 case 0:
701 __copy_to_user_big(to, from, n, "", "");
702 break;
703 case 1: 269 case 1:
704 __copy_to_user_big(to, from, n, 270 __put_user_asm(res, *(u8 *)from, (u8 *)to, b, d, 1);
705 /* fixup */ 271 return res;
706 "1: addql #1,%2",
707 /* copy */
708 " moveb (%1)+,%%d0\n"
709 "22:movesb %%d0,(%0)+\n"
710 "2:"
711 ".section __ex_table,\"a\"\n"
712 " .long 22b,1b\n"
713 " .long 2b,1b\n"
714 ".previous");
715 break;
716 case 2: 272 case 2:
717 __copy_to_user_big(to, from, n, 273 __put_user_asm(res, *(u16 *)from, (u16 *)to, w, d, 2);
718 /* fixup */ 274 return res;
719 "1: addql #2,%2", 275 case 4:
720 /* copy */ 276 __put_user_asm(res, *(u32 *)from, (u32 *)to, l, r, 4);
721 " movew (%1)+,%%d0\n" 277 return res;
722 "22:movesw %%d0,(%0)+\n"
723 "2:"
724 ".section __ex_table,\"a\"\n"
725 " .long 22b,1b\n"
726 " .long 2b,1b\n"
727 ".previous");
728 break;
729 case 3:
730 __copy_to_user_big(to, from, n,
731 /* fixup */
732 "1: addql #2,%2\n"
733 "2: addql #1,%2",
734 /* copy */
735 " movew (%1)+,%%d0\n"
736 "23:movesw %%d0,(%0)+\n"
737 "3: moveb (%1)+,%%d0\n"
738 "24:movesb %%d0,(%0)+\n"
739 "4:"
740 ".section __ex_table,\"a\"\n"
741 " .long 23b,1b\n"
742 " .long 3b,1b\n"
743 " .long 24b,2b\n"
744 " .long 4b,2b\n"
745 ".previous");
746 break;
747 } 278 }
748 break; 279
749 } 280 asm volatile ("\n"
750 return n; 281 " .ifndef .Lto_user\n"
282 " .set .Lto_user,1\n"
283 " .macro copy_to_user to,from,tmp\n"
284 " .if .Lcnt >= 4\n"
285 " move.l (\\from)+,\\tmp\n"
286 "11: moves.l \\tmp,(\\to)+\n"
287 "12: .set .Lcnt,.Lcnt-4\n"
288 " .elseif .Lcnt & 2\n"
289 " move.w (\\from)+,\\tmp\n"
290 "11: moves.w \\tmp,(\\to)+\n"
291 "12: .set .Lcnt,.Lcnt-2\n"
292 " .elseif .Lcnt & 1\n"
293 " move.b (\\from)+,\\tmp\n"
294 "11: moves.b \\tmp,(\\to)+\n"
295 "12: .set .Lcnt,.Lcnt-1\n"
296 " .else\n"
297 " .exitm\n"
298 " .endif\n"
299 "\n"
300 " .section __ex_table,\"a\"\n"
301 " .align 4\n"
302 " .long 11b,3f\n"
303 " .long 12b,3f\n"
304 " .previous\n"
305 " .endm\n"
306 " .endif\n"
307 "\n"
308 " .set .Lcnt,%c4\n"
309 " copy_to_user %1,%2,%3\n"
310 " copy_to_user %1,%2,%3\n"
311 " copy_to_user %1,%2,%3\n"
312 "2:\n"
313 " .section .fixup,\"ax\"\n"
314 " .even\n"
315 "3: moveq.l %4,%0\n"
316 " jra 2b\n"
317 " .previous\n"
318 : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
319 : "i" (n)
320 : "memory");
321
322 return res;
751} 323}
752 324
753#define copy_from_user(to, from, n) \ 325#define __copy_from_user(to, from, n) \
754(__builtin_constant_p(n) ? \ 326(__builtin_constant_p(n) ? \
755 __constant_copy_from_user(to, from, n) : \ 327 __constant_copy_from_user(to, from, n) : \
756 __generic_copy_from_user(to, from, n)) 328 __generic_copy_from_user(to, from, n))
757 329
758#define copy_to_user(to, from, n) \ 330#define __copy_to_user(to, from, n) \
759(__builtin_constant_p(n) ? \ 331(__builtin_constant_p(n) ? \
760 __constant_copy_to_user(to, from, n) : \ 332 __constant_copy_to_user(to, from, n) : \
761 __generic_copy_to_user(to, from, n)) 333 __generic_copy_to_user(to, from, n))
762 334
763#define __copy_from_user(to, from, n) copy_from_user(to, from, n) 335#define __copy_to_user_inatomic __copy_to_user
764#define __copy_to_user(to, from, n) copy_to_user(to, from, n) 336#define __copy_from_user_inatomic __copy_from_user
765 337
766/* 338#define copy_from_user(to, from, n) __copy_from_user(to, from, n)
767 * Copy a null terminated string from userspace. 339#define copy_to_user(to, from, n) __copy_to_user(to, from, n)
768 */
769 340
770static inline long 341long strncpy_from_user(char *dst, const char __user *src, long count);
771strncpy_from_user(char *dst, const char __user *src, long count) 342long strnlen_user(const char __user *src, long n);
772{ 343unsigned long clear_user(void __user *to, unsigned long n);
773 long res;
774 if (count == 0) return count;
775 __asm__ __volatile__
776 ("1: movesb (%2)+,%%d0\n"
777 "12:moveb %%d0,(%1)+\n"
778 " jeq 2f\n"
779 " subql #1,%3\n"
780 " jne 1b\n"
781 "2: subl %3,%0\n"
782 "3:\n"
783 ".section .fixup,\"ax\"\n"
784 " .even\n"
785 "4: movel %4,%0\n"
786 " jra 3b\n"
787 ".previous\n"
788 ".section __ex_table,\"a\"\n"
789 " .align 4\n"
790 " .long 1b,4b\n"
791 " .long 12b,4b\n"
792 ".previous"
793 : "=d"(res), "=a"(dst), "=a"(src), "=d"(count)
794 : "i"(-EFAULT), "0"(count), "1"(dst), "2"(src), "3"(count)
795 : "d0", "memory");
796 return res;
797}
798
799/*
800 * Return the size of a string (including the ending 0)
801 *
802 * Return 0 on exception, a value greater than N if too long
803 */
804static inline long strnlen_user(const char __user *src, long n)
805{
806 long res;
807
808 res = -(unsigned long)src;
809 __asm__ __volatile__
810 ("1:\n"
811 " tstl %2\n"
812 " jeq 3f\n"
813 "2: movesb (%1)+,%%d0\n"
814 "22:\n"
815 " subql #1,%2\n"
816 " tstb %%d0\n"
817 " jne 1b\n"
818 " jra 4f\n"
819 "3:\n"
820 " addql #1,%0\n"
821 "4:\n"
822 " addl %1,%0\n"
823 "5:\n"
824 ".section .fixup,\"ax\"\n"
825 " .even\n"
826 "6: moveq %3,%0\n"
827 " jra 5b\n"
828 ".previous\n"
829 ".section __ex_table,\"a\"\n"
830 " .align 4\n"
831 " .long 2b,6b\n"
832 " .long 22b,6b\n"
833 ".previous"
834 : "=d"(res), "=a"(src), "=d"(n)
835 : "i"(0), "0"(res), "1"(src), "2"(n)
836 : "d0");
837 return res;
838}
839 344
840#define strlen_user(str) strnlen_user(str, 32767) 345#define strlen_user(str) strnlen_user(str, 32767)
841 346
842/*
843 * Zero Userspace
844 */
845
846static inline unsigned long
847clear_user(void __user *to, unsigned long n)
848{
849 __asm__ __volatile__
850 (" tstl %1\n"
851 " jeq 3f\n"
852 "1: movesl %3,(%0)+\n"
853 "2: subql #1,%1\n"
854 " jne 1b\n"
855 "3: movel %2,%1\n"
856 " bclr #1,%1\n"
857 " jeq 4f\n"
858 "24:movesw %3,(%0)+\n"
859 "4: bclr #0,%1\n"
860 " jeq 5f\n"
861 "25:movesb %3,(%0)+\n"
862 "5:\n"
863 ".section .fixup,\"ax\"\n"
864 " .even\n"
865 "61:addql #1,%1\n"
866 "6: lsll #2,%1\n"
867 " addl %2,%1\n"
868 " jra 5b\n"
869 "7: addql #2,%1\n"
870 " jra 5b\n"
871 "8: addql #1,%1\n"
872 " jra 5b\n"
873 ".previous\n"
874 ".section __ex_table,\"a\"\n"
875 " .align 4\n"
876 " .long 1b,61b\n"
877 " .long 2b,6b\n"
878 " .long 3b,61b\n"
879 " .long 24b,7b\n"
880 " .long 4b,7b\n"
881 " .long 25b,8b\n"
882 " .long 5b,8b\n"
883 ".previous"
884 : "=a"(to), "=d"(n)
885 : "r"(n & 3), "r"(0), "0"(to), "1"(n/4));
886 return n;
887}
888
889#endif /* _M68K_UACCESS_H */ 347#endif /* _M68K_UACCESS_H */
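
As the tail of the new header shows, copy_from_user()/copy_to_user() pick the
inlined __constant_copy_* variants only when the length is a compile-time
constant, and fall back to the out-of-line __generic_copy_* helpers otherwise.
A minimal user-space sketch of that dispatch idiom follows; it is illustrative
only, the names demo_copy, constant_copy and generic_copy are invented, and a
plain memcpy stands in for the fault-handling copy loops.

    #include <stdio.h>
    #include <string.h>

    static unsigned long generic_copy(void *to, const void *from, unsigned long n)
    {
            /* out-of-line path: the real helper loops with moves and fixups */
            memcpy(to, from, n);
            return 0;   /* number of bytes NOT copied */
    }

    static inline unsigned long constant_copy(void *to, const void *from, unsigned long n)
    {
            /* inlined path: the compiler knows n and can unroll the copy */
            memcpy(to, from, n);
            return 0;
    }

    /* Same constant-size dispatch idiom as copy_from_user() in the header. */
    #define demo_copy(to, from, n)                      \
            (__builtin_constant_p(n) ?                  \
                    constant_copy(to, from, n) :        \
                    generic_copy(to, from, n))

    int main(void)
    {
            char src[16] = "hello", dst[16];
            unsigned long left = demo_copy(dst, src, 6);  /* constant n: inlined path */
            printf("left=%lu dst=%s\n", left, dst);
            return 0;
    }
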