Diffstat (limited to 'include/asm-xtensa/uaccess.h')
-rw-r--r--   include/asm-xtensa/uaccess.h   99
1 file changed, 51 insertions(+), 48 deletions(-)
diff --git a/include/asm-xtensa/uaccess.h b/include/asm-xtensa/uaccess.h
index d6352da05b10..b8528426ab1f 100644
--- a/include/asm-xtensa/uaccess.h
+++ b/include/asm-xtensa/uaccess.h
@@ -26,6 +26,7 @@
 #include <asm/current.h>
 #include <asm/asm-offsets.h>
 #include <asm/processor.h>
+#include <asm/types.h>
 
 /*
  * These assembly macros mirror the C macros that follow below.  They
@@ -118,7 +119,7 @@
  * <at>     destroyed (actually, (TASK_SIZE + 1 - size))
  */
         .macro  user_ok  aa, as, at, error
-        movi    \at, (TASK_SIZE+1)
+        movi    \at, __XTENSA_UL_CONST(TASK_SIZE)
         bgeu    \as, \at, \error
         sub     \at, \at, \as
         bgeu    \aa, \at, \error
@@ -226,20 +227,21 @@ extern long __put_user_bad(void);
         __pu_err;                                               \
 })
 
 #define __put_user_size(x,ptr,size,retval)                      \
 do {                                                            \
+        int __cb;                                               \
         retval = 0;                                             \
         switch (size) {                                         \
-        case 1: __put_user_asm(x,ptr,retval,1,"s8i"); break;            \
-        case 2: __put_user_asm(x,ptr,retval,2,"s16i"); break;           \
-        case 4: __put_user_asm(x,ptr,retval,4,"s32i"); break;           \
+        case 1: __put_user_asm(x,ptr,retval,1,"s8i",__cb); break;       \
+        case 2: __put_user_asm(x,ptr,retval,2,"s16i",__cb); break;      \
+        case 4: __put_user_asm(x,ptr,retval,4,"s32i",__cb); break;      \
         case 8: {                                               \
                 __typeof__(*ptr) __v64 = x;                     \
                 retval = __copy_to_user(ptr,&__v64,8);          \
                 break;                                          \
         }                                                       \
         default: __put_user_bad();                              \
         }                                                       \
 } while (0)
 
 
@@ -267,14 +269,14 @@ do { \
 #define __check_align_1  ""
 
 #define __check_align_2                         \
-        "   _bbci.l %2, 0, 1f           \n"     \
-        "   movi    %0, %3              \n"     \
+        "   _bbci.l %3, 0, 1f           \n"     \
+        "   movi    %0, %4              \n"     \
         "   _j      2f                  \n"
 
 #define __check_align_4                         \
-        "   _bbsi.l %2, 0, 0f           \n"     \
-        "   _bbci.l %2, 1, 1f           \n"     \
-        "0: movi    %0, %3              \n"     \
+        "   _bbsi.l %3, 0, 0f           \n"     \
+        "   _bbci.l %3, 1, 1f           \n"     \
+        "0: movi    %0, %4              \n"     \
         "   _j      2f                  \n"
 
 
@@ -286,24 +288,24 @@ do { \
  * WARNING: If you modify this macro at all, verify that the
  * __check_align_* macros still work.
  */
-#define __put_user_asm(x, addr, err, align, insn)               \
+#define __put_user_asm(x, addr, err, align, insn, cb)           \
 __asm__ __volatile__(                                           \
         __check_align_##align                                   \
-        "1: "insn"  %1, %2, 0           \n"                     \
+        "1: "insn"  %2, %3, 0           \n"                     \
         "2:                             \n"                     \
         "   .section  .fixup,\"ax\"     \n"                     \
         "   .align 4                    \n"                     \
         "4:                             \n"                     \
         "   .long  2b                   \n"                     \
         "5:                             \n"                     \
-        "   l32r   %2, 4b               \n"                     \
-        "   movi   %0, %3               \n"                     \
-        "   jx     %2                   \n"                     \
+        "   l32r   %1, 4b               \n"                     \
+        "   movi   %0, %4               \n"                     \
+        "   jx     %1                   \n"                     \
         "   .previous                   \n"                     \
         "   .section  __ex_table,\"a\"  \n"                     \
         "   .long  1b, 5b               \n"                     \
         "   .previous"                                          \
-        :"=r" (err)                                             \
+        :"=r" (err), "=r" (cb)                                  \
         :"r" ((int)(x)), "r" (addr), "i" (-EFAULT), "0" (err))
 
 #define __get_user_nocheck(x,ptr,size)                          \
@@ -328,11 +330,12 @@ extern long __get_user_bad(void);
 
 #define __get_user_size(x,ptr,size,retval)                      \
 do {                                                            \
+        int __cb;                                               \
         retval = 0;                                             \
         switch (size) {                                         \
-        case 1: __get_user_asm(x,ptr,retval,1,"l8ui"); break;           \
-        case 2: __get_user_asm(x,ptr,retval,2,"l16ui"); break;          \
-        case 4: __get_user_asm(x,ptr,retval,4,"l32i"); break;           \
+        case 1: __get_user_asm(x,ptr,retval,1,"l8ui",__cb); break;      \
+        case 2: __get_user_asm(x,ptr,retval,2,"l16ui",__cb); break;     \
+        case 4: __get_user_asm(x,ptr,retval,4,"l32i",__cb); break;      \
         case 8: retval = __copy_from_user(&x,ptr,8); break;     \
         default: (x) = __get_user_bad();                        \
         }                                                       \
@@ -343,25 +346,25 @@ do { \
  * WARNING: If you modify this macro at all, verify that the
  * __check_align_* macros still work.
  */
-#define __get_user_asm(x, addr, err, align, insn)               \
+#define __get_user_asm(x, addr, err, align, insn, cb)           \
 __asm__ __volatile__(                                           \
         __check_align_##align                                   \
-        "1: "insn"  %1, %2, 0           \n"                     \
+        "1: "insn"  %2, %3, 0           \n"                     \
         "2:                             \n"                     \
         "   .section  .fixup,\"ax\"     \n"                     \
         "   .align 4                    \n"                     \
         "4:                             \n"                     \
         "   .long  2b                   \n"                     \
         "5:                             \n"                     \
-        "   l32r   %2, 4b               \n"                     \
-        "   movi   %1, 0                \n"                     \
-        "   movi   %0, %3               \n"                     \
-        "   jx     %2                   \n"                     \
+        "   l32r   %1, 4b               \n"                     \
+        "   movi   %2, 0                \n"                     \
+        "   movi   %0, %4               \n"                     \
+        "   jx     %1                   \n"                     \
         "   .previous                   \n"                     \
         "   .section  __ex_table,\"a\"  \n"                     \
         "   .long  1b, 5b               \n"                     \
         "   .previous"                                          \
-        :"=r" (err), "=r" (x)                                   \
+        :"=r" (err), "=r" (cb), "=r" (x)                        \
         :"r" (addr), "i" (-EFAULT), "0" (err))
 
 
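Note (not part of the commit): the new cb argument gives the .fixup stub its own output scratch register to load the resume address into (l32r %1, 4b; jx %1), presumably so the fixup path no longer jumps through a register that the asm template declared only as an input (the user-space address). Because cb is inserted as output operand %1, the address, value and -EFAULT operands referenced by the templates shift from %1/%2/%3 to %2/%3/%4, which is what the __check_align_* and insn lines above are adjusted for. Below is a minimal, illustrative sketch of how these macros are typically reached; example_read_counter(), example_write_counter(), uptr and val are made-up names used only for illustration, not identifiers from this file.

/*
 * Illustrative sketch only -- not from this commit.  get_user() on a
 * 4-byte object expands to __get_user_size(..., 4, ...), which now
 * declares the 'int __cb;' temporary and passes it to __get_user_asm()
 * as the scratch register used by the .fixup return jump; put_user()
 * reaches __put_user_size()/__put_user_asm() the same way.
 */
#include <linux/errno.h>
#include <asm/uaccess.h>

long example_read_counter(int __user *uptr)
{
        int val;

        if (get_user(val, uptr))        /* fails with -EFAULT on a bad user pointer */
                return -EFAULT;

        return val;
}

long example_write_counter(int __user *uptr, int val)
{
        return put_user(val, uptr) ? -EFAULT : 0;
}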