path: root/arch/mips/include
author	Markos Chandras <markos.chandras@imgtec.com>	2014-01-06 07:48:28 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2014-03-26 18:09:16 -0400
commit	0081ad2486ed75cf09b99e4bf997c513567f5b6d (patch)
tree	997f6767940b3ccc1fa5a97fbaf4d5247502d3c2 /arch/mips/include
parent	ac1d8590d3ae855d747b12c9e85dfb40bda709e4 (diff)
MIPS: asm: uaccess: Rename {get,put}_user_asm macros
The {get,put}_user_asm macros can be used to load data from either the kernel or the user address space, so rename them to avoid confusion.

Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Diffstat (limited to 'arch/mips/include')
-rw-r--r--	arch/mips/include/asm/uaccess.h	60
1 file changed, 30 insertions(+), 30 deletions(-)
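As context for the rename, the sketch below (plain userspace C, not kernel code; the names GET_DATA, GET_KERNEL_COMMON, GET_USER_COMMON, kernel_load32 and user_load32 are invented for illustration) mirrors the pattern visible in the diff: the kernel and user size-switch macros differ only in which load routine they hand to a single shared data-access helper, which this patch renames to __get_data_asm / __put_data_asm so the name no longer suggests it is user-space only.

#include <stdio.h>
#include <stdint.h>

/* Stand-ins for the per-address-space load "instructions" passed down. */
static uint32_t kernel_load32(const void *p) { return *(const uint32_t *)p; }
static uint32_t user_load32(const void *p)   { return *(const uint32_t *)p; }

/* Shared helper, analogous in shape to __get_data_asm(val, insn, ptr). */
#define GET_DATA(val, loader, ptr)	((val) = loader(ptr))

/* Size dispatch, analogous to __get_kernel_common / __get_user_common. */
#define GET_KERNEL_COMMON(val, size, ptr)			\
do {								\
	switch (size) {						\
	case 4: GET_DATA(val, kernel_load32, ptr); break;	\
	default: break;						\
	}							\
} while (0)

#define GET_USER_COMMON(val, size, ptr)				\
do {								\
	switch (size) {						\
	case 4: GET_DATA(val, user_load32, ptr); break;		\
	default: break;						\
	}							\
} while (0)

int main(void)
{
	uint32_t src = 42, a = 0, b = 0;

	/* Same dispatch shape, different loader; one shared helper. */
	GET_KERNEL_COMMON(a, sizeof(src), &src);
	GET_USER_COMMON(b, sizeof(src), &src);
	printf("%u %u\n", (unsigned)a, (unsigned)b);	/* prints: 42 42 */
	return 0;
}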
diff --git a/arch/mips/include/asm/uaccess.h b/arch/mips/include/asm/uaccess.h
index 499080447540..f3624b73050f 100644
--- a/arch/mips/include/asm/uaccess.h
+++ b/arch/mips/include/asm/uaccess.h
@@ -248,20 +248,20 @@ struct __large_struct { unsigned long buf[100]; };
 #define __get_kernel_common(val, size, ptr)			\
 do {								\
 	switch (size) {						\
-	case 1: __get_user_asm(val, _loadb, ptr); break;	\
-	case 2: __get_user_asm(val, _loadh, ptr); break;	\
-	case 4: __get_user_asm(val, _loadw, ptr); break;	\
-	case 8: __GET_USER_DW(val, _loadd, ptr); break;		\
+	case 1: __get_data_asm(val, _loadb, ptr); break;	\
+	case 2: __get_data_asm(val, _loadh, ptr); break;	\
+	case 4: __get_data_asm(val, _loadw, ptr); break;	\
+	case 8: __GET_DW(val, _loadd, ptr); break;		\
 	default: __get_user_unknown(); break;			\
 	}							\
 } while (0)
 #endif
 
 #ifdef CONFIG_32BIT
-#define __GET_USER_DW(val, insn, ptr) __get_user_asm_ll32(val, insn, ptr)
+#define __GET_DW(val, insn, ptr) __get_data_asm_ll32(val, insn, ptr)
 #endif
 #ifdef CONFIG_64BIT
-#define __GET_USER_DW(val, insn, ptr) __get_user_asm(val, insn, ptr)
+#define __GET_DW(val, insn, ptr) __get_data_asm(val, insn, ptr)
 #endif
 
 extern void __get_user_unknown(void);
@@ -269,10 +269,10 @@ extern void __get_user_unknown(void);
 #define __get_user_common(val, size, ptr)			\
 do {								\
 	switch (size) {						\
-	case 1: __get_user_asm(val, user_lb, ptr); break;	\
-	case 2: __get_user_asm(val, user_lh, ptr); break;	\
-	case 4: __get_user_asm(val, user_lw, ptr); break;	\
-	case 8: __GET_USER_DW(val, user_ld, ptr); break;	\
+	case 1: __get_data_asm(val, user_lb, ptr); break;	\
+	case 2: __get_data_asm(val, user_lh, ptr); break;	\
+	case 4: __get_data_asm(val, user_lw, ptr); break;	\
+	case 8: __GET_DW(val, user_ld, ptr); break;		\
 	default: __get_user_unknown(); break;			\
 	}							\
 } while (0)
@@ -306,7 +306,7 @@ do { \
 	__gu_err;						\
 })
 
-#define __get_user_asm(val, insn, addr)				\
+#define __get_data_asm(val, insn, addr)				\
 {									\
 	long __gu_tmp;						\
 								\
@@ -330,7 +330,7 @@ do { \
 /*
  * Get a long long 64 using 32 bit registers.
  */
-#define __get_user_asm_ll32(val, insn, addr)			\
+#define __get_data_asm_ll32(val, insn, addr)			\
 {									\
 	union {							\
 		unsigned long long	l;			\
@@ -364,7 +364,7 @@ do { \
 /*
  * Kernel specific functions for EVA. We need to use normal load instructions
  * to read data from kernel when operating in EVA mode. We use these macros to
- * avoid redefining __get_user_asm for EVA.
+ * avoid redefining __get_data_asm for EVA.
  */
 #undef _stored
 #undef _storew
@@ -383,10 +383,10 @@ do { \
 #define __put_kernel_common(ptr, size)				\
 do {								\
 	switch (size) {						\
-	case 1: __put_user_asm(_storeb, ptr); break;		\
-	case 2: __put_user_asm(_storeh, ptr); break;		\
-	case 4: __put_user_asm(_storew, ptr); break;		\
-	case 8: __PUT_USER_DW(_stored, ptr); break;		\
+	case 1: __put_data_asm(_storeb, ptr); break;		\
+	case 2: __put_data_asm(_storeh, ptr); break;		\
+	case 4: __put_data_asm(_storew, ptr); break;		\
+	case 8: __PUT_DW(_stored, ptr); break;			\
 	default: __put_user_unknown(); break;			\
 	}							\
 } while(0)
@@ -397,19 +397,19 @@ do { \
  * for 32 bit mode and old iron.
  */
 #ifdef CONFIG_32BIT
-#define __PUT_USER_DW(insn, ptr) __put_user_asm_ll32(insn, ptr)
+#define __PUT_DW(insn, ptr) __put_data_asm_ll32(insn, ptr)
 #endif
 #ifdef CONFIG_64BIT
-#define __PUT_USER_DW(insn, ptr) __put_user_asm(insn, ptr)
+#define __PUT_DW(insn, ptr) __put_data_asm(insn, ptr)
 #endif
 
 #define __put_user_common(ptr, size)				\
 do {								\
 	switch (size) {						\
-	case 1: __put_user_asm(user_sb, ptr); break;		\
-	case 2: __put_user_asm(user_sh, ptr); break;		\
-	case 4: __put_user_asm(user_sw, ptr); break;		\
-	case 8: __PUT_USER_DW(user_sd, ptr); break;		\
+	case 1: __put_data_asm(user_sb, ptr); break;		\
+	case 2: __put_data_asm(user_sh, ptr); break;		\
+	case 4: __put_data_asm(user_sw, ptr); break;		\
+	case 8: __PUT_DW(user_sd, ptr); break;			\
 	default: __put_user_unknown(); break;			\
 	}							\
 } while (0)
@@ -446,10 +446,10 @@ do { \
 	__pu_err;						\
 })
 
-#define __put_user_asm(insn, ptr)				\
+#define __put_data_asm(insn, ptr)				\
 {									\
 	__asm__ __volatile__(					\
-	"1:	"insn("%z2", "%3")"	# __put_user_asm	\n"	\
+	"1:	"insn("%z2", "%3")"	# __put_data_asm	\n"	\
 	"2:							\n"	\
 	"	.insn						\n"	\
 	"	.section	.fixup,\"ax\"			\n"	\
@@ -464,10 +464,10 @@ do { \
464 "i" (-EFAULT)); \ 464 "i" (-EFAULT)); \
465} 465}
466 466
467#define __put_user_asm_ll32(insn, ptr) \ 467#define __put_data_asm_ll32(insn, ptr) \
468{ \ 468{ \
469 __asm__ __volatile__( \ 469 __asm__ __volatile__( \
470 "1: "insn("%2", "(%3)")" # __put_user_asm_ll32 \n" \ 470 "1: "insn("%2", "(%3)")" # __put_data_asm_ll32 \n" \
471 "2: "insn("%D2", "4(%3)")" \n" \ 471 "2: "insn("%D2", "4(%3)")" \n" \
472 "3: \n" \ 472 "3: \n" \
473 " .insn \n" \ 473 " .insn \n" \
@@ -593,7 +593,7 @@ extern void __get_user_unaligned_unknown(void);
 #define __get_user_unaligned_common(val, size, ptr)		\
 do {								\
 	switch (size) {						\
-	case 1: __get_user_asm(val, "lb", ptr); break;		\
+	case 1: __get_data_asm(val, "lb", ptr); break;		\
 	case 2: __get_user_unaligned_asm(val, "ulh", ptr); break;\
 	case 4: __get_user_unaligned_asm(val, "ulw", ptr); break;\
 	case 8: __GET_USER_UNALIGNED_DW(val, ptr); break;	\
@@ -620,7 +620,7 @@ do { \
 	__gu_err;						\
 })
 
-#define __get_user_unaligned_asm(val, insn, addr)		\
+#define __get_data_unaligned_asm(val, insn, addr)		\
 {									\
 	long __gu_tmp;						\
 								\
@@ -686,7 +686,7 @@ do { \
 #define __put_user_unaligned_common(ptr, size)			\
 do {								\
 	switch (size) {						\
-	case 1: __put_user_asm("sb", ptr); break;		\
+	case 1: __put_data_asm("sb", ptr); break;		\
 	case 2: __put_user_unaligned_asm("ush", ptr); break;	\
 	case 4: __put_user_unaligned_asm("usw", ptr); break;	\
 	case 8: __PUT_USER_UNALIGNED_DW(ptr); break;		\