author		Joe Perches <joe@perches.com>	2008-03-23 04:02:12 -0400
committer	Ingo Molnar <mingo@elte.hu>	2008-04-17 11:41:23 -0400
commit		9407913fc16dde0e632b9639557422c6a792469d (patch)
tree		f4a601b773e922f9c4c4c110f8bd6750603885ac /include/asm-x86/futex.h
parent		4637bc07c85621b0c10320da8cf3b34de83efa0f (diff)
include/asm-x86/futex.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/futex.h')
-rw-r--r--	include/asm-x86/futex.h	101
1 files changed, 55 insertions, 46 deletions
diff --git a/include/asm-x86/futex.h b/include/asm-x86/futex.h
index c9952ea9f698..ac0fbf24d722 100644
--- a/include/asm-x86/futex.h
+++ b/include/asm-x86/futex.h
@@ -12,35 +12,32 @@
 #include <asm/uaccess.h>
 
 #define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
-	__asm__ __volatile(					\
-"1:	" insn "\n"						\
-"2:	.section .fixup,\"ax\"\n				\
-3:	mov	%3, %1\n					\
-	jmp	2b\n						\
-	.previous\n"						\
-	_ASM_EXTABLE(1b,3b)					\
-	: "=r" (oldval), "=r" (ret), "+m" (*uaddr)		\
-	: "i" (-EFAULT), "0" (oparg), "1" (0))
+	asm volatile("1:\t" insn "\n"				\
+		     "2:\t.section .fixup,\"ax\"\n"		\
+		     "3:\tmov\t%3, %1\n"			\
+		     "\tjmp\t2b\n"				\
+		     "\t.previous\n"				\
+		     _ASM_EXTABLE(1b, 3b)			\
+		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
+		     : "i" (-EFAULT), "0" (oparg), "1" (0))
 
 #define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
-	__asm__ __volatile(					\
-"1:	movl	%2, %0\n					\
-	movl	%0, %3\n"					\
-	insn "\n"						\
-"2:	lock; cmpxchgl %3, %2\n					\
-	jnz	1b\n						\
-3:	.section .fixup,\"ax\"\n				\
-4:	mov	%5, %1\n					\
-	jmp	3b\n						\
-	.previous\n"						\
-	_ASM_EXTABLE(1b,4b)					\
-	_ASM_EXTABLE(2b,4b)					\
-	: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),		\
-	  "=&r" (tem)						\
-	: "r" (oparg), "i" (-EFAULT), "1" (0))
+	asm volatile("1:\tmovl	%2, %0\n"			\
+		     "\tmovl\t%0, %3\n"				\
+		     "\t" insn "\n"				\
+		     "2:\tlock; cmpxchgl %3, %2\n"		\
+		     "\tjnz\t1b\n"				\
+		     "3:\t.section .fixup,\"ax\"\n"		\
+		     "4:\tmov\t%5, %1\n"			\
+		     "\tjmp\t3b\n"				\
+		     "\t.previous\n"				\
+		     _ASM_EXTABLE(1b, 4b)			\
+		     _ASM_EXTABLE(2b, 4b)			\
+		     : "=&a" (oldval), "=&r" (ret),		\
+		       "+m" (*uaddr), "=&r" (tem)		\
+		     : "r" (oparg), "i" (-EFAULT), "1" (0))
 
-static inline int
-futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
+static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
 {
 	int op = (encoded_op >> 28) & 7;
 	int cmp = (encoded_op >> 24) & 15;
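
For context on the two macros reindented above: __futex_atomic_op1 applies a single locked instruction directly to the user word, while __futex_atomic_op2 handles operations that have no single-instruction form by reading the old value, computing the new one, and retrying "lock; cmpxchgl" until it sticks. A minimal user-space sketch of that op2 retry loop, using GCC __atomic builtins instead of the kernel's inline asm and .fixup/_ASM_EXTABLE fault handling (futex_op2_model and do_op are illustrative names, not from this patch):

/* Hypothetical user-space model of the __futex_atomic_op2 cmpxchg loop:
 * read the old value, compute the new value, and retry the compare-and-swap
 * until no other thread has modified the word in between.  The kernel macro
 * does the same with "lock; cmpxchgl" plus a fixup path that turns faults on
 * the user address into -EFAULT. */
#include <stdio.h>

static int futex_op2_model(int *uaddr, int (*do_op)(int old, int oparg), int oparg)
{
	int oldval = __atomic_load_n(uaddr, __ATOMIC_RELAXED);
	int newval;

	do {
		newval = do_op(oldval, oparg);
		/* On failure the builtin refreshes oldval, so the next
		 * iteration recomputes newval from the current contents. */
	} while (!__atomic_compare_exchange_n(uaddr, &oldval, newval, 0,
					      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));

	return oldval;	/* value observed just before the successful exchange */
}

static int op_or(int old, int oparg) { return old | oparg; }

int main(void)
{
	int futex_word = 0x3;
	int old = futex_op2_model(&futex_word, op_or, 0x4);

	printf("old=%#x new=%#x\n", old, futex_word);	/* old=0x3 new=0x7 */
	return 0;
}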
@@ -87,20 +84,33 @@ futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
 
 	if (!ret) {
 		switch (cmp) {
-		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
-		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
-		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
-		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
-		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
-		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
-		default: ret = -ENOSYS;
+		case FUTEX_OP_CMP_EQ:
+			ret = (oldval == cmparg);
+			break;
+		case FUTEX_OP_CMP_NE:
+			ret = (oldval != cmparg);
+			break;
+		case FUTEX_OP_CMP_LT:
+			ret = (oldval < cmparg);
+			break;
+		case FUTEX_OP_CMP_GE:
+			ret = (oldval >= cmparg);
+			break;
+		case FUTEX_OP_CMP_LE:
+			ret = (oldval <= cmparg);
+			break;
+		case FUTEX_OP_CMP_GT:
+			ret = (oldval > cmparg);
+			break;
+		default:
+			ret = -ENOSYS;
 		}
 	}
 	return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval,
+						int newval)
 {
 
 #if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
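
The FUTEX_OP_CMP_* cases reflowed above evaluate the comparison half of the encoded_op word that user space packs with FUTEX_OP() when issuing FUTEX_WAKE_OP. Below is a small sketch of how that word is built and unpacked, mirroring the op/cmp shifts visible at the top of futex_atomic_op_inuser(); the constants follow my reading of <linux/futex.h> (FUTEX_OP_ADD = 1, FUTEX_OP_CMP_GT = 4), and the decode uses plain masks where the kernel's helper sign-extends the 12-bit argument fields:

/* Sketch of the FUTEX_OP encoding consumed by futex_atomic_op_inuser():
 * bits 31..28 = op, 27..24 = cmp, 23..12 = oparg, 11..0 = cmparg.
 * The decode below is illustrative, not copied from this file. */
#include <stdio.h>

#define FUTEX_OP(op, oparg, cmp, cmparg)				\
	((((op) & 0xf) << 28) | (((cmp) & 0xf) << 24) |			\
	 (((oparg) & 0xfff) << 12) | ((cmparg) & 0xfff))

int main(void)
{
	/* "add 1 to the futex word, then report whether the old value was > 0" */
	int encoded_op = FUTEX_OP(1 /* FUTEX_OP_ADD */, 1,
				  4 /* FUTEX_OP_CMP_GT */, 0);

	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op >> 12) & 0xfff;	/* kernel sign-extends this field */
	int cmparg = encoded_op & 0xfff;

	printf("op=%d cmp=%d oparg=%d cmparg=%d\n", op, cmp, oparg, cmparg);
	return 0;
}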
@@ -112,16 +122,15 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
 		return -EFAULT;
 
-	__asm__ __volatile__(
-		"1:	lock; cmpxchgl %3, %1			\n"
-		"2:	.section .fixup, \"ax\"			\n"
-		"3:	mov     %2, %0				\n"
-		"	jmp     2b				\n"
-		"	.previous				\n"
-		_ASM_EXTABLE(1b,3b)
-		: "=a" (oldval), "+m" (*uaddr)
-		: "i" (-EFAULT), "r" (newval), "0" (oldval)
-		: "memory"
+	asm volatile("1:\tlock; cmpxchgl %3, %1\n"
+		     "2:\t.section .fixup, \"ax\"\n"
+		     "3:\tmov %2, %0\n"
+		     "\tjmp 2b\n"
+		     "\t.previous\n"
+		     _ASM_EXTABLE(1b, 3b)
+		     : "=a" (oldval), "+m" (*uaddr)
+		     : "i" (-EFAULT), "r" (newval), "0" (oldval)
+		     : "memory"
 	);
 
 	return oldval;
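
Finally, futex_atomic_cmpxchg_inatomic(), whose asm block is reformatted in the hunk above, performs one compare-and-exchange on the user word and hands back the value it actually found there. A hedged user-space model of that contract, using __sync_val_compare_and_swap in place of the kernel's "lock; cmpxchgl" and -EFAULT fixup path (futex_cmpxchg_model is an illustrative name, not from this file):

/* Hypothetical model of futex_atomic_cmpxchg_inatomic(): atomically replace
 * *uaddr with newval only if it still contains oldval, and return the value
 * actually observed.  The kernel version additionally maps a faulting user
 * address to -EFAULT via the exception table. */
#include <stdio.h>

static int futex_cmpxchg_model(int *uaddr, int oldval, int newval)
{
	return __sync_val_compare_and_swap(uaddr, oldval, newval);
}

int main(void)
{
	int futex_word = 42;

	/* Succeeds: the word still holds 42, so it becomes 43. */
	int seen = futex_cmpxchg_model(&futex_word, 42, 43);
	printf("seen=%d word=%d\n", seen, futex_word);	/* seen=42 word=43 */

	/* Fails: the word is now 43, not 42, so it is left untouched. */
	seen = futex_cmpxchg_model(&futex_word, 42, 99);
	printf("seen=%d word=%d\n", seen, futex_word);	/* seen=43 word=43 */
	return 0;
}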