Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86/futex.h    | 6
-rw-r--r--  include/asm-x86/spinlock.h | 6
2 files changed, 6 insertions, 6 deletions
diff --git a/include/asm-x86/futex.h b/include/asm-x86/futex.h
index 45dc24d84186..06b924ef6fa5 100644
--- a/include/asm-x86/futex.h
+++ b/include/asm-x86/futex.h
@@ -25,7 +25,7 @@
 	asm volatile("1:\tmovl %2, %0\n" \
 		     "\tmovl\t%0, %3\n" \
 		     "\t" insn "\n" \
-		     "2:\tlock; cmpxchgl %3, %2\n" \
+		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
 		     "\tjnz\t1b\n" \
 		     "3:\t.section .fixup,\"ax\"\n" \
 		     "4:\tmov\t%5, %1\n" \
@@ -64,7 +64,7 @@ static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
 		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
 		break;
 	case FUTEX_OP_ADD:
-		__futex_atomic_op1("lock; xaddl %0, %2", ret, oldval,
+		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
 				   uaddr, oparg);
 		break;
 	case FUTEX_OP_OR:
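For reference, the FUTEX_OP_ADD arm above asks __futex_atomic_op1 for a locked xaddl on the user-space word, i.e. an atomic fetch-and-add that leaves the previous value behind for the caller. A rough user-space illustration of the same semantics (the function name is made up for this sketch; it is not kernel code):

#include <stdint.h>

/* Illustrative only: LOCK_PREFIX "xaddl %0, %2" adds oparg to *uaddr
 * atomically and hands back the value that was there before. */
static inline int32_t futex_op_add_sketch(int32_t *uaddr, int32_t oparg)
{
	return __sync_fetch_and_add(uaddr, oparg);
}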
@@ -122,7 +122,7 @@ static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval,
 	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
 		return -EFAULT;
 
-	asm volatile("1:\tlock; cmpxchgl %3, %1\n"
+	asm volatile("1:\t" LOCK_PREFIX "cmpxchgl %3, %1\n"
 		     "2:\t.section .fixup, \"ax\"\n"
 		     "3:\tmov %2, %0\n"
 		     "\tjmp 2b\n"
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index 5d08fa280fdf..93adae338ac6 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -97,7 +97,7 @@ static __always_inline int __ticket_spin_trylock(raw_spinlock_t *lock)
 		     "jne 1f\n\t"
 		     "movw %w0,%w1\n\t"
 		     "incb %h1\n\t"
-		     "lock ; cmpxchgw %w1,%2\n\t"
+		     LOCK_PREFIX "cmpxchgw %w1,%2\n\t"
 		     "1:"
 		     "sete %b1\n\t"
 		     "movzbl %b1,%0\n\t"
@@ -135,7 +135,7 @@ static __always_inline void __ticket_spin_lock(raw_spinlock_t *lock)
 	int inc = 0x00010000;
 	int tmp;
 
-	asm volatile("lock ; xaddl %0, %1\n"
+	asm volatile(LOCK_PREFIX "xaddl %0, %1\n"
 		     "movzwl %w0, %2\n\t"
 		     "shrl $16, %0\n\t"
 		     "1:\t"
@@ -162,7 +162,7 @@ static __always_inline int __ticket_spin_trylock(raw_spinlock_t *lock)
 		     "cmpl %0,%1\n\t"
 		     "jne 1f\n\t"
 		     "addl $0x00010000, %1\n\t"
-		     "lock ; cmpxchgl %1,%2\n\t"
+		     LOCK_PREFIX "cmpxchgl %1,%2\n\t"
 		     "1:"
 		     "sete %b1\n\t"
 		     "movzbl %b1,%0\n\t"