aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-i386/rwlock.h
diff options
context:
space:
mode:
authorAndi Kleen <ak@suse.de>2006-08-30 13:37:14 -0400
committerLinus Torvalds <torvalds@g5.osdl.org>2006-08-30 19:05:15 -0400
commit8c74932779fc6f61b4c30145863a17125c1a296c (patch)
tree664c5c1ea4674ec35dbc499294c3830f8176fb43 /include/asm-i386/rwlock.h
parent841be8ddf92578e5b481ed9f9abb85649fc13238 (diff)
[PATCH] i386: Remove alternative_smp
The .fill causes miscompilations with some binutils versions.

Instead just patch the lock prefix in the lock constructs. That is the
majority of the cost and should be good enough.

Cc: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'include/asm-i386/rwlock.h')
-rw-r--r--include/asm-i386/rwlock.h14
1 file changed, 6 insertions(+), 8 deletions(-)
diff --git a/include/asm-i386/rwlock.h b/include/asm-i386/rwlock.h
index 96b0bef2ea56..3ac1ba98b1bc 100644
--- a/include/asm-i386/rwlock.h
+++ b/include/asm-i386/rwlock.h
@@ -21,22 +21,20 @@
21#define RW_LOCK_BIAS_STR "0x01000000" 21#define RW_LOCK_BIAS_STR "0x01000000"
22 22
23#define __build_read_lock_ptr(rw, helper) \ 23#define __build_read_lock_ptr(rw, helper) \
24 alternative_smp("lock; subl $1,(%0)\n\t" \ 24 asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
25 "jns 1f\n" \ 25 "jns 1f\n" \
26 "call " helper "\n\t" \ 26 "call " helper "\n\t" \
27 "1:\n", \ 27 "1:\n" \
28 "subl $1,(%0)\n\t", \
29 :"a" (rw) : "memory") 28 :"a" (rw) : "memory")
30 29
31#define __build_read_lock_const(rw, helper) \ 30#define __build_read_lock_const(rw, helper) \
32 alternative_smp("lock; subl $1,%0\n\t" \ 31 asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
33 "jns 1f\n" \ 32 "jns 1f\n" \
34 "pushl %%eax\n\t" \ 33 "pushl %%eax\n\t" \
35 "leal %0,%%eax\n\t" \ 34 "leal %0,%%eax\n\t" \
36 "call " helper "\n\t" \ 35 "call " helper "\n\t" \
37 "popl %%eax\n\t" \ 36 "popl %%eax\n\t" \
38 "1:\n", \ 37 "1:\n" : \
39 "subl $1,%0\n\t", \
40 "+m" (*(volatile int *)rw) : : "memory") 38 "+m" (*(volatile int *)rw) : : "memory")
41 39
42#define __build_read_lock(rw, helper) do { \ 40#define __build_read_lock(rw, helper) do { \
@@ -47,7 +45,7 @@
47 } while (0) 45 } while (0)
48 46
49#define __build_write_lock_ptr(rw, helper) \ 47#define __build_write_lock_ptr(rw, helper) \
50 alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \ 48 asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
51 "jz 1f\n" \ 49 "jz 1f\n" \
52 "call " helper "\n\t" \ 50 "call " helper "\n\t" \
53 "1:\n", \ 51 "1:\n", \
@@ -55,7 +53,7 @@
55 :"a" (rw) : "memory") 53 :"a" (rw) : "memory")
56 54
57#define __build_write_lock_const(rw, helper) \ 55#define __build_write_lock_const(rw, helper) \
58 alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \ 56 asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
59 "jz 1f\n" \ 57 "jz 1f\n" \
60 "pushl %%eax\n\t" \ 58 "pushl %%eax\n\t" \
61 "leal %0,%%eax\n\t" \ 59 "leal %0,%%eax\n\t" \