author    Andi Kleen <ak@suse.de>  2006-08-30 13:37:14 -0400
committer Linus Torvalds <torvalds@g5.osdl.org>  2006-08-30 19:05:15 -0400
commit    8c74932779fc6f61b4c30145863a17125c1a296c (patch)
tree      664c5c1ea4674ec35dbc499294c3830f8176fb43 /include
parent    841be8ddf92578e5b481ed9f9abb85649fc13238 (diff)
[PATCH] i386: Remove alternative_smp
The .fill causes miscompilations with some binutils versions. Instead,
just patch the lock prefix in the lock constructs. That is the majority
of the cost and should be good enough.

Cc: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
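The mechanism the patch keeps is the LOCK_PREFIX one: each lock site
records the address of its one-byte 0xf0 lock prefix in the .smp_locks
section, so a UP kernel can later overwrite that byte in place instead
of swapping whole instruction sequences. A minimal sketch of such a
patching pass follows; the function name is hypothetical and "u8" is
the kernel's unsigned-char typedef (the real code lives in
arch/i386/kernel/alternative.c):

/*
 * Hedged sketch, not the exact kernel routine: walk the pointers
 * collected in .smp_locks and turn each one-byte "lock" prefix (0xf0)
 * into a one-byte NOP (0x90) when running uniprocessor.
 */
static void smp_patch_locks_sketch(u8 **start, u8 **end)
{
	u8 **ptr;

	for (ptr = start; ptr < end; ptr++)
		**ptr = 0x90;		/* nop out the lock prefix */
}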
Diffstat (limited to 'include')
-rw-r--r--	include/asm-i386/alternative.h | 20 --------------------
-rw-r--r--	include/asm-i386/rwlock.h      | 14 ++++++--------
-rw-r--r--	include/asm-i386/spinlock.h    | 17 ++++-------------
3 files changed, 10 insertions(+), 41 deletions(-)
diff --git a/include/asm-i386/alternative.h b/include/asm-i386/alternative.h
index bb3b6317c810..b01a7ec409ce 100644
--- a/include/asm-i386/alternative.h
+++ b/include/asm-i386/alternative.h
@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {}
 /*
  * Alternative inline assembly for SMP.
  *
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
  * The LOCK_PREFIX macro defined here replaces the LOCK and
  * LOCK_PREFIX macros used everywhere in the source tree.
  *
@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 
 #ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...) \
-	asm volatile ("661:\n\t" smpinstr "\n662:\n" \
-		      ".section .smp_altinstructions,\"a\"\n" \
-		      "  .align 4\n" \
-		      "  .long 661b\n" /* label */ \
-		      "  .long 663f\n" /* new instruction */ \
-		      "  .byte " __stringify(X86_FEATURE_UP) "\n" \
-		      "  .byte 662b-661b\n" /* sourcelen */ \
-		      "  .byte 664f-663f\n" /* replacementlen */ \
-		      ".previous\n" \
-		      ".section .smp_altinstr_replacement,\"awx\"\n" \
-		      "663:\n\t" upinstr "\n" /* replacement */ \
-		      "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
-		      ".previous" : args)
-
 #define LOCK_PREFIX \
 		".section .smp_locks,\"a\"\n" \
 		"  .align 4\n" \
@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {}
 		"661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
-	asm volatile (upinstr : args)
 #define LOCK_PREFIX ""
 #endif
 
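For contrast, a hedged before/after of a lock-taking call site under the
two schemes (an illustrative expansion, not verbatim kernel code). The
removed alternative_smp() carried two complete instruction sequences and
reserved replacement space with ".fill 662b-661b,1,0x42", the
label-difference expression some binutils versions miscompiled; the
surviving LOCK_PREFIX form emits a single sequence in which only the
lock byte is ever patched:

	/* Before: two full alternatives (SMP first, UP second). */
	alternative_smp("lock; decb %0",
			"decb %0",
			"+m" (lock) : : "memory");

	/* After: one sequence; the 0xf0 byte emitted by LOCK_PREFIX is
	 * the only thing runtime patching touches. */
	asm volatile(LOCK_PREFIX " ; decb %0" : "+m" (lock) : : "memory");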
diff --git a/include/asm-i386/rwlock.h b/include/asm-i386/rwlock.h
index 96b0bef2ea56..3ac1ba98b1bc 100644
--- a/include/asm-i386/rwlock.h
+++ b/include/asm-i386/rwlock.h
@@ -21,22 +21,20 @@
 #define RW_LOCK_BIAS_STR	"0x01000000"
 
 #define __build_read_lock_ptr(rw, helper) \
-	alternative_smp("lock; subl $1,(%0)\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
 		     "jns 1f\n" \
 		     "call " helper "\n\t" \
-		     "1:\n", \
-		     "subl $1,(%0)\n\t", \
+		     "1:\n" \
 		     :"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper) \
-	alternative_smp("lock; subl $1,%0\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
 		     "jns 1f\n" \
 		     "pushl %%eax\n\t" \
 		     "leal %0,%%eax\n\t" \
 		     "call " helper "\n\t" \
 		     "popl %%eax\n\t" \
-		     "1:\n", \
-		     "subl $1,%0\n\t", \
+		     "1:\n" : \
 		     "+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper)	do { \
@@ -47,7 +45,7 @@
 	} while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-	alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
 		     "jz 1f\n" \
 		     "call " helper "\n\t" \
 		     "1:\n", \
@@ -55,7 +53,7 @@
 		     :"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-	alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
 		     "jz 1f\n" \
 		     "pushl %%eax\n\t" \
 		     "leal %0,%%eax\n\t" \
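The asm in these macros implements a biased reader/writer count: the
lock word starts at RW_LOCK_BIAS (0x01000000), a reader subtracts 1 and
has the lock if the result stays non-negative ("jns"), and a writer
subtracts the whole bias and has the lock only if the result is exactly
zero ("jz"), i.e. there were no readers or writers. A small C11 model
of just those fast paths, for illustration only (the names are
invented; the kernel uses the inline asm above):

#include <stdatomic.h>

#define RW_LOCK_BIAS 0x01000000

/* mirrors "lock; subl $1,(%0); jns 1f" */
static int read_lock_fast(atomic_int *lock)
{
	return atomic_fetch_sub(lock, 1) - 1 >= 0;
}

/* mirrors "lock; subl $" RW_LOCK_BIAS_STR ",(%0); jz 1f" */
static int write_lock_fast(atomic_int *lock)
{
	return atomic_fetch_sub(lock, RW_LOCK_BIAS) - RW_LOCK_BIAS == 0;
}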
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index d816c62a7a1d..d1020363c41a 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -22,7 +22,7 @@
 
 #define __raw_spin_lock_string \
 	"\n1:\t" \
-	"lock ; decb %0\n\t" \
+	LOCK_PREFIX " ; decb %0\n\t" \
 	"jns 3f\n" \
 	"2:\t" \
 	"rep;nop\n\t" \
@@ -38,7 +38,7 @@
  */
 #define __raw_spin_lock_string_flags \
 	"\n1:\t" \
-	"lock ; decb %0\n\t" \
+	LOCK_PREFIX " ; decb %0\n\t" \
 	"jns 5f\n" \
 	"2:\t" \
 	"testl $0x200, %1\n\t" \
@@ -57,15 +57,9 @@
 	"jmp 4b\n" \
 	"5:\n\t"
 
-#define __raw_spin_lock_string_up \
-	"\n\tdecb %0"
-
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-	alternative_smp(
-		__raw_spin_lock_string,
-		__raw_spin_lock_string_up,
-		"+m" (lock->slock) : : "memory");
+	asm(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 #ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
-	alternative_smp(
-		__raw_spin_lock_string_flags,
-		__raw_spin_lock_string_up,
-		"+m" (lock->slock) : "r" (flags) : "memory");
+	asm(__raw_spin_lock_string_flags : "+m" (lock->slock) : "r" (flags) : "memory");
 }
 #endif
 
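After this patch the spinlock fast path is a single "lock; decb" whose
lock byte can be patched away on UP. A hedged C11 model of what
__raw_spin_lock_string does (illustrative only: the spin-wait lines not
shown in the hunk context are assumed, and _mm_pause() stands in for
the "rep;nop" hint):

#include <stdatomic.h>
#include <immintrin.h>		/* _mm_pause(), i.e. "rep;nop" */

/*
 * The lock byte is 1 when free. "lock; decb" drives it to 0 on a
 * successful acquire and negative under contention, in which case we
 * spin until the byte goes positive again, then retry the decrement.
 */
static void raw_spin_lock_model(atomic_schar *slock)
{
	for (;;) {
		if (atomic_fetch_sub(slock, 1) - 1 >= 0)	/* jns */
			return;
		while (atomic_load(slock) <= 0)			/* jle 2b */
			_mm_pause();				/* rep;nop */
	}
}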