author     Andi Kleen <ak@suse.de>                   2006-08-30 13:37:13 -0400
committer  Linus Torvalds <torvalds@g5.osdl.org>     2006-08-30 19:05:15 -0400
commit     841be8ddf92578e5b481ed9f9abb85649fc13238 (patch)
tree       55d2d25350eb15b6822d91b38a7cddd9c697b58f /include/asm-x86_64
parent     ceee88223047749ad683d397b19904c3dfb6adeb (diff)
[PATCH] x86_64: Remove alternative_smp
The .fill causes miscompilations with some binutils versions. Instead just patch the lock prefix in the lock constructs. That is the majority of the cost and should be good enough.

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
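The patching the message refers to can be pictured with the following minimal sketch. It is illustrative only, not the kernel's actual arch code: it assumes that the retained LOCK_PREFIX macro (kept in alternative.h below) records one 8-byte address per lock-prefixed instruction in the .smp_locks section, and that the start/end bounds of that section come from the linker script. Switching between SMP and UP behaviour then only touches the single 0xf0 lock-prefix byte in front of each locked instruction, so no .fill padding is needed.

/* Illustrative sketch only -- not code added by this patch. */
#include <stdint.h>

static void smp_patch_locks(const uint64_t *start, const uint64_t *end,
			    int uniprocessor)
{
	const uint64_t *entry;

	/* .smp_locks (assumed layout): one address per lock-prefixed insn. */
	for (entry = start; entry < end; entry++) {
		uint8_t *prefix = (uint8_t *)(uintptr_t)*entry;

		if (uniprocessor && *prefix == 0xf0)
			*prefix = 0x90;		/* lock prefix -> one-byte NOP */
		else if (!uniprocessor && *prefix == 0x90)
			*prefix = 0xf0;		/* NOP -> lock prefix */
	}
}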
Diffstat (limited to 'include/asm-x86_64')
-rw-r--r--  include/asm-x86_64/alternative.h  20
-rw-r--r--  include/asm-x86_64/spinlock.h     11
2 files changed, 4 insertions, 27 deletions
diff --git a/include/asm-x86_64/alternative.h b/include/asm-x86_64/alternative.h
index 709ad3f0d354..a584826cc570 100644
--- a/include/asm-x86_64/alternative.h
+++ b/include/asm-x86_64/alternative.h
@@ -103,9 +103,6 @@ static inline void alternatives_smp_switch(int smp) {}
 /*
  * Alternative inline assembly for SMP.
  *
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
  * The LOCK_PREFIX macro defined here replaces the LOCK and
  * LOCK_PREFIX macros used everywhere in the source tree.
  *
@@ -125,21 +122,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 
 #ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...) \
-	asm volatile ("661:\n\t" smpinstr "\n662:\n" \
-		".section .smp_altinstructions,\"a\"\n" \
-		" .align 8\n" \
-		" .quad 661b\n" /* label */ \
-		" .quad 663f\n" /* new instruction */ \
-		" .byte " __stringify(X86_FEATURE_UP) "\n" \
-		" .byte 662b-661b\n" /* sourcelen */ \
-		" .byte 664f-663f\n" /* replacementlen */ \
-		".previous\n" \
-		".section .smp_altinstr_replacement,\"awx\"\n" \
-		"663:\n\t" upinstr "\n" /* replacement */ \
-		"664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
-		".previous" : args)
-
 #define LOCK_PREFIX \
 		".section .smp_locks,\"a\"\n" \
 		" .align 8\n" \
@@ -148,8 +130,6 @@ static inline void alternatives_smp_switch(int smp) {}
 		"661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
-	asm volatile (upinstr : args)
 #define LOCK_PREFIX ""
 #endif
 
diff --git a/include/asm-x86_64/spinlock.h b/include/asm-x86_64/spinlock.h
index 8d3421996f94..248a79f0eaff 100644
--- a/include/asm-x86_64/spinlock.h
+++ b/include/asm-x86_64/spinlock.h
@@ -21,7 +21,7 @@
 
 #define __raw_spin_lock_string \
 	"\n1:\t" \
-	"lock ; decl %0\n\t" \
+	LOCK_PREFIX " ; decl %0\n\t" \
 	"js 2f\n" \
 	LOCK_SECTION_START("") \
 	"2:\t" \
@@ -40,10 +40,7 @@
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-	alternative_smp(
-		__raw_spin_lock_string,
-		__raw_spin_lock_string_up,
-		"=m" (lock->slock) : : "memory");
+	asm volatile(__raw_spin_lock_string : "=m" (lock->slock) : : "memory");
 }
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
@@ -125,12 +122,12 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile("lock ; incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX " ; incl %0" :"=m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile("lock ; addl $" RW_LOCK_BIAS_STR ",%0"
+	asm volatile(LOCK_PREFIX " ; addl $" RW_LOCK_BIAS_STR ",%0"
 				: "=m" (rw->lock) : : "memory");
 }
 
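For a concrete picture of what the single-byte patch does to the code generated from the inline asm above, here is an illustrative snippet. The locked_inc helper, its operand addressing through %rdi, and the byte listing are assumptions for illustration and are not taken from the tree.

#include <stdint.h>

/* Sketch in the style of __raw_read_unlock() above.  On x86-64 the
 * assembler emits roughly:
 *     f0 ff 07        lock incl (%rdi)
 * and patching for a uniprocessor rewrites only the leading prefix byte:
 *     90 ff 07        nop; incl (%rdi)
 * so the instruction itself stays in place and nothing has to be padded. */
static inline void locked_inc(volatile int32_t *counter)
{
	asm volatile("lock; incl %0"
		     : "+m" (*counter)
		     :
		     : "memory");
}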