author     Gerd Hoffmann <kraxel@suse.de>           2006-06-26 07:56:16 -0400
committer  Linus Torvalds <torvalds@g5.osdl.org>    2006-06-26 13:48:14 -0400
commit     d167a51877e94dda73dd656c51f363502309f713 (patch)
tree       eb02c2974b61777f575dfdc07d4c2adf83bde434 /include/asm-x86_64/rwlock.h
parent     240cd6a80642da528bfa382ec2ae4e3cb8991ea7 (diff)
[PATCH] x86_64: x86_64 version of the smp alternative patch.
Changes are largely identical to the i386 version:

 * alternative #defines are moved to the new alternative.h file.
 * one new ELF section with pointers to the lock prefixes, which can be
   nop'ed out for non-SMP.
 * two new ELF sections similar to the "classic" alternatives to replace
   SMP code with simpler UP code.
 * fixup headers to use alternative.h instead of defining their own
   LOCK / LOCK_PREFIX macros.

The patch reuses the i386 version of the alternatives code to avoid code
duplication.  The code in alternatives.c was shuffled around a bit to
reduce the number of #ifdefs needed.  It also got some tweaks needed for
x86_64 (vsyscall page handling) and new features (the noreplacement
option, which was x86_64-only up to now).

Debug printk's are changed from compile-time to runtime.

Loosely based on an early version from Bastian Blank <waldi@debian.org>

Signed-off-by: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
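For context, a minimal sketch of the LOCK_PREFIX mechanism this diff switches to, modeled on the i386 alternative.h of the same era (an assumption for illustration, not the literal x86_64 header added by the patch): under CONFIG_SMP the macro emits a lock prefix and records its address in the .smp_locks ELF section, so a uniprocessor boot can later overwrite each recorded prefix with a NOP; without CONFIG_SMP it expands to nothing.

    /*
     * Hedged sketch only -- names and section layout follow the i386
     * alternative.h of this period; the exact x86_64 header may differ.
     */
    #ifdef CONFIG_SMP
    #define LOCK_PREFIX \
    		".section .smp_locks,\"a\"\n"	\
    		"  .align 8\n"			\
    		"  .quad 661f\n" /* record address of the lock prefix */ \
    		".previous\n"			\
    		"661:\n\tlock; "
    #else /* !CONFIG_SMP */
    #define LOCK_PREFIX ""
    #endif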
Diffstat (limited to 'include/asm-x86_64/rwlock.h')
-rw-r--r--   include/asm-x86_64/rwlock.h | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/include/asm-x86_64/rwlock.h b/include/asm-x86_64/rwlock.h
index 9942cc393064..dea0e9459264 100644
--- a/include/asm-x86_64/rwlock.h
+++ b/include/asm-x86_64/rwlock.h
@@ -24,7 +24,7 @@
 #define RW_LOCK_BIAS_STR "0x01000000"
 
 #define __build_read_lock_ptr(rw, helper) \
-	asm volatile(LOCK "subl $1,(%0)\n\t" \
+	asm volatile(LOCK_PREFIX "subl $1,(%0)\n\t" \
 		     "js 2f\n" \
 		     "1:\n" \
 		     LOCK_SECTION_START("") \
@@ -34,7 +34,7 @@
34 ::"a" (rw) : "memory") 34 ::"a" (rw) : "memory")
35 35
36#define __build_read_lock_const(rw, helper) \ 36#define __build_read_lock_const(rw, helper) \
37 asm volatile(LOCK "subl $1,%0\n\t" \ 37 asm volatile(LOCK_PREFIX "subl $1,%0\n\t" \
38 "js 2f\n" \ 38 "js 2f\n" \
39 "1:\n" \ 39 "1:\n" \
40 LOCK_SECTION_START("") \ 40 LOCK_SECTION_START("") \
@@ -54,7 +54,7 @@
 	} while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-	asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+	asm volatile(LOCK_PREFIX "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
 		     "jnz 2f\n" \
 		     "1:\n" \
 		     LOCK_SECTION_START("") \
@@ -64,7 +64,7 @@
64 ::"a" (rw) : "memory") 64 ::"a" (rw) : "memory")
65 65
66#define __build_write_lock_const(rw, helper) \ 66#define __build_write_lock_const(rw, helper) \
67 asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",%0\n\t" \ 67 asm volatile(LOCK_PREFIX "subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
68 "jnz 2f\n" \ 68 "jnz 2f\n" \
69 "1:\n" \ 69 "1:\n" \
70 LOCK_SECTION_START("") \ 70 LOCK_SECTION_START("") \
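
As a rough illustration of how the lock prefixes recorded in .smp_locks get nop'ed out on a uniprocessor kernel, a sketch modeled on the i386 alternative.c of this period (a hypothetical simplification, not code from this file or this hunk):

    /*
     * Sketch: walk the .smp_locks pointer table and replace each
     * recorded lock prefix byte with a NOP (0x90) on a UP kernel.
     */
    static void alternatives_smp_unlock(u8 **start, u8 **end,
    				    u8 *text, u8 *text_end)
    {
    	u8 **ptr;
    
    	for (ptr = start; ptr < end; ptr++) {
    		if (*ptr < text || *ptr > text_end)
    			continue;	/* pointer outside this text range */
    		**ptr = 0x90;		/* overwrite "lock" with NOP */
    	}
    }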