author    Gerd Hoffmann <kraxel@suse.de>    2006-03-23 05:59:32 -0500
committer Linus Torvalds <torvalds@g5.osdl.org>    2006-03-23 10:38:04 -0500
commit    9a0b5817ad97bb718ab85322759d19a238712b47
tree      39bd21eb69c4001b99096d96a76a2e5d37904108 /include/asm-i386/rwlock.h
parent    4d7d8c82c181711d28c8336108330a9121f5ef07
[PATCH] x86: SMP alternatives
Implement SMP alternatives, i.e. switching at runtime between different code versions for UP and SMP. The code can patch both SMP->UP and UP->SMP. The UP->SMP case is useful for CPU hotplug.

With CONFIG_CPU_HOTPLUG enabled the code switches to UP at boot time and when the number of CPUs goes down to 1, and switches to SMP when the number of CPUs goes up to 2. Without CONFIG_CPU_HOTPLUG or on non-SMP-capable systems the code is patched once at boot time (if needed) and the tables are released afterwards.

The changes in detail:

* The current alternatives bits are moved to a separate file; the SMP alternatives code is added there.

* The patch adds some new ELF sections to the kernel:

    .smp_altinstructions
        Like .altinstructions, also contains a list of alt_instr structs.
    .smp_altinstr_replacement
        Like .altinstr_replacement, but also has some space to save the original instruction before replacing it.
    .smp_locks
        A list of pointers to lock prefixes which can be nop'ed out on UP.

  The first two are used to replace more complex instruction sequences such as spinlocks and semaphores. It would be possible to handle the lock prefixes with them as well, but treating the prefixes as a special case keeps the table sizes much smaller.

* The sections are page-aligned and padded up to page size, so they can be freed if they are not needed.

* The code that releases init pages is split out into a separate function, which is also used to release the ELF sections if they are unused.

Signed-off-by: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Chuck Ebbert <76306.1226@compuserve.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
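To make the .smp_locks table concrete: the patching pass simply walks a list of addresses of lock prefix bytes and rewrites them in place. Below is a minimal sketch of that walk, assuming hypothetical __smp_locks/__smp_locks_end boundary symbols (the real code lives in the separate alternatives file this patch introduces and does considerably more bookkeeping):

#include <stdint.h>

/* Hypothetical linker-provided bounds of the .smp_locks section;
 * each entry is the address of a 0xf0 lock prefix byte. */
extern uint8_t *__smp_locks[], *__smp_locks_end[];

/* SMP -> UP: turn every recorded lock prefix into a one-byte nop. */
static void smp_unlock_sketch(void)
{
        uint8_t **ptr;

        for (ptr = __smp_locks; ptr < __smp_locks_end; ptr++)
                **ptr = 0x90;   /* nop */
}

/* UP -> SMP (CPU hotplug): restore the prefixes before a second
 * CPU comes online. */
static void smp_relock_sketch(void)
{
        uint8_t **ptr;

        for (ptr = __smp_locks; ptr < __smp_locks_end; ptr++)
                **ptr = 0xf0;   /* lock */
}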
Diffstat (limited to 'include/asm-i386/rwlock.h')
-rw-r--r--    include/asm-i386/rwlock.h    56
1 file changed, 30 insertions(+), 26 deletions(-)
diff --git a/include/asm-i386/rwlock.h b/include/asm-i386/rwlock.h
index b57cc7afdf7e..94f00195d543 100644
--- a/include/asm-i386/rwlock.h
+++ b/include/asm-i386/rwlock.h
@@ -21,21 +21,23 @@
 #define RW_LOCK_BIAS_STR "0x01000000"
 
 #define __build_read_lock_ptr(rw, helper) \
-        asm volatile(LOCK "subl $1,(%0)\n\t" \
-                     "jns 1f\n" \
-                     "call " helper "\n\t" \
-                     "1:\n" \
-                     ::"a" (rw) : "memory")
+        alternative_smp("lock; subl $1,(%0)\n\t" \
+                        "jns 1f\n" \
+                        "call " helper "\n\t" \
+                        "1:\n", \
+                        "subl $1,(%0)\n\t", \
+                        :"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper) \
-        asm volatile(LOCK "subl $1,%0\n\t" \
-                     "jns 1f\n" \
-                     "pushl %%eax\n\t" \
-                     "leal %0,%%eax\n\t" \
-                     "call " helper "\n\t" \
-                     "popl %%eax\n\t" \
-                     "1:\n" \
-                     :"=m" (*(volatile int *)rw) : : "memory")
+        alternative_smp("lock; subl $1,%0\n\t" \
+                        "jns 1f\n" \
+                        "pushl %%eax\n\t" \
+                        "leal %0,%%eax\n\t" \
+                        "call " helper "\n\t" \
+                        "popl %%eax\n\t" \
+                        "1:\n", \
+                        "subl $1,%0\n\t", \
+                        "=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper) do { \
                  if (__builtin_constant_p(rw)) \
@@ -45,21 +47,23 @@
                  } while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                     "jz 1f\n" \
-                     "call " helper "\n\t" \
-                     "1:\n" \
-                     ::"a" (rw) : "memory")
+        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+                        "jz 1f\n" \
+                        "call " helper "\n\t" \
+                        "1:\n", \
+                        "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
+                        :"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
-                     "jz 1f\n" \
-                     "pushl %%eax\n\t" \
-                     "leal %0,%%eax\n\t" \
-                     "call " helper "\n\t" \
-                     "popl %%eax\n\t" \
-                     "1:\n" \
-                     :"=m" (*(volatile int *)rw) : : "memory")
+        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+                        "jz 1f\n" \
+                        "pushl %%eax\n\t" \
+                        "leal %0,%%eax\n\t" \
+                        "call " helper "\n\t" \
+                        "popl %%eax\n\t" \
+                        "1:\n", \
+                        "subl $" RW_LOCK_BIAS_STR ",%0\n\t", \
+                        "=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_write_lock(rw, helper) do { \
                  if (__builtin_constant_p(rw)) \
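For readers tracing the new macros above: alternative_smp() takes the SMP sequence (with an explicit "lock;" prefix), the UP sequence, and the asm operand/clobber list as a trailing variadic argument. Its actual definition lives in the separate alternatives header this patch introduces; the sketch below is only modeled on the record layout the commit message describes, and its labels, directives, and record format are illustrative, not a verbatim kernel copy:

/* Illustrative only -- modeled on the commit message's description of
 * the .smp_altinstructions record format, not the kernel's macro. */
#define alternative_smp_sketch(smpinstr, upinstr, args...)              \
        asm volatile("661:\n\t" smpinstr "\n662:\n"                     \
                     ".section .smp_altinstructions,\"a\"\n"            \
                     "  .align 4\n"                                     \
                     "  .long 661b\n"           /* SMP code address */  \
                     "  .long 663f\n"           /* UP replacement */    \
                     "  .byte 662b-661b\n"      /* SMP code length */   \
                     "  .byte 664f-663f\n"      /* replacement length */\
                     ".previous\n"                                      \
                     ".section .smp_altinstr_replacement,\"awx\"\n"     \
                     "663:\n\t" upinstr "\n"    /* the UP variant */    \
                     "664:\n\t.fill 662b-661b,1,0x90\n" /* save area */ \
                     ".previous"                                        \
                     : args)

/* Example use mirroring __build_read_lock_ptr above: on SMP the lock'ed
 * decrement runs; on UP the patcher swaps in the plain decrement. */
static inline void read_lock_fast_path_sketch(int *rw)
{
        alternative_smp_sketch("lock; subl $1,(%0)",
                               "subl $1,(%0)",
                               :"a" (rw) : "memory");
}

The trailing list is pasted directly after the expanded asm's output colon, which explains the asymmetry visible in the diff: the _ptr variants pass :"a" (rw) : "memory" (no outputs, an input in %eax), while the _const variants pass "=m" (*(volatile int *)rw) : : "memory" (a memory output and no inputs).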