path: root/arch/x86/include/asm/system.h
author		Tejun Heo <tj@kernel.org>	2010-01-04 19:17:33 -0500
committer	Tejun Heo <tj@kernel.org>	2010-01-04 19:17:33 -0500
commit		32032df6c2f6c9c6b2ada2ce42322231824f70c2 (patch)
tree		b1ce838a37044bb38dfc128e2116ca35630e629a /arch/x86/include/asm/system.h
parent		22b737f4c75197372d64afc6ed1bccd58c00e549 (diff)
parent		c5974b835a909ff15c3b7e6cf6789b5eb919f419 (diff)
Merge branch 'master' into percpu
Conflicts:
	arch/powerpc/platforms/pseries/hvCall.S
	include/linux/percpu.h
Diffstat (limited to 'arch/x86/include/asm/system.h')
-rw-r--r--	arch/x86/include/asm/system.h	32
1 file changed, 17 insertions(+), 15 deletions(-)
diff --git a/arch/x86/include/asm/system.h b/arch/x86/include/asm/system.h
index de10c19d9558..e529f26c3292 100644
--- a/arch/x86/include/asm/system.h
+++ b/arch/x86/include/asm/system.h
@@ -23,6 +23,7 @@ struct task_struct *__switch_to(struct task_struct *prev,
 struct tss_struct;
 void __switch_to_xtra(struct task_struct *prev_p, struct task_struct *next_p,
 		      struct tss_struct *tss);
+extern void show_regs_common(void);
 
 #ifdef CONFIG_X86_32
 
@@ -128,8 +129,6 @@ do { \
128 "movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */ \ 129 "movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */ \
129 "movq %P[threadrsp](%[next]),%%rsp\n\t" /* restore RSP */ \ 130 "movq %P[threadrsp](%[next]),%%rsp\n\t" /* restore RSP */ \
130 "call __switch_to\n\t" \ 131 "call __switch_to\n\t" \
131 ".globl thread_return\n" \
132 "thread_return:\n\t" \
133 "movq "__percpu_arg([current_task])",%%rsi\n\t" \ 132 "movq "__percpu_arg([current_task])",%%rsi\n\t" \
134 __switch_canary \ 133 __switch_canary \
135 "movq %P[thread_info](%%rsi),%%r8\n\t" \ 134 "movq %P[thread_info](%%rsi),%%r8\n\t" \
@@ -157,19 +156,22 @@ extern void native_load_gs_index(unsigned);
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
  */
-#define loadsegment(seg, value)		\
-	asm volatile("\n"			\
-		     "1:\t"			\
-		     "movl %k0,%%" #seg "\n"	\
-		     "2:\n"			\
-		     ".section .fixup,\"ax\"\n"	\
-		     "3:\t"			\
-		     "movl %k1, %%" #seg "\n\t"	\
-		     "jmp 2b\n"			\
-		     ".previous\n"		\
-		     _ASM_EXTABLE(1b,3b)	\
-		     : :"r" (value), "r" (0) : "memory")
-
+#define loadsegment(seg, value)						\
+do {									\
+	unsigned short __val = (value);					\
+									\
+	asm volatile("						\n"	\
+		     "1:	movl %k0,%%" #seg "		\n"	\
+									\
+		     ".section .fixup,\"ax\"			\n"	\
+		     "2:	xorl %k0,%k0			\n"	\
+		     "		jmp 1b				\n"	\
+		     ".previous					\n"	\
+									\
+		     _ASM_EXTABLE(1b, 2b)				\
+									\
+		     : "+r" (__val) : : "memory");			\
+} while (0)
 
 /*
  * Save a segment register away
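
Note on the loadsegment() rework in the last hunk: the old macro's fixup loaded a separately supplied zero operand into the segment register from the .fixup section, while the new one zeroes its single "+r" operand and jumps back to retry the original, exception-protected load, so a faulting selector degrades to the null selector. Below is a minimal user-space C sketch of that fall-back behaviour; it is illustrative only and not part of this commit, and try_load_segment() is a hypothetical stand-in for the faulting movl into the segment register.

	#include <stdbool.h>
	#include <stdio.h>

	/*
	 * Hypothetical stand-in for the "movl %k0,%%<seg>" that may fault;
	 * here we pretend only the null selector loads without faulting.
	 */
	static bool try_load_segment(unsigned short sel)
	{
		return sel == 0;
	}

	/*
	 * Sketch of the reworked loadsegment() fixup: if the load faults,
	 * zero the operand and retry, leaving the null selector loaded.
	 */
	static unsigned short loadsegment_sketch(unsigned short sel)
	{
		while (!try_load_segment(sel))
			sel = 0;	/* fixup path: fall back to the null selector */
		return sel;
	}

	int main(void)
	{
		printf("selector %#x ends up as %#x\n", 0x23, loadsegment_sketch(0x23));
		return 0;
	}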