Diffstat (limited to 'include/asm-x86_64/system.h')

 include/asm-x86_64/system.h | 61 +++++++++++++++++++++++++++++----------
 1 file changed, 48 insertions(+), 13 deletions(-)
diff --git a/include/asm-x86_64/system.h b/include/asm-x86_64/system.h
index b34cc2ee222b..a73f0c789d8b 100644
--- a/include/asm-x86_64/system.h
+++ b/include/asm-x86_64/system.h
@@ -20,8 +20,8 @@
 #define __RESTORE(reg,offset) "movq (14-" #offset ")*8(%%rsp),%%" #reg "\n\t"
 
 /* frame pointer must be last for get_wchan */
-#define SAVE_CONTEXT	"pushfq ; pushq %%rbp ; movq %%rsi,%%rbp\n\t"
-#define RESTORE_CONTEXT	"movq %%rbp,%%rsi ; popq %%rbp ; popfq\n\t"
+#define SAVE_CONTEXT	"pushq %%rbp ; movq %%rsi,%%rbp\n\t"
+#define RESTORE_CONTEXT	"movq %%rbp,%%rsi ; popq %%rbp\n\t"
 
 #define __EXTRA_CLOBBER \
 	,"rcx","rbx","rdx","r8","r9","r10","r11","r12","r13","r14","r15"
@@ -137,6 +137,21 @@ struct alt_instr {
 	"663:\n\t" newinstr "\n664:\n"   /* replacement */	\
 	".previous" :: "i" (feature), ##input)
 
+/* Like alternative_input, but with a single output argument */
+#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
+	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
+	".section .altinstructions,\"a\"\n"				\
+	"  .align 8\n"							\
+	"  .quad 661b\n"            /* label */			\
+	"  .quad 663f\n"	    /* new instruction */		\
+	"  .byte %c[feat]\n"        /* feature bit */			\
+	"  .byte 662b-661b\n"	    /* sourcelen */			\
+	"  .byte 664f-663f\n"	    /* replacementlen */		\
+	".previous\n"							\
+	".section .altinstr_replacement,\"ax\"\n"			\
+	"663:\n\t" newinstr "\n664:\n"   /* replacement */		\
+	".previous" : output : [feat] "i" (feature), ##input)
+
 /*
  * Clear and set 'TS' bit respectively
  */
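Each alternative_input/alternative_io expansion emits one struct alt_instr record (declared earlier in this header) into .altinstructions; at boot, the kernel walks those records and patches the original instructions in place when the CPU advertises the feature bit. A simplified sketch of that consumer, assuming the field names shown in struct alt_instr above (the kernel's real apply_alternatives() pads with optimized multi-byte NOPs rather than plain 0x90):

	static void apply_alternatives_sketch(struct alt_instr *start,
					      struct alt_instr *end)
	{
		struct alt_instr *a;

		for (a = start; a < end; a++) {
			if (!boot_cpu_has(a->cpuid))
				continue;	/* no feature: keep oldinstr */
			BUG_ON(a->replacementlen > a->instrlen);
			memcpy(a->instr, a->replacement, a->replacementlen);
			/* pad the tail of the original with 1-byte NOPs */
			memset(a->instr + a->replacementlen, 0x90,
			       a->instrlen - a->replacementlen);
		}
	}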
@@ -178,6 +193,15 @@ static inline void write_cr4(unsigned long val)
 #define wbinvd() \
 	__asm__ __volatile__ ("wbinvd": : :"memory");
 
+/*
+ * On SMP systems, when the scheduler does migration-cost autodetection,
+ * it needs a way to flush as much of the CPU's caches as possible.
+ */
+static inline void sched_cacheflush(void)
+{
+	wbinvd();
+}
+
 #endif	/* __KERNEL__ */
 
 #define nop() __asm__ __volatile__ ("nop")
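sched_cacheflush() exists so the scheduler can start each migration-cost measurement from a cold cache. A hedged sketch of that kind of timing loop, not the scheduler's exact code: touch_cache() is a hypothetical helper that reads every cacheline of buf, and sched_clock() is the kernel's nanosecond timestamp:

	/* Illustrative only: time one cache-cold pass over a buffer. */
	static unsigned long long measure_one_sketch(void *buf,
						     unsigned long size)
	{
		unsigned long long t0, t1;

		sched_cacheflush();		/* start from a cold cache */
		t0 = sched_clock();
		touch_cache(buf, size);		/* hypothetical helper */
		t1 = sched_clock();
		return t1 - t0;
	}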
@@ -311,12 +335,24 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 /* interrupt control.. */
 #define local_save_flags(x)	do { warn_if_not_ulong(x); __asm__ __volatile__("# save_flags \n\t pushfq ; popq %q0":"=g" (x): /* no input */ :"memory"); } while (0)
 #define local_irq_restore(x)	__asm__ __volatile__("# restore_flags \n\t pushq %0 ; popfq": /* no output */ :"g" (x):"memory", "cc")
+
+#ifdef CONFIG_X86_VSMP
+/* Interrupt control for VSMP architecture */
+#define local_irq_disable()	do { unsigned long flags; local_save_flags(flags); local_irq_restore((flags & ~(1 << 9)) | (1 << 18)); } while (0)
+#define local_irq_enable()	do { unsigned long flags; local_save_flags(flags); local_irq_restore((flags | (1 << 9)) & ~(1 << 18)); } while (0)
+
+#define irqs_disabled()					\
+({							\
+	unsigned long flags;				\
+	local_save_flags(flags);			\
+	(flags & (1<<18)) || !(flags & (1<<9));		\
+})
+
+/* For spinlocks etc */
+#define local_irq_save(x)	do { local_save_flags(x); local_irq_restore((x & ~(1 << 9)) | (1 << 18)); } while (0)
+#else /* CONFIG_X86_VSMP */
 #define local_irq_disable()	__asm__ __volatile__("cli": : :"memory")
 #define local_irq_enable()	__asm__ __volatile__("sti": : :"memory")
-/* used in the idle loop; sti takes one instruction cycle to complete */
-#define safe_halt()		__asm__ __volatile__("sti; hlt": : :"memory")
-/* used when interrupts are already enabled or to shutdown the processor */
-#define halt()			__asm__ __volatile__("hlt": : :"memory")
 
 #define irqs_disabled()			\
 ({					\
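The VSMP variants never execute cli/sti; they rewrite two RFLAGS bits through local_save_flags()/local_irq_restore(). Bit 9 is IF (the interrupt enable flag) and bit 18 is AC (alignment check), which the vSMP platform repurposes to carry the interrupt-disable state. Naming the bits makes the irqs_disabled() test above easier to read (RFLAGS_IF, RFLAGS_AC and the helper are illustrative, not from this header):

	#define RFLAGS_IF	(1UL << 9)	/* interrupt enable flag */
	#define RFLAGS_AC	(1UL << 18)	/* alignment check, repurposed by vSMP */

	static inline int vsmp_irqs_disabled_sketch(unsigned long flags)
	{
		/* mirrors the VSMP irqs_disabled(): AC set or IF clear */
		return (flags & RFLAGS_AC) || !(flags & RFLAGS_IF);
	}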
@@ -327,15 +363,14 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 
 /* For spinlocks etc */
 #define local_irq_save(x)	do { warn_if_not_ulong(x); __asm__ __volatile__("# local_irq_save \n\t pushfq ; popq %0 ; cli":"=g" (x): /* no input */ :"memory"); } while (0)
+#endif
 
-void cpu_idle_wait(void);
+/* used in the idle loop; sti takes one instruction cycle to complete */
+#define safe_halt()		__asm__ __volatile__("sti; hlt": : :"memory")
+/* used when interrupts are already enabled or to shutdown the processor */
+#define halt()			__asm__ __volatile__("hlt": : :"memory")
 
-/*
- * disable hlt during certain critical i/o operations
- */
-#define HAVE_DISABLE_HLT
-void disable_hlt(void);
-void enable_hlt(void);
+void cpu_idle_wait(void);
 
 extern unsigned long arch_align_stack(unsigned long sp);
 
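safe_halt() survives the reshuffle unchanged because STI enables interrupts only after the following instruction completes, so no interrupt can slip in between STI and HLT. A sketch of the idle-loop pattern it serves (similar in shape to the kernel's default_idle(), but not a copy of it):

	static void idle_sketch(void)
	{
		local_irq_disable();
		if (!need_resched())
			safe_halt();	/* STI;HLT: no lost-wakeup window */
		else
			local_irq_enable();
	}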