Diffstat (limited to 'include/asm-i386/system.h')
 include/asm-i386/system.h | 40 +++++++++++++++++++++++-----------------
 1 file changed, 23 insertions(+), 17 deletions(-)
diff --git a/include/asm-i386/system.h b/include/asm-i386/system.h
index 772f85da1206..36a92ed6a9d0 100644
--- a/include/asm-i386/system.h
+++ b/include/asm-i386/system.h
@@ -54,23 +54,7 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 	); } while(0)
 
 #define set_base(ldt,base) _set_base( ((char *)&(ldt)) , (base) )
-#define set_limit(ldt,limit) _set_limit( ((char *)&(ldt)) , ((limit)-1)>>12 )
-
-static inline unsigned long _get_base(char * addr)
-{
-	unsigned long __base;
-	__asm__("movb %3,%%dh\n\t"
-		"movb %2,%%dl\n\t"
-		"shll $16,%%edx\n\t"
-		"movw %1,%%dx"
-		:"=&d" (__base)
-		:"m" (*((addr)+2)),
-		 "m" (*((addr)+4)),
-		 "m" (*((addr)+7)));
-	return __base;
-}
-
-#define get_base(ldt) _get_base( ((char *)&(ldt)) )
+#define set_limit(ldt,limit) _set_limit( ((char *)&(ldt)) , ((limit)-1) )
 
 /*
  * Load a segment. Fall back on loading the zero
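
The hunk above makes two changes: set_limit() no longer shifts the byte limit
right by 12 (it stops converting bytes to 4 KiB pages before handing the value
to _set_limit()), and the now-unused _get_base()/get_base() helpers are
deleted. For reference, the removed asm assembled a descriptor's 32-bit base
from the scattered i386 descriptor layout (dh = byte 7, dl = byte 4, shifted
up 16 bits, low word from bytes 2-3). A minimal plain-C sketch of the same
computation, assuming desc points at the raw 8-byte descriptor (the helper
name is hypothetical):

/*
 * Plain-C sketch of what the removed _get_base() computed: an i386
 * descriptor keeps base[0:23] in bytes 2..4 and base[24:31] in byte 7.
 * "desc" (a pointer to the raw 8-byte descriptor) is hypothetical.
 */
static inline unsigned long get_base_sketch(const unsigned char *desc)
{
	return  (unsigned long)desc[2]
	      | ((unsigned long)desc[3] << 8)
	      | ((unsigned long)desc[4] << 16)
	      | ((unsigned long)desc[7] << 24);
}
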
@@ -140,6 +124,19 @@ static inline unsigned long _get_base(char * addr)
 		:"=r" (__dummy)); \
 	__dummy; \
 })
+
+#define read_cr4_safe() ({ \
+	unsigned int __dummy; \
+	/* This could fault if %cr4 does not exist */ \
+	__asm__("1: movl %%cr4, %0	\n" \
+		"2:			\n" \
+		".section __ex_table,\"a\"	\n" \
+		".long 1b,2b		\n" \
+		".previous		\n" \
+		: "=r" (__dummy): "0" (0)); \
+	__dummy; \
+})
+
 #define write_cr4(x) \
 	__asm__ __volatile__("movl %0,%%cr4": :"r" (x));
 #define stts() write_cr0(8 | read_cr0())
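
The new read_cr4_safe() uses the kernel's __ex_table fixup mechanism: the
.long 1b,2b entry pairs the possibly-faulting instruction at label 1 with the
fixup address at label 2, and the "0" (0) constraint preloads the output
register with 0. On a CPU with no %cr4 (i486 and earlier) the mov faults,
execution resumes at label 2, and the macro evaluates to the preloaded 0. A
hedged caller-side sketch, not part of this patch (X86_CR4_PAE is the CR4.PAE
bit mask from <asm/processor.h>; the function is hypothetical):

/* Hypothetical caller: treat a 0 return as "this CPU has no CR4". */
static void report_pae(void)
{
	unsigned int cr4 = read_cr4_safe();	/* 0 if the read faulted */

	if (cr4 & X86_CR4_PAE)
		printk(KERN_INFO "PAE enabled, cr4=%08x\n", cr4);
}
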
@@ -551,6 +548,15 @@ void enable_hlt(void);
 extern int es7000_plat;
 void cpu_idle_wait(void);
 
+/*
+ * On SMP systems, when the scheduler does migration-cost autodetection,
+ * it needs a way to flush as much of the CPU's caches as possible:
+ */
+static inline void sched_cacheflush(void)
+{
+	wbinvd();
+}
+
 extern unsigned long arch_align_stack(unsigned long sp);
 
 #endif
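
sched_cacheflush() wraps WBINVD, which writes all dirty cache lines back to
memory and invalidates the entire cache hierarchy, giving the scheduler's
migration-cost autodetection a cold-cache starting point for each measurement.
A hedged sketch of that measurement pattern, not taken from the scheduler
itself (the buffer, touch_cache(), and the function name are hypothetical;
get_cycles() is the TSC helper from <asm/timex.h>):

/*
 * Hedged sketch of one migration-cost measurement step; the buffer,
 * its size, and touch_cache() are hypothetical stand-ins.
 */
static unsigned long long measure_cold_cost(void *buf, unsigned long size)
{
	unsigned long long t0;

	sched_cacheflush();		/* start from a cold cache */
	t0 = get_cycles();
	touch_cache(buf, size);		/* hypothetical workload */
	return get_cycles() - t0;
}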
