about summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86/mmu_context_32.h  12
1 file changed, 6 insertions, 6 deletions
diff --git a/include/asm-x86/mmu_context_32.h b/include/asm-x86/mmu_context_32.h
index 8198d1cca1f3..9756ae0f1dd3 100644
--- a/include/asm-x86/mmu_context_32.h
+++ b/include/asm-x86/mmu_context_32.h
@@ -62,7 +62,7 @@ static inline void switch_mm(struct mm_struct *prev,
 62		BUG_ON(per_cpu(cpu_tlbstate, cpu).active_mm != next);
 63
 64		if (!cpu_test_and_set(cpu, next->cpu_vm_mask)) {
 65			/* We were in lazy tlb mode and leave_mm disabled
 66			 * tlb flush IPI delivery. We must reload %cr3.
 67			 */
 68			load_cr3(next->pgd);
@@ -75,10 +75,10 @@ static inline void switch_mm(struct mm_struct *prev,
 75 #define deactivate_mm(tsk, mm) \
 76 	asm("movl %0,%%gs": :"r" (0));
 77
 78 #define activate_mm(prev, next) \
-79 	do { \
-80 		paravirt_activate_mm(prev, next); \
-81 		switch_mm((prev),(next),NULL); \
-82 	} while(0);
+79 do { \
+80 	paravirt_activate_mm((prev), (next)); \
+81 	switch_mm((prev), (next), NULL); \
+82 } while (0);
 83
 84 #endif