Diffstat (limited to 'arch/arm/include/asm/mmu_context.h')
 -rw-r--r--  arch/arm/include/asm/mmu_context.h  | 20
 1 file changed, 16 insertions(+), 4 deletions(-)
diff --git a/arch/arm/include/asm/mmu_context.h b/arch/arm/include/asm/mmu_context.h
index b5792b7fd8d3..9b32f76bb0dd 100644
--- a/arch/arm/include/asm/mmu_context.h
+++ b/arch/arm/include/asm/mmu_context.h
@@ -56,7 +56,7 @@ static inline void check_and_switch_context(struct mm_struct *mm,
 		 * on non-ASID CPUs, the old mm will remain valid until the
 		 * finish_arch_post_lock_switch() call.
 		 */
-		set_ti_thread_flag(task_thread_info(tsk), TIF_SWITCH_MM);
+		mm->context.switch_pending = 1;
 	else
 		cpu_switch_mm(mm->pgd, mm);
 }
@@ -65,9 +65,21 @@ static inline void check_and_switch_context(struct mm_struct *mm,
 	finish_arch_post_lock_switch
 static inline void finish_arch_post_lock_switch(void)
 {
-	if (test_and_clear_thread_flag(TIF_SWITCH_MM)) {
-		struct mm_struct *mm = current->mm;
-		cpu_switch_mm(mm->pgd, mm);
+	struct mm_struct *mm = current->mm;
+
+	if (mm && mm->context.switch_pending) {
+		/*
+		 * Preemption must be disabled during cpu_switch_mm() as we
+		 * have some stateful cache flush implementations. Check
+		 * switch_pending again in case we were preempted and the
+		 * switch to this mm was already done.
+		 */
+		preempt_disable();
+		if (mm->context.switch_pending) {
+			mm->context.switch_pending = 0;
+			cpu_switch_mm(mm->pgd, mm);
+		}
+		preempt_enable_no_resched();
 	}
 }
 