Diffstat (limited to 'arch/mips/include/asm/system.h')
-rw-r--r--	arch/mips/include/asm/system.h	| 18
1 file changed, 14 insertions, 4 deletions
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index cd30f83235bb..fcf5f98d90cc 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -32,6 +32,9 @@ extern asmlinkage void *resume(void *last, void *next, void *next_ti);
 
 struct task_struct;
 
+extern unsigned int ll_bit;
+extern struct task_struct *ll_task;
+
 #ifdef CONFIG_MIPS_MT_FPAFF
 
 /*
@@ -63,11 +66,18 @@ do {								\
 #define __mips_mt_fpaff_switch_to(prev) do { (void) (prev); } while (0)
 #endif
 
+#define __clear_software_ll_bit()				\
+do {								\
+	if (!__builtin_constant_p(cpu_has_llsc) || !cpu_has_llsc)	\
+		ll_bit = 0;					\
+} while (0)
+
 #define switch_to(prev, next, last)				\
 do {								\
 	__mips_mt_fpaff_switch_to(prev);			\
 	if (cpu_has_dsp)					\
 		__save_dsp(prev);				\
+	__clear_software_ll_bit();				\
 	(last) = resume(prev, next, task_thread_info(next));	\
 } while (0)
 
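The hunks above only add the bookkeeping: the software LL/SC emulation that actually consumes ll_bit and ll_task lives elsewhere in arch/mips and is not part of this diff. As a rough, hypothetical sketch of how such an emulation is expected to use these symbols (the helper names below are invented for illustration; only ll_bit, ll_task and struct task_struct come from the header being patched):

/*
 * Hypothetical sketch only -- relies on the ll_bit/ll_task externs
 * declared in the first hunk; the real emulation code is elsewhere.
 */
static void emulated_ll(struct task_struct *tsk)
{
	/* An emulated LL records a pending linked access for this task. */
	ll_bit = 1;
	ll_task = tsk;
}

static int emulated_sc(struct task_struct *tsk)
{
	/*
	 * An emulated SC succeeds only if the link is still intact and
	 * owned by the same task.  __clear_software_ll_bit() in
	 * switch_to() breaks the link on every context switch, so a task
	 * preempted between LL and SC is forced back around its loop.
	 */
	if (!ll_bit || ll_task != tsk)
		return 0;	/* fail: caller retries from the LL */

	ll_bit = 0;
	return 1;		/* success */
}

The __builtin_constant_p() test in __clear_software_ll_bit() lets the store be optimised away entirely on platforms where cpu_has_llsc is a compile-time constant 1, so the extra work is only done where hardware LL/SC may be absent.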
@@ -84,7 +94,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
 	__u32 retval;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
 		__asm__ __volatile__(
@@ -99,7 +109,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long dummy;
 
 		__asm__ __volatile__(
@@ -136,7 +146,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 {
 	__u64 retval;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
 		__asm__ __volatile__(
@@ -149,7 +159,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long dummy;
 
 		__asm__ __volatile__(
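The remaining hunks only rename the predicate from cpu_has_llsc to kernel_uses_llsc in __xchg_u32() and __xchg_u64(); the LL/SC bodies themselves are elided from the context shown. For reference, here is a reconstructed sketch of the shape of the non-R10000 kernel_uses_llsc branch of __xchg_u32(): the constraint list is copied from the context lines above, but the asm body is illustrative and not the verbatim <asm/system.h> code.

/*
 * Illustrative sketch of the LL/SC exchange loop guarded by the
 * kernel_uses_llsc tests above; not the actual <asm/system.h> body.
 */
static inline unsigned long __xchg_u32_llsc_sketch(volatile int *m,
						   unsigned int val)
{
	__u32 retval;
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ll	%0, %3		# load-linked the old value	\n"
	"	move	%2, %z4		# stage the new value		\n"
	"	sc	%2, %1		# attempt the store		\n"
	"	beqz	%2, 1b		# link was lost: retry		\n"
	: "=&r" (retval), "=m" (*m), "=&r" (dummy)
	: "R" (*m), "Jr" (val)
	: "memory");

	return retval;
}

The R10000_LLSC_WAR branch differs mainly in using a branch-likely retry as a workaround for an R10000 erratum; the change in this diff only swaps which feature test selects these paths.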