author | Ralf Baechle <ralf@linux-mips.org> | 2015-02-19 10:00:34 -0500 |
---|---|---|
committer | Ralf Baechle <ralf@linux-mips.org> | 2015-02-19 10:00:34 -0500 |
commit | 661af35e5fd878f915ed05dbbfe383f64133f98c (patch) | |
tree | 956b7efd662b682224e61060552fdcf4201101bf /arch/mips/include | |
parent | ca5d25642e212f73492d332d95dc90ef46a0e8dc (diff) | |
parent | f296e7c48d3155991b99f41372e1786c5be03457 (diff) | |
Merge branch 'mipsr6-for-3.20' of git://git.linux-mips.org/pub/scm/mchandras/linux into mips-for-linux-next
Diffstat (limited to 'arch/mips/include')
29 files changed, 497 insertions, 180 deletions
diff --git a/arch/mips/include/asm/Kbuild b/arch/mips/include/asm/Kbuild index 200efeac4181..526539cbc99f 100644 --- a/arch/mips/include/asm/Kbuild +++ b/arch/mips/include/asm/Kbuild | |||
@@ -1,4 +1,5 @@ | |||
1 | # MIPS headers | 1 | # MIPS headers |
2 | generic-$(CONFIG_GENERIC_CSUM) += checksum.h | ||
2 | generic-y += cputime.h | 3 | generic-y += cputime.h |
3 | generic-y += current.h | 4 | generic-y += current.h |
4 | generic-y += dma-contiguous.h | 5 | generic-y += dma-contiguous.h |
diff --git a/arch/mips/include/asm/asmmacro.h b/arch/mips/include/asm/asmmacro.h index 6caf8766b80f..0cae4595e985 100644 --- a/arch/mips/include/asm/asmmacro.h +++ b/arch/mips/include/asm/asmmacro.h | |||
@@ -19,7 +19,7 @@ | |||
19 | #include <asm/asmmacro-64.h> | 19 | #include <asm/asmmacro-64.h> |
20 | #endif | 20 | #endif |
21 | 21 | ||
22 | #ifdef CONFIG_CPU_MIPSR2 | 22 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
23 | .macro local_irq_enable reg=t0 | 23 | .macro local_irq_enable reg=t0 |
24 | ei | 24 | ei |
25 | irq_enable_hazard | 25 | irq_enable_hazard |
@@ -104,7 +104,8 @@ | |||
104 | .endm | 104 | .endm |
105 | 105 | ||
106 | .macro fpu_save_double thread status tmp | 106 | .macro fpu_save_double thread status tmp |
107 | #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) | 107 | #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \ |
108 | defined(CONFIG_CPU_MIPS32_R6) | ||
108 | sll \tmp, \status, 5 | 109 | sll \tmp, \status, 5 |
109 | bgez \tmp, 10f | 110 | bgez \tmp, 10f |
110 | fpu_save_16odd \thread | 111 | fpu_save_16odd \thread |
@@ -160,7 +161,8 @@ | |||
160 | .endm | 161 | .endm |
161 | 162 | ||
162 | .macro fpu_restore_double thread status tmp | 163 | .macro fpu_restore_double thread status tmp |
163 | #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) | 164 | #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \ |
165 | defined(CONFIG_CPU_MIPS32_R6) | ||
164 | sll \tmp, \status, 5 | 166 | sll \tmp, \status, 5 |
165 | bgez \tmp, 10f # 16 register mode? | 167 | bgez \tmp, 10f # 16 register mode? |
166 | 168 | ||
@@ -170,16 +172,16 @@ | |||
170 | fpu_restore_16even \thread \tmp | 172 | fpu_restore_16even \thread \tmp |
171 | .endm | 173 | .endm |
172 | 174 | ||
173 | #ifdef CONFIG_CPU_MIPSR2 | 175 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
174 | .macro _EXT rd, rs, p, s | 176 | .macro _EXT rd, rs, p, s |
175 | ext \rd, \rs, \p, \s | 177 | ext \rd, \rs, \p, \s |
176 | .endm | 178 | .endm |
177 | #else /* !CONFIG_CPU_MIPSR2 */ | 179 | #else /* !CONFIG_CPU_MIPSR2 || !CONFIG_CPU_MIPSR6 */ |
178 | .macro _EXT rd, rs, p, s | 180 | .macro _EXT rd, rs, p, s |
179 | srl \rd, \rs, \p | 181 | srl \rd, \rs, \p |
180 | andi \rd, \rd, (1 << \s) - 1 | 182 | andi \rd, \rd, (1 << \s) - 1 |
181 | .endm | 183 | .endm |
182 | #endif /* !CONFIG_CPU_MIPSR2 */ | 184 | #endif /* !CONFIG_CPU_MIPSR2 || !CONFIG_CPU_MIPSR6 */ |
183 | 185 | ||
184 | /* | 186 | /* |
185 | * Temporary until all gas have MT ASE support | 187 | * Temporary until all gas have MT ASE support |
@@ -304,7 +306,7 @@ | |||
304 | .set push | 306 | .set push |
305 | .set noat | 307 | .set noat |
306 | SET_HARDFLOAT | 308 | SET_HARDFLOAT |
307 | add $1, \base, \off | 309 | addu $1, \base, \off |
308 | .word LDD_MSA_INSN | (\wd << 6) | 310 | .word LDD_MSA_INSN | (\wd << 6) |
309 | .set pop | 311 | .set pop |
310 | .endm | 312 | .endm |
@@ -313,7 +315,7 @@ | |||
313 | .set push | 315 | .set push |
314 | .set noat | 316 | .set noat |
315 | SET_HARDFLOAT | 317 | SET_HARDFLOAT |
316 | add $1, \base, \off | 318 | addu $1, \base, \off |
317 | .word STD_MSA_INSN | (\wd << 6) | 319 | .word STD_MSA_INSN | (\wd << 6) |
318 | .set pop | 320 | .set pop |
319 | .endm | 321 | .endm |
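The asmmacro.h hunks above widen the R2-only fast paths to R6 and switch the MSA load/store address computation from `add` to `addu` (plain address arithmetic must not trap on overflow, which `add` can). The `_EXT` fallback is worth spelling out: `srl` plus `andi` computes exactly what the native `ext` instruction does on R2/R6 cores. A minimal C restatement of that equivalence (illustrative only, not part of the patch):

```c
/*
 * Sketch: the srl+andi fallback in _EXT extracts an unsigned bitfield
 * of `s` bits starting at bit position `p`, the same value the native
 * ext instruction produces on R2/R6 cores.
 */
static inline unsigned long ext_fallback(unsigned long rs, unsigned int p,
					 unsigned int s)
{
	return (rs >> p) & ((1UL << s) - 1);	/* srl ; andi */
}
```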
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h index 857da84cfc92..26d436336f2e 100644 --- a/arch/mips/include/asm/atomic.h +++ b/arch/mips/include/asm/atomic.h | |||
@@ -54,19 +54,19 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \ | |||
54 | " sc %0, %1 \n" \ | 54 | " sc %0, %1 \n" \ |
55 | " beqzl %0, 1b \n" \ | 55 | " beqzl %0, 1b \n" \ |
56 | " .set mips0 \n" \ | 56 | " .set mips0 \n" \ |
57 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 57 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
58 | : "Ir" (i)); \ | 58 | : "Ir" (i)); \ |
59 | } else if (kernel_uses_llsc) { \ | 59 | } else if (kernel_uses_llsc) { \ |
60 | int temp; \ | 60 | int temp; \ |
61 | \ | 61 | \ |
62 | do { \ | 62 | do { \ |
63 | __asm__ __volatile__( \ | 63 | __asm__ __volatile__( \ |
64 | " .set arch=r4000 \n" \ | 64 | " .set "MIPS_ISA_LEVEL" \n" \ |
65 | " ll %0, %1 # atomic_" #op "\n" \ | 65 | " ll %0, %1 # atomic_" #op "\n" \ |
66 | " " #asm_op " %0, %2 \n" \ | 66 | " " #asm_op " %0, %2 \n" \ |
67 | " sc %0, %1 \n" \ | 67 | " sc %0, %1 \n" \ |
68 | " .set mips0 \n" \ | 68 | " .set mips0 \n" \ |
69 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 69 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
70 | : "Ir" (i)); \ | 70 | : "Ir" (i)); \ |
71 | } while (unlikely(!temp)); \ | 71 | } while (unlikely(!temp)); \ |
72 | } else { \ | 72 | } else { \ |
@@ -97,20 +97,20 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \ | |||
97 | " " #asm_op " %0, %1, %3 \n" \ | 97 | " " #asm_op " %0, %1, %3 \n" \ |
98 | " .set mips0 \n" \ | 98 | " .set mips0 \n" \ |
99 | : "=&r" (result), "=&r" (temp), \ | 99 | : "=&r" (result), "=&r" (temp), \ |
100 | "+" GCC_OFF12_ASM() (v->counter) \ | 100 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
101 | : "Ir" (i)); \ | 101 | : "Ir" (i)); \ |
102 | } else if (kernel_uses_llsc) { \ | 102 | } else if (kernel_uses_llsc) { \ |
103 | int temp; \ | 103 | int temp; \ |
104 | \ | 104 | \ |
105 | do { \ | 105 | do { \ |
106 | __asm__ __volatile__( \ | 106 | __asm__ __volatile__( \ |
107 | " .set arch=r4000 \n" \ | 107 | " .set "MIPS_ISA_LEVEL" \n" \ |
108 | " ll %1, %2 # atomic_" #op "_return \n" \ | 108 | " ll %1, %2 # atomic_" #op "_return \n" \ |
109 | " " #asm_op " %0, %1, %3 \n" \ | 109 | " " #asm_op " %0, %1, %3 \n" \ |
110 | " sc %0, %2 \n" \ | 110 | " sc %0, %2 \n" \ |
111 | " .set mips0 \n" \ | 111 | " .set mips0 \n" \ |
112 | : "=&r" (result), "=&r" (temp), \ | 112 | : "=&r" (result), "=&r" (temp), \ |
113 | "+" GCC_OFF12_ASM() (v->counter) \ | 113 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
114 | : "Ir" (i)); \ | 114 | : "Ir" (i)); \ |
115 | } while (unlikely(!result)); \ | 115 | } while (unlikely(!result)); \ |
116 | \ | 116 | \ |
@@ -171,14 +171,14 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |||
171 | "1: \n" | 171 | "1: \n" |
172 | " .set mips0 \n" | 172 | " .set mips0 \n" |
173 | : "=&r" (result), "=&r" (temp), | 173 | : "=&r" (result), "=&r" (temp), |
174 | "+" GCC_OFF12_ASM() (v->counter) | 174 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
175 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) | 175 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) |
176 | : "memory"); | 176 | : "memory"); |
177 | } else if (kernel_uses_llsc) { | 177 | } else if (kernel_uses_llsc) { |
178 | int temp; | 178 | int temp; |
179 | 179 | ||
180 | __asm__ __volatile__( | 180 | __asm__ __volatile__( |
181 | " .set arch=r4000 \n" | 181 | " .set "MIPS_ISA_LEVEL" \n" |
182 | "1: ll %1, %2 # atomic_sub_if_positive\n" | 182 | "1: ll %1, %2 # atomic_sub_if_positive\n" |
183 | " subu %0, %1, %3 \n" | 183 | " subu %0, %1, %3 \n" |
184 | " bltz %0, 1f \n" | 184 | " bltz %0, 1f \n" |
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |||
190 | "1: \n" | 190 | "1: \n" |
191 | " .set mips0 \n" | 191 | " .set mips0 \n" |
192 | : "=&r" (result), "=&r" (temp), | 192 | : "=&r" (result), "=&r" (temp), |
193 | "+" GCC_OFF12_ASM() (v->counter) | 193 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
194 | : "Ir" (i)); | 194 | : "Ir" (i)); |
195 | } else { | 195 | } else { |
196 | unsigned long flags; | 196 | unsigned long flags; |
@@ -333,19 +333,19 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \ | |||
333 | " scd %0, %1 \n" \ | 333 | " scd %0, %1 \n" \ |
334 | " beqzl %0, 1b \n" \ | 334 | " beqzl %0, 1b \n" \ |
335 | " .set mips0 \n" \ | 335 | " .set mips0 \n" \ |
336 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 336 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
337 | : "Ir" (i)); \ | 337 | : "Ir" (i)); \ |
338 | } else if (kernel_uses_llsc) { \ | 338 | } else if (kernel_uses_llsc) { \ |
339 | long temp; \ | 339 | long temp; \ |
340 | \ | 340 | \ |
341 | do { \ | 341 | do { \ |
342 | __asm__ __volatile__( \ | 342 | __asm__ __volatile__( \ |
343 | " .set arch=r4000 \n" \ | 343 | " .set "MIPS_ISA_LEVEL" \n" \ |
344 | " lld %0, %1 # atomic64_" #op "\n" \ | 344 | " lld %0, %1 # atomic64_" #op "\n" \ |
345 | " " #asm_op " %0, %2 \n" \ | 345 | " " #asm_op " %0, %2 \n" \ |
346 | " scd %0, %1 \n" \ | 346 | " scd %0, %1 \n" \ |
347 | " .set mips0 \n" \ | 347 | " .set mips0 \n" \ |
348 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 348 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
349 | : "Ir" (i)); \ | 349 | : "Ir" (i)); \ |
350 | } while (unlikely(!temp)); \ | 350 | } while (unlikely(!temp)); \ |
351 | } else { \ | 351 | } else { \ |
@@ -376,21 +376,21 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ | |||
376 | " " #asm_op " %0, %1, %3 \n" \ | 376 | " " #asm_op " %0, %1, %3 \n" \ |
377 | " .set mips0 \n" \ | 377 | " .set mips0 \n" \ |
378 | : "=&r" (result), "=&r" (temp), \ | 378 | : "=&r" (result), "=&r" (temp), \ |
379 | "+" GCC_OFF12_ASM() (v->counter) \ | 379 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
380 | : "Ir" (i)); \ | 380 | : "Ir" (i)); \ |
381 | } else if (kernel_uses_llsc) { \ | 381 | } else if (kernel_uses_llsc) { \ |
382 | long temp; \ | 382 | long temp; \ |
383 | \ | 383 | \ |
384 | do { \ | 384 | do { \ |
385 | __asm__ __volatile__( \ | 385 | __asm__ __volatile__( \ |
386 | " .set arch=r4000 \n" \ | 386 | " .set "MIPS_ISA_LEVEL" \n" \ |
387 | " lld %1, %2 # atomic64_" #op "_return\n" \ | 387 | " lld %1, %2 # atomic64_" #op "_return\n" \ |
388 | " " #asm_op " %0, %1, %3 \n" \ | 388 | " " #asm_op " %0, %1, %3 \n" \ |
389 | " scd %0, %2 \n" \ | 389 | " scd %0, %2 \n" \ |
390 | " .set mips0 \n" \ | 390 | " .set mips0 \n" \ |
391 | : "=&r" (result), "=&r" (temp), \ | 391 | : "=&r" (result), "=&r" (temp), \ |
392 | "=" GCC_OFF12_ASM() (v->counter) \ | 392 | "=" GCC_OFF_SMALL_ASM() (v->counter) \ |
393 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) \ | 393 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \ |
394 | : "memory"); \ | 394 | : "memory"); \ |
395 | } while (unlikely(!result)); \ | 395 | } while (unlikely(!result)); \ |
396 | \ | 396 | \ |
@@ -452,14 +452,14 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |||
452 | "1: \n" | 452 | "1: \n" |
453 | " .set mips0 \n" | 453 | " .set mips0 \n" |
454 | : "=&r" (result), "=&r" (temp), | 454 | : "=&r" (result), "=&r" (temp), |
455 | "=" GCC_OFF12_ASM() (v->counter) | 455 | "=" GCC_OFF_SMALL_ASM() (v->counter) |
456 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) | 456 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) |
457 | : "memory"); | 457 | : "memory"); |
458 | } else if (kernel_uses_llsc) { | 458 | } else if (kernel_uses_llsc) { |
459 | long temp; | 459 | long temp; |
460 | 460 | ||
461 | __asm__ __volatile__( | 461 | __asm__ __volatile__( |
462 | " .set arch=r4000 \n" | 462 | " .set "MIPS_ISA_LEVEL" \n" |
463 | "1: lld %1, %2 # atomic64_sub_if_positive\n" | 463 | "1: lld %1, %2 # atomic64_sub_if_positive\n" |
464 | " dsubu %0, %1, %3 \n" | 464 | " dsubu %0, %1, %3 \n" |
465 | " bltz %0, 1f \n" | 465 | " bltz %0, 1f \n" |
@@ -471,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |||
471 | "1: \n" | 471 | "1: \n" |
472 | " .set mips0 \n" | 472 | " .set mips0 \n" |
473 | : "=&r" (result), "=&r" (temp), | 473 | : "=&r" (result), "=&r" (temp), |
474 | "+" GCC_OFF12_ASM() (v->counter) | 474 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
475 | : "Ir" (i)); | 475 | : "Ir" (i)); |
476 | } else { | 476 | } else { |
477 | unsigned long flags; | 477 | unsigned long flags; |
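Throughout atomic.h every `GCC_OFF12_ASM()` becomes `GCC_OFF_SMALL_ASM()` and every hard-coded `.set arch=r4000` becomes the `MIPS_ISA_LEVEL` string; both are defined in the compiler.h hunk further down, so a single inline-asm body assembles correctly for pre-R6 and R6 ISAs alike. As a concrete sketch (an illustrative expansion, assuming `addu` as the `asm_op` of the add variant as elsewhere in this file, not literal kernel output), the ll/sc arm of `atomic_add()` on a `CONFIG_CPU_MIPSR6` kernel comes out roughly as:

```c
do {
	__asm__ __volatile__(
	"	.set	mips64r6			\n"	/* MIPS_ISA_LEVEL */
	"	ll	%0, %1		# atomic_add	\n"
	"	addu	%0, %2				\n"
	"	sc	%0, %1				\n"
	"	.set	mips0				\n"
	: "=&r" (temp), "+ZC" (v->counter)		/* GCC_OFF_SMALL_ASM() */
	: "Ir" (i));
} while (unlikely(!temp));
```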
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h index 6663bcca9d0c..9f935f6aa996 100644 --- a/arch/mips/include/asm/bitops.h +++ b/arch/mips/include/asm/bitops.h | |||
@@ -79,28 +79,28 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) | |||
79 | " " __SC "%0, %1 \n" | 79 | " " __SC "%0, %1 \n" |
80 | " beqzl %0, 1b \n" | 80 | " beqzl %0, 1b \n" |
81 | " .set mips0 \n" | 81 | " .set mips0 \n" |
82 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*m) | 82 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m) |
83 | : "ir" (1UL << bit), GCC_OFF12_ASM() (*m)); | 83 | : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)); |
84 | #ifdef CONFIG_CPU_MIPSR2 | 84 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
85 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { | 85 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { |
86 | do { | 86 | do { |
87 | __asm__ __volatile__( | 87 | __asm__ __volatile__( |
88 | " " __LL "%0, %1 # set_bit \n" | 88 | " " __LL "%0, %1 # set_bit \n" |
89 | " " __INS "%0, %3, %2, 1 \n" | 89 | " " __INS "%0, %3, %2, 1 \n" |
90 | " " __SC "%0, %1 \n" | 90 | " " __SC "%0, %1 \n" |
91 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 91 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
92 | : "ir" (bit), "r" (~0)); | 92 | : "ir" (bit), "r" (~0)); |
93 | } while (unlikely(!temp)); | 93 | } while (unlikely(!temp)); |
94 | #endif /* CONFIG_CPU_MIPSR2 */ | 94 | #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */ |
95 | } else if (kernel_uses_llsc) { | 95 | } else if (kernel_uses_llsc) { |
96 | do { | 96 | do { |
97 | __asm__ __volatile__( | 97 | __asm__ __volatile__( |
98 | " .set arch=r4000 \n" | 98 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
99 | " " __LL "%0, %1 # set_bit \n" | 99 | " " __LL "%0, %1 # set_bit \n" |
100 | " or %0, %2 \n" | 100 | " or %0, %2 \n" |
101 | " " __SC "%0, %1 \n" | 101 | " " __SC "%0, %1 \n" |
102 | " .set mips0 \n" | 102 | " .set mips0 \n" |
103 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 103 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
104 | : "ir" (1UL << bit)); | 104 | : "ir" (1UL << bit)); |
105 | } while (unlikely(!temp)); | 105 | } while (unlikely(!temp)); |
106 | } else | 106 | } else |
@@ -131,28 +131,28 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) | |||
131 | " " __SC "%0, %1 \n" | 131 | " " __SC "%0, %1 \n" |
132 | " beqzl %0, 1b \n" | 132 | " beqzl %0, 1b \n" |
133 | " .set mips0 \n" | 133 | " .set mips0 \n" |
134 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 134 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
135 | : "ir" (~(1UL << bit))); | 135 | : "ir" (~(1UL << bit))); |
136 | #ifdef CONFIG_CPU_MIPSR2 | 136 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
137 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { | 137 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { |
138 | do { | 138 | do { |
139 | __asm__ __volatile__( | 139 | __asm__ __volatile__( |
140 | " " __LL "%0, %1 # clear_bit \n" | 140 | " " __LL "%0, %1 # clear_bit \n" |
141 | " " __INS "%0, $0, %2, 1 \n" | 141 | " " __INS "%0, $0, %2, 1 \n" |
142 | " " __SC "%0, %1 \n" | 142 | " " __SC "%0, %1 \n" |
143 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 143 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
144 | : "ir" (bit)); | 144 | : "ir" (bit)); |
145 | } while (unlikely(!temp)); | 145 | } while (unlikely(!temp)); |
146 | #endif /* CONFIG_CPU_MIPSR2 */ | 146 | #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */ |
147 | } else if (kernel_uses_llsc) { | 147 | } else if (kernel_uses_llsc) { |
148 | do { | 148 | do { |
149 | __asm__ __volatile__( | 149 | __asm__ __volatile__( |
150 | " .set arch=r4000 \n" | 150 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
151 | " " __LL "%0, %1 # clear_bit \n" | 151 | " " __LL "%0, %1 # clear_bit \n" |
152 | " and %0, %2 \n" | 152 | " and %0, %2 \n" |
153 | " " __SC "%0, %1 \n" | 153 | " " __SC "%0, %1 \n" |
154 | " .set mips0 \n" | 154 | " .set mips0 \n" |
155 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 155 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
156 | : "ir" (~(1UL << bit))); | 156 | : "ir" (~(1UL << bit))); |
157 | } while (unlikely(!temp)); | 157 | } while (unlikely(!temp)); |
158 | } else | 158 | } else |
@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) | |||
197 | " " __SC "%0, %1 \n" | 197 | " " __SC "%0, %1 \n" |
198 | " beqzl %0, 1b \n" | 198 | " beqzl %0, 1b \n" |
199 | " .set mips0 \n" | 199 | " .set mips0 \n" |
200 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 200 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
201 | : "ir" (1UL << bit)); | 201 | : "ir" (1UL << bit)); |
202 | } else if (kernel_uses_llsc) { | 202 | } else if (kernel_uses_llsc) { |
203 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 203 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
@@ -205,12 +205,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) | |||
205 | 205 | ||
206 | do { | 206 | do { |
207 | __asm__ __volatile__( | 207 | __asm__ __volatile__( |
208 | " .set arch=r4000 \n" | 208 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
209 | " " __LL "%0, %1 # change_bit \n" | 209 | " " __LL "%0, %1 # change_bit \n" |
210 | " xor %0, %2 \n" | 210 | " xor %0, %2 \n" |
211 | " " __SC "%0, %1 \n" | 211 | " " __SC "%0, %1 \n" |
212 | " .set mips0 \n" | 212 | " .set mips0 \n" |
213 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 213 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
214 | : "ir" (1UL << bit)); | 214 | : "ir" (1UL << bit)); |
215 | } while (unlikely(!temp)); | 215 | } while (unlikely(!temp)); |
216 | } else | 216 | } else |
@@ -245,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr, | |||
245 | " beqzl %2, 1b \n" | 245 | " beqzl %2, 1b \n" |
246 | " and %2, %0, %3 \n" | 246 | " and %2, %0, %3 \n" |
247 | " .set mips0 \n" | 247 | " .set mips0 \n" |
248 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 248 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
249 | : "r" (1UL << bit) | 249 | : "r" (1UL << bit) |
250 | : "memory"); | 250 | : "memory"); |
251 | } else if (kernel_uses_llsc) { | 251 | } else if (kernel_uses_llsc) { |
@@ -254,12 +254,12 @@ static inline int test_and_set_bit(unsigned long nr, | |||
254 | 254 | ||
255 | do { | 255 | do { |
256 | __asm__ __volatile__( | 256 | __asm__ __volatile__( |
257 | " .set arch=r4000 \n" | 257 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
258 | " " __LL "%0, %1 # test_and_set_bit \n" | 258 | " " __LL "%0, %1 # test_and_set_bit \n" |
259 | " or %2, %0, %3 \n" | 259 | " or %2, %0, %3 \n" |
260 | " " __SC "%2, %1 \n" | 260 | " " __SC "%2, %1 \n" |
261 | " .set mips0 \n" | 261 | " .set mips0 \n" |
262 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 262 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
263 | : "r" (1UL << bit) | 263 | : "r" (1UL << bit) |
264 | : "memory"); | 264 | : "memory"); |
265 | } while (unlikely(!res)); | 265 | } while (unlikely(!res)); |
@@ -308,12 +308,12 @@ static inline int test_and_set_bit_lock(unsigned long nr, | |||
308 | 308 | ||
309 | do { | 309 | do { |
310 | __asm__ __volatile__( | 310 | __asm__ __volatile__( |
311 | " .set arch=r4000 \n" | 311 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
312 | " " __LL "%0, %1 # test_and_set_bit \n" | 312 | " " __LL "%0, %1 # test_and_set_bit \n" |
313 | " or %2, %0, %3 \n" | 313 | " or %2, %0, %3 \n" |
314 | " " __SC "%2, %1 \n" | 314 | " " __SC "%2, %1 \n" |
315 | " .set mips0 \n" | 315 | " .set mips0 \n" |
316 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 316 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
317 | : "r" (1UL << bit) | 317 | : "r" (1UL << bit) |
318 | : "memory"); | 318 | : "memory"); |
319 | } while (unlikely(!res)); | 319 | } while (unlikely(!res)); |
@@ -355,10 +355,10 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
355 | " beqzl %2, 1b \n" | 355 | " beqzl %2, 1b \n" |
356 | " and %2, %0, %3 \n" | 356 | " and %2, %0, %3 \n" |
357 | " .set mips0 \n" | 357 | " .set mips0 \n" |
358 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 358 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
359 | : "r" (1UL << bit) | 359 | : "r" (1UL << bit) |
360 | : "memory"); | 360 | : "memory"); |
361 | #ifdef CONFIG_CPU_MIPSR2 | 361 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
362 | } else if (kernel_uses_llsc && __builtin_constant_p(nr)) { | 362 | } else if (kernel_uses_llsc && __builtin_constant_p(nr)) { |
363 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 363 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
364 | unsigned long temp; | 364 | unsigned long temp; |
@@ -369,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
369 | " " __EXT "%2, %0, %3, 1 \n" | 369 | " " __EXT "%2, %0, %3, 1 \n" |
370 | " " __INS "%0, $0, %3, 1 \n" | 370 | " " __INS "%0, $0, %3, 1 \n" |
371 | " " __SC "%0, %1 \n" | 371 | " " __SC "%0, %1 \n" |
372 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 372 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
373 | : "ir" (bit) | 373 | : "ir" (bit) |
374 | : "memory"); | 374 | : "memory"); |
375 | } while (unlikely(!temp)); | 375 | } while (unlikely(!temp)); |
@@ -380,13 +380,13 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
380 | 380 | ||
381 | do { | 381 | do { |
382 | __asm__ __volatile__( | 382 | __asm__ __volatile__( |
383 | " .set arch=r4000 \n" | 383 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
384 | " " __LL "%0, %1 # test_and_clear_bit \n" | 384 | " " __LL "%0, %1 # test_and_clear_bit \n" |
385 | " or %2, %0, %3 \n" | 385 | " or %2, %0, %3 \n" |
386 | " xor %2, %3 \n" | 386 | " xor %2, %3 \n" |
387 | " " __SC "%2, %1 \n" | 387 | " " __SC "%2, %1 \n" |
388 | " .set mips0 \n" | 388 | " .set mips0 \n" |
389 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 389 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
390 | : "r" (1UL << bit) | 390 | : "r" (1UL << bit) |
391 | : "memory"); | 391 | : "memory"); |
392 | } while (unlikely(!res)); | 392 | } while (unlikely(!res)); |
@@ -428,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr, | |||
428 | " beqzl %2, 1b \n" | 428 | " beqzl %2, 1b \n" |
429 | " and %2, %0, %3 \n" | 429 | " and %2, %0, %3 \n" |
430 | " .set mips0 \n" | 430 | " .set mips0 \n" |
431 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 431 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
432 | : "r" (1UL << bit) | 432 | : "r" (1UL << bit) |
433 | : "memory"); | 433 | : "memory"); |
434 | } else if (kernel_uses_llsc) { | 434 | } else if (kernel_uses_llsc) { |
@@ -437,12 +437,12 @@ static inline int test_and_change_bit(unsigned long nr, | |||
437 | 437 | ||
438 | do { | 438 | do { |
439 | __asm__ __volatile__( | 439 | __asm__ __volatile__( |
440 | " .set arch=r4000 \n" | 440 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
441 | " " __LL "%0, %1 # test_and_change_bit \n" | 441 | " " __LL "%0, %1 # test_and_change_bit \n" |
442 | " xor %2, %0, %3 \n" | 442 | " xor %2, %0, %3 \n" |
443 | " " __SC "\t%2, %1 \n" | 443 | " " __SC "\t%2, %1 \n" |
444 | " .set mips0 \n" | 444 | " .set mips0 \n" |
445 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 445 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
446 | : "r" (1UL << bit) | 446 | : "r" (1UL << bit) |
447 | : "memory"); | 447 | : "memory"); |
448 | } while (unlikely(!res)); | 448 | } while (unlikely(!res)); |
@@ -485,7 +485,7 @@ static inline unsigned long __fls(unsigned long word) | |||
485 | __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { | 485 | __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { |
486 | __asm__( | 486 | __asm__( |
487 | " .set push \n" | 487 | " .set push \n" |
488 | " .set mips32 \n" | 488 | " .set "MIPS_ISA_LEVEL" \n" |
489 | " clz %0, %1 \n" | 489 | " clz %0, %1 \n" |
490 | " .set pop \n" | 490 | " .set pop \n" |
491 | : "=r" (num) | 491 | : "=r" (num) |
@@ -498,7 +498,7 @@ static inline unsigned long __fls(unsigned long word) | |||
498 | __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) { | 498 | __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) { |
499 | __asm__( | 499 | __asm__( |
500 | " .set push \n" | 500 | " .set push \n" |
501 | " .set mips64 \n" | 501 | " .set "MIPS_ISA_LEVEL" \n" |
502 | " dclz %0, %1 \n" | 502 | " dclz %0, %1 \n" |
503 | " .set pop \n" | 503 | " .set pop \n" |
504 | : "=r" (num) | 504 | : "=r" (num) |
@@ -562,7 +562,7 @@ static inline int fls(int x) | |||
562 | if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { | 562 | if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { |
563 | __asm__( | 563 | __asm__( |
564 | " .set push \n" | 564 | " .set push \n" |
565 | " .set mips32 \n" | 565 | " .set "MIPS_ISA_LEVEL" \n" |
566 | " clz %0, %1 \n" | 566 | " clz %0, %1 \n" |
567 | " .set pop \n" | 567 | " .set pop \n" |
568 | : "=r" (x) | 568 | : "=r" (x) |
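In bitops.h the same `GCC_OFF_SMALL_ASM()`/`MIPS_ISA_ARCH_LEVEL` substitutions apply, the single-instruction `__INS`/`__EXT` paths are opened up to R6, and `__fls()`/`fls()` now select `clz`/`dclz` through `MIPS_ISA_LEVEL` instead of pinning `mips32`/`mips64`. The value computed is unchanged; for reference, the clz-based `fls()` is equivalent to this portable C (a sketch, not from the patch):

```c
/* 1-based index of the highest set bit; 0 when no bit is set. */
static inline int fls_ref(unsigned int x)
{
	return x ? 32 - __builtin_clz(x) : 0;
}
```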
diff --git a/arch/mips/include/asm/checksum.h b/arch/mips/include/asm/checksum.h index 5996252680c6..5c585c5c1c3e 100644 --- a/arch/mips/include/asm/checksum.h +++ b/arch/mips/include/asm/checksum.h | |||
@@ -12,6 +12,10 @@ | |||
12 | #ifndef _ASM_CHECKSUM_H | 12 | #ifndef _ASM_CHECKSUM_H |
13 | #define _ASM_CHECKSUM_H | 13 | #define _ASM_CHECKSUM_H |
14 | 14 | ||
15 | #ifdef CONFIG_GENERIC_CSUM | ||
16 | #include <asm-generic/checksum.h> | ||
17 | #else | ||
18 | |||
15 | #include <linux/in6.h> | 19 | #include <linux/in6.h> |
16 | 20 | ||
17 | #include <asm/uaccess.h> | 21 | #include <asm/uaccess.h> |
@@ -274,5 +278,6 @@ static __inline__ __sum16 csum_ipv6_magic(const struct in6_addr *saddr, | |||
274 | } | 278 | } |
275 | 279 | ||
276 | #include <asm-generic/checksum.h> | 280 | #include <asm-generic/checksum.h> |
281 | #endif /* CONFIG_GENERIC_CSUM */ | ||
277 | 282 | ||
278 | #endif /* _ASM_CHECKSUM_H */ | 283 | #endif /* _ASM_CHECKSUM_H */ |
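With `CONFIG_GENERIC_CSUM` set (the Kbuild hunk at the top wires `checksum.h` to the generic header in that case), the entire MIPS-specific body is skipped; this matters on R6, which drops the unaligned load/store instructions (`lwl`/`lwr` and friends) that the optimized checksum code depends on. Condensed, the header now has this shape (abridged sketch of the result, not new code in the patch):

```c
#ifndef _ASM_CHECKSUM_H
#define _ASM_CHECKSUM_H

#ifdef CONFIG_GENERIC_CSUM
#include <asm-generic/checksum.h>	/* plain C implementations */
#else
/* ... hand-tuned MIPS csum_* routines (unchanged) ... */
#include <asm-generic/checksum.h>	/* picks up whatever is not overridden */
#endif /* CONFIG_GENERIC_CSUM */

#endif /* _ASM_CHECKSUM_H */
```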
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h index 28b1edf19501..d0a2a68ca600 100644 --- a/arch/mips/include/asm/cmpxchg.h +++ b/arch/mips/include/asm/cmpxchg.h | |||
@@ -31,24 +31,24 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) | |||
31 | " sc %2, %1 \n" | 31 | " sc %2, %1 \n" |
32 | " beqzl %2, 1b \n" | 32 | " beqzl %2, 1b \n" |
33 | " .set mips0 \n" | 33 | " .set mips0 \n" |
34 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) | 34 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) |
35 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 35 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
36 | : "memory"); | 36 | : "memory"); |
37 | } else if (kernel_uses_llsc) { | 37 | } else if (kernel_uses_llsc) { |
38 | unsigned long dummy; | 38 | unsigned long dummy; |
39 | 39 | ||
40 | do { | 40 | do { |
41 | __asm__ __volatile__( | 41 | __asm__ __volatile__( |
42 | " .set arch=r4000 \n" | 42 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
43 | " ll %0, %3 # xchg_u32 \n" | 43 | " ll %0, %3 # xchg_u32 \n" |
44 | " .set mips0 \n" | 44 | " .set mips0 \n" |
45 | " move %2, %z4 \n" | 45 | " move %2, %z4 \n" |
46 | " .set arch=r4000 \n" | 46 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
47 | " sc %2, %1 \n" | 47 | " sc %2, %1 \n" |
48 | " .set mips0 \n" | 48 | " .set mips0 \n" |
49 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), | 49 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), |
50 | "=&r" (dummy) | 50 | "=&r" (dummy) |
51 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 51 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
52 | : "memory"); | 52 | : "memory"); |
53 | } while (unlikely(!dummy)); | 53 | } while (unlikely(!dummy)); |
54 | } else { | 54 | } else { |
@@ -82,22 +82,22 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) | |||
82 | " scd %2, %1 \n" | 82 | " scd %2, %1 \n" |
83 | " beqzl %2, 1b \n" | 83 | " beqzl %2, 1b \n" |
84 | " .set mips0 \n" | 84 | " .set mips0 \n" |
85 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) | 85 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) |
86 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 86 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
87 | : "memory"); | 87 | : "memory"); |
88 | } else if (kernel_uses_llsc) { | 88 | } else if (kernel_uses_llsc) { |
89 | unsigned long dummy; | 89 | unsigned long dummy; |
90 | 90 | ||
91 | do { | 91 | do { |
92 | __asm__ __volatile__( | 92 | __asm__ __volatile__( |
93 | " .set arch=r4000 \n" | 93 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
94 | " lld %0, %3 # xchg_u64 \n" | 94 | " lld %0, %3 # xchg_u64 \n" |
95 | " move %2, %z4 \n" | 95 | " move %2, %z4 \n" |
96 | " scd %2, %1 \n" | 96 | " scd %2, %1 \n" |
97 | " .set mips0 \n" | 97 | " .set mips0 \n" |
98 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), | 98 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), |
99 | "=&r" (dummy) | 99 | "=&r" (dummy) |
100 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 100 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
101 | : "memory"); | 101 | : "memory"); |
102 | } while (unlikely(!dummy)); | 102 | } while (unlikely(!dummy)); |
103 | } else { | 103 | } else { |
@@ -158,25 +158,25 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz | |||
158 | " beqzl $1, 1b \n" \ | 158 | " beqzl $1, 1b \n" \ |
159 | "2: \n" \ | 159 | "2: \n" \ |
160 | " .set pop \n" \ | 160 | " .set pop \n" \ |
161 | : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ | 161 | : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ |
162 | : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ | 162 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ |
163 | : "memory"); \ | 163 | : "memory"); \ |
164 | } else if (kernel_uses_llsc) { \ | 164 | } else if (kernel_uses_llsc) { \ |
165 | __asm__ __volatile__( \ | 165 | __asm__ __volatile__( \ |
166 | " .set push \n" \ | 166 | " .set push \n" \ |
167 | " .set noat \n" \ | 167 | " .set noat \n" \ |
168 | " .set arch=r4000 \n" \ | 168 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
169 | "1: " ld " %0, %2 # __cmpxchg_asm \n" \ | 169 | "1: " ld " %0, %2 # __cmpxchg_asm \n" \ |
170 | " bne %0, %z3, 2f \n" \ | 170 | " bne %0, %z3, 2f \n" \ |
171 | " .set mips0 \n" \ | 171 | " .set mips0 \n" \ |
172 | " move $1, %z4 \n" \ | 172 | " move $1, %z4 \n" \ |
173 | " .set arch=r4000 \n" \ | 173 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
174 | " " st " $1, %1 \n" \ | 174 | " " st " $1, %1 \n" \ |
175 | " beqz $1, 1b \n" \ | 175 | " beqz $1, 1b \n" \ |
176 | " .set pop \n" \ | 176 | " .set pop \n" \ |
177 | "2: \n" \ | 177 | "2: \n" \ |
178 | : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ | 178 | : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ |
179 | : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ | 179 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ |
180 | : "memory"); \ | 180 | : "memory"); \ |
181 | } else { \ | 181 | } else { \ |
182 | unsigned long __flags; \ | 182 | unsigned long __flags; \ |
diff --git a/arch/mips/include/asm/compiler.h b/arch/mips/include/asm/compiler.h index c73815e0123a..e081a265f422 100644 --- a/arch/mips/include/asm/compiler.h +++ b/arch/mips/include/asm/compiler.h | |||
@@ -16,12 +16,30 @@ | |||
16 | #define GCC_REG_ACCUM "accum" | 16 | #define GCC_REG_ACCUM "accum" |
17 | #endif | 17 | #endif |
18 | 18 | ||
19 | #ifdef CONFIG_CPU_MIPSR6 | ||
20 | /* All MIPS R6 toolchains support the ZC constrain */ | ||
21 | #define GCC_OFF_SMALL_ASM() "ZC" | ||
22 | #else | ||
19 | #ifndef CONFIG_CPU_MICROMIPS | 23 | #ifndef CONFIG_CPU_MICROMIPS |
20 | #define GCC_OFF12_ASM() "R" | 24 | #define GCC_OFF_SMALL_ASM() "R" |
21 | #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) | 25 | #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) |
22 | #define GCC_OFF12_ASM() "ZC" | 26 | #define GCC_OFF_SMALL_ASM() "ZC" |
23 | #else | 27 | #else |
24 | #error "microMIPS compilation unsupported with GCC older than 4.9" | 28 | #error "microMIPS compilation unsupported with GCC older than 4.9" |
25 | #endif | 29 | #endif /* CONFIG_CPU_MICROMIPS */ |
30 | #endif /* CONFIG_CPU_MIPSR6 */ | ||
31 | |||
32 | #ifdef CONFIG_CPU_MIPSR6 | ||
33 | #define MIPS_ISA_LEVEL "mips64r6" | ||
34 | #define MIPS_ISA_ARCH_LEVEL MIPS_ISA_LEVEL | ||
35 | #define MIPS_ISA_LEVEL_RAW mips64r6 | ||
36 | #define MIPS_ISA_ARCH_LEVEL_RAW MIPS_ISA_LEVEL_RAW | ||
37 | #else | ||
38 | /* MIPS64 is a superset of MIPS32 */ | ||
39 | #define MIPS_ISA_LEVEL "mips64r2" | ||
40 | #define MIPS_ISA_ARCH_LEVEL "arch=r4000" | ||
41 | #define MIPS_ISA_LEVEL_RAW mips64r2 | ||
42 | #define MIPS_ISA_ARCH_LEVEL_RAW MIPS_ISA_LEVEL_RAW | ||
43 | #endif /* CONFIG_CPU_MIPSR6 */ | ||
26 | 44 | ||
27 | #endif /* _ASM_COMPILER_H */ | 45 | #endif /* _ASM_COMPILER_H */ |
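compiler.h is the pivot of the series: it centralizes the operand constraint and ISA strings that every file above now references. Summarized from the hunk (the constraint semantics below follow GCC's MIPS documentation and explain the rename away from "OFF12"):

```c
/*
 * config                         GCC_OFF_SMALL_ASM()  MIPS_ISA_LEVEL  MIPS_ISA_ARCH_LEVEL
 * CONFIG_CPU_MIPSR6              "ZC"                 "mips64r6"      "mips64r6"
 * pre-R6, !CONFIG_CPU_MICROMIPS  "R"                  "mips64r2"      "arch=r4000"
 * pre-R6, microMIPS, GCC >= 4.9  "ZC"                 "mips64r2"      "arch=r4000"
 *
 * "ZC" restricts the memory operand to offsets ll/sc can actually
 * encode (9 bits on R6, 12 on microMIPS -- hence the old OFF12 name);
 * "R" permits the traditional 16-bit offset of pre-R6 ll/sc.
 */
```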
diff --git a/arch/mips/include/asm/cpu-features.h b/arch/mips/include/asm/cpu-features.h index 2897cfafcaf0..0d8208de9a3f 100644 --- a/arch/mips/include/asm/cpu-features.h +++ b/arch/mips/include/asm/cpu-features.h | |||
@@ -38,6 +38,9 @@ | |||
38 | #ifndef cpu_has_maar | 38 | #ifndef cpu_has_maar |
39 | #define cpu_has_maar (cpu_data[0].options & MIPS_CPU_MAAR) | 39 | #define cpu_has_maar (cpu_data[0].options & MIPS_CPU_MAAR) |
40 | #endif | 40 | #endif |
41 | #ifndef cpu_has_rw_llb | ||
42 | #define cpu_has_rw_llb (cpu_data[0].options & MIPS_CPU_RW_LLB) | ||
43 | #endif | ||
41 | 44 | ||
42 | /* | 45 | /* |
43 | * For the moment we don't consider R6000 and R8000 so we can assume that | 46 | * For the moment we don't consider R6000 and R8000 so we can assume that |
@@ -171,6 +174,9 @@ | |||
171 | #endif | 174 | #endif |
172 | #endif | 175 | #endif |
173 | 176 | ||
177 | #ifndef cpu_has_mips_1 | ||
178 | # define cpu_has_mips_1 (!cpu_has_mips_r6) | ||
179 | #endif | ||
174 | #ifndef cpu_has_mips_2 | 180 | #ifndef cpu_has_mips_2 |
175 | # define cpu_has_mips_2 (cpu_data[0].isa_level & MIPS_CPU_ISA_II) | 181 | # define cpu_has_mips_2 (cpu_data[0].isa_level & MIPS_CPU_ISA_II) |
176 | #endif | 182 | #endif |
@@ -189,12 +195,18 @@ | |||
189 | #ifndef cpu_has_mips32r2 | 195 | #ifndef cpu_has_mips32r2 |
190 | # define cpu_has_mips32r2 (cpu_data[0].isa_level & MIPS_CPU_ISA_M32R2) | 196 | # define cpu_has_mips32r2 (cpu_data[0].isa_level & MIPS_CPU_ISA_M32R2) |
191 | #endif | 197 | #endif |
198 | #ifndef cpu_has_mips32r6 | ||
199 | # define cpu_has_mips32r6 (cpu_data[0].isa_level & MIPS_CPU_ISA_M32R6) | ||
200 | #endif | ||
192 | #ifndef cpu_has_mips64r1 | 201 | #ifndef cpu_has_mips64r1 |
193 | # define cpu_has_mips64r1 (cpu_data[0].isa_level & MIPS_CPU_ISA_M64R1) | 202 | # define cpu_has_mips64r1 (cpu_data[0].isa_level & MIPS_CPU_ISA_M64R1) |
194 | #endif | 203 | #endif |
195 | #ifndef cpu_has_mips64r2 | 204 | #ifndef cpu_has_mips64r2 |
196 | # define cpu_has_mips64r2 (cpu_data[0].isa_level & MIPS_CPU_ISA_M64R2) | 205 | # define cpu_has_mips64r2 (cpu_data[0].isa_level & MIPS_CPU_ISA_M64R2) |
197 | #endif | 206 | #endif |
207 | #ifndef cpu_has_mips64r6 | ||
208 | # define cpu_has_mips64r6 (cpu_data[0].isa_level & MIPS_CPU_ISA_M64R6) | ||
209 | #endif | ||
198 | 210 | ||
199 | /* | 211 | /* |
200 | * Shortcuts ... | 212 | * Shortcuts ... |
@@ -208,17 +220,23 @@ | |||
208 | #define cpu_has_mips_4_5_r (cpu_has_mips_4 | cpu_has_mips_5_r) | 220 | #define cpu_has_mips_4_5_r (cpu_has_mips_4 | cpu_has_mips_5_r) |
209 | #define cpu_has_mips_5_r (cpu_has_mips_5 | cpu_has_mips_r) | 221 | #define cpu_has_mips_5_r (cpu_has_mips_5 | cpu_has_mips_r) |
210 | 222 | ||
211 | #define cpu_has_mips_4_5_r2 (cpu_has_mips_4_5 | cpu_has_mips_r2) | 223 | #define cpu_has_mips_4_5_r2_r6 (cpu_has_mips_4_5 | cpu_has_mips_r2 | \ |
224 | cpu_has_mips_r6) | ||
212 | 225 | ||
213 | #define cpu_has_mips32 (cpu_has_mips32r1 | cpu_has_mips32r2) | 226 | #define cpu_has_mips32 (cpu_has_mips32r1 | cpu_has_mips32r2 | cpu_has_mips32r6) |
214 | #define cpu_has_mips64 (cpu_has_mips64r1 | cpu_has_mips64r2) | 227 | #define cpu_has_mips64 (cpu_has_mips64r1 | cpu_has_mips64r2 | cpu_has_mips64r6) |
215 | #define cpu_has_mips_r1 (cpu_has_mips32r1 | cpu_has_mips64r1) | 228 | #define cpu_has_mips_r1 (cpu_has_mips32r1 | cpu_has_mips64r1) |
216 | #define cpu_has_mips_r2 (cpu_has_mips32r2 | cpu_has_mips64r2) | 229 | #define cpu_has_mips_r2 (cpu_has_mips32r2 | cpu_has_mips64r2) |
230 | #define cpu_has_mips_r6 (cpu_has_mips32r6 | cpu_has_mips64r6) | ||
217 | #define cpu_has_mips_r (cpu_has_mips32r1 | cpu_has_mips32r2 | \ | 231 | #define cpu_has_mips_r (cpu_has_mips32r1 | cpu_has_mips32r2 | \ |
218 | cpu_has_mips64r1 | cpu_has_mips64r2) | 232 | cpu_has_mips32r6 | cpu_has_mips64r1 | \ |
233 | cpu_has_mips64r2 | cpu_has_mips64r6) | ||
234 | |||
235 | /* MIPSR2 and MIPSR6 have a lot of similarities */ | ||
236 | #define cpu_has_mips_r2_r6 (cpu_has_mips_r2 | cpu_has_mips_r6) | ||
219 | 237 | ||
220 | #ifndef cpu_has_mips_r2_exec_hazard | 238 | #ifndef cpu_has_mips_r2_exec_hazard |
221 | #define cpu_has_mips_r2_exec_hazard cpu_has_mips_r2 | 239 | #define cpu_has_mips_r2_exec_hazard (cpu_has_mips_r2 | cpu_has_mips_r6) |
222 | #endif | 240 | #endif |
223 | 241 | ||
224 | /* | 242 | /* |
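cpu-features.h grows runtime predicates for the new ISA bits, folds R6 into the `cpu_has_mips32`/`cpu_has_mips64`/`cpu_has_mips_r` aggregates, and adds `cpu_has_mips_r2_r6` for the many code paths R2 and R6 share. A typical guard then reads as follows (mirroring the hazards.h hunk further down):

```c
/* Flush the instruction hazard barrier only on R2/R6-class cores. */
if (cpu_has_mips_r2_r6)
	__instruction_hazard();	/* jr.hb-based, per hazards.h */
```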
diff --git a/arch/mips/include/asm/cpu-type.h b/arch/mips/include/asm/cpu-type.h index b4e2bd87df50..8245875f8b33 100644 --- a/arch/mips/include/asm/cpu-type.h +++ b/arch/mips/include/asm/cpu-type.h | |||
@@ -54,6 +54,13 @@ static inline int __pure __get_cpu_type(const int cpu_type) | |||
54 | case CPU_M5150: | 54 | case CPU_M5150: |
55 | #endif | 55 | #endif |
56 | 56 | ||
57 | #if defined(CONFIG_SYS_HAS_CPU_MIPS32_R2) || \ | ||
58 | defined(CONFIG_SYS_HAS_CPU_MIPS32_R6) || \ | ||
59 | defined(CONFIG_SYS_HAS_CPU_MIPS64_R2) || \ | ||
60 | defined(CONFIG_SYS_HAS_CPU_MIPS64_R6) | ||
61 | case CPU_QEMU_GENERIC: | ||
62 | #endif | ||
63 | |||
57 | #ifdef CONFIG_SYS_HAS_CPU_MIPS64_R1 | 64 | #ifdef CONFIG_SYS_HAS_CPU_MIPS64_R1 |
58 | case CPU_5KC: | 65 | case CPU_5KC: |
59 | case CPU_5KE: | 66 | case CPU_5KE: |
diff --git a/arch/mips/include/asm/cpu.h b/arch/mips/include/asm/cpu.h index 33866fce4d63..15687234d70a 100644 --- a/arch/mips/include/asm/cpu.h +++ b/arch/mips/include/asm/cpu.h | |||
@@ -93,6 +93,7 @@ | |||
93 | * These are the PRID's for when 23:16 == PRID_COMP_MIPS | 93 | * These are the PRID's for when 23:16 == PRID_COMP_MIPS |
94 | */ | 94 | */ |
95 | 95 | ||
96 | #define PRID_IMP_QEMU_GENERIC 0x0000 | ||
96 | #define PRID_IMP_4KC 0x8000 | 97 | #define PRID_IMP_4KC 0x8000 |
97 | #define PRID_IMP_5KC 0x8100 | 98 | #define PRID_IMP_5KC 0x8100 |
98 | #define PRID_IMP_20KC 0x8200 | 99 | #define PRID_IMP_20KC 0x8200 |
@@ -312,6 +313,8 @@ enum cpu_type_enum { | |||
312 | CPU_LOONGSON3, CPU_CAVIUM_OCTEON, CPU_CAVIUM_OCTEON_PLUS, | 313 | CPU_LOONGSON3, CPU_CAVIUM_OCTEON, CPU_CAVIUM_OCTEON_PLUS, |
313 | CPU_CAVIUM_OCTEON2, CPU_CAVIUM_OCTEON3, CPU_XLR, CPU_XLP, | 314 | CPU_CAVIUM_OCTEON2, CPU_CAVIUM_OCTEON3, CPU_XLR, CPU_XLP, |
314 | 315 | ||
316 | CPU_QEMU_GENERIC, | ||
317 | |||
315 | CPU_LAST | 318 | CPU_LAST |
316 | }; | 319 | }; |
317 | 320 | ||
@@ -329,11 +332,14 @@ enum cpu_type_enum { | |||
329 | #define MIPS_CPU_ISA_M32R2 0x00000020 | 332 | #define MIPS_CPU_ISA_M32R2 0x00000020 |
330 | #define MIPS_CPU_ISA_M64R1 0x00000040 | 333 | #define MIPS_CPU_ISA_M64R1 0x00000040 |
331 | #define MIPS_CPU_ISA_M64R2 0x00000080 | 334 | #define MIPS_CPU_ISA_M64R2 0x00000080 |
335 | #define MIPS_CPU_ISA_M32R6 0x00000100 | ||
336 | #define MIPS_CPU_ISA_M64R6 0x00000200 | ||
332 | 337 | ||
333 | #define MIPS_CPU_ISA_32BIT (MIPS_CPU_ISA_II | MIPS_CPU_ISA_M32R1 | \ | 338 | #define MIPS_CPU_ISA_32BIT (MIPS_CPU_ISA_II | MIPS_CPU_ISA_M32R1 | \ |
334 | MIPS_CPU_ISA_M32R2) | 339 | MIPS_CPU_ISA_M32R2 | MIPS_CPU_ISA_M32R6) |
335 | #define MIPS_CPU_ISA_64BIT (MIPS_CPU_ISA_III | MIPS_CPU_ISA_IV | \ | 340 | #define MIPS_CPU_ISA_64BIT (MIPS_CPU_ISA_III | MIPS_CPU_ISA_IV | \ |
336 | MIPS_CPU_ISA_V | MIPS_CPU_ISA_M64R1 | MIPS_CPU_ISA_M64R2) | 341 | MIPS_CPU_ISA_V | MIPS_CPU_ISA_M64R1 | MIPS_CPU_ISA_M64R2 | \ |
342 | MIPS_CPU_ISA_M64R6) | ||
337 | 343 | ||
338 | /* | 344 | /* |
339 | * CPU Option encodings | 345 | * CPU Option encodings |
@@ -370,6 +376,7 @@ enum cpu_type_enum { | |||
370 | #define MIPS_CPU_RIXIEX 0x200000000ull /* CPU has unique exception codes for {Read, Execute}-Inhibit exceptions */ | 376 | #define MIPS_CPU_RIXIEX 0x200000000ull /* CPU has unique exception codes for {Read, Execute}-Inhibit exceptions */ |
371 | #define MIPS_CPU_MAAR 0x400000000ull /* MAAR(I) registers are present */ | 377 | #define MIPS_CPU_MAAR 0x400000000ull /* MAAR(I) registers are present */ |
372 | #define MIPS_CPU_FRE 0x800000000ull /* FRE & UFE bits implemented */ | 378 | #define MIPS_CPU_FRE 0x800000000ull /* FRE & UFE bits implemented */ |
379 | #define MIPS_CPU_RW_LLB 0x1000000000ull /* LLADDR/LLB writes are allowed */ | ||
373 | 380 | ||
374 | /* | 381 | /* |
375 | * CPU ASE encodings | 382 | * CPU ASE encodings |
diff --git a/arch/mips/include/asm/edac.h b/arch/mips/include/asm/edac.h index ae6fedcb0060..94105d3f58f4 100644 --- a/arch/mips/include/asm/edac.h +++ b/arch/mips/include/asm/edac.h | |||
@@ -26,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size) | |||
26 | " sc %0, %1 \n" | 26 | " sc %0, %1 \n" |
27 | " beqz %0, 1b \n" | 27 | " beqz %0, 1b \n" |
28 | " .set mips0 \n" | 28 | " .set mips0 \n" |
29 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr) | 29 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr) |
30 | : GCC_OFF12_ASM() (*virt_addr)); | 30 | : GCC_OFF_SMALL_ASM() (*virt_addr)); |
31 | 31 | ||
32 | virt_addr++; | 32 | virt_addr++; |
33 | } | 33 | } |
diff --git a/arch/mips/include/asm/elf.h b/arch/mips/include/asm/elf.h index eb4d95de619c..535f196ffe02 100644 --- a/arch/mips/include/asm/elf.h +++ b/arch/mips/include/asm/elf.h | |||
@@ -417,13 +417,15 @@ extern unsigned long arch_randomize_brk(struct mm_struct *mm); | |||
417 | struct arch_elf_state { | 417 | struct arch_elf_state { |
418 | int fp_abi; | 418 | int fp_abi; |
419 | int interp_fp_abi; | 419 | int interp_fp_abi; |
420 | int overall_abi; | 420 | int overall_fp_mode; |
421 | }; | 421 | }; |
422 | 422 | ||
423 | #define MIPS_ABI_FP_UNKNOWN (-1) /* Unknown FP ABI (kernel internal) */ | ||
424 | |||
423 | #define INIT_ARCH_ELF_STATE { \ | 425 | #define INIT_ARCH_ELF_STATE { \ |
424 | .fp_abi = -1, \ | 426 | .fp_abi = MIPS_ABI_FP_UNKNOWN, \ |
425 | .interp_fp_abi = -1, \ | 427 | .interp_fp_abi = MIPS_ABI_FP_UNKNOWN, \ |
426 | .overall_abi = -1, \ | 428 | .overall_fp_mode = -1, \ |
427 | } | 429 | } |
428 | 430 | ||
429 | extern int arch_elf_pt_proc(void *ehdr, void *phdr, struct file *elf, | 431 | extern int arch_elf_pt_proc(void *ehdr, void *phdr, struct file *elf, |
diff --git a/arch/mips/include/asm/fpu.h b/arch/mips/include/asm/fpu.h index 994d21939676..b96d9d327626 100644 --- a/arch/mips/include/asm/fpu.h +++ b/arch/mips/include/asm/fpu.h | |||
@@ -68,7 +68,8 @@ static inline int __enable_fpu(enum fpu_mode mode) | |||
68 | goto fr_common; | 68 | goto fr_common; |
69 | 69 | ||
70 | case FPU_64BIT: | 70 | case FPU_64BIT: |
71 | #if !(defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_64BIT)) | 71 | #if !(defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6) \ |
72 | || defined(CONFIG_64BIT)) | ||
72 | /* we only have a 32-bit FPU */ | 73 | /* we only have a 32-bit FPU */ |
73 | return SIGFPE; | 74 | return SIGFPE; |
74 | #endif | 75 | #endif |
diff --git a/arch/mips/include/asm/futex.h b/arch/mips/include/asm/futex.h index ef9987a61d88..1de190bdfb9c 100644 --- a/arch/mips/include/asm/futex.h +++ b/arch/mips/include/asm/futex.h | |||
@@ -45,19 +45,19 @@ | |||
45 | " "__UA_ADDR "\t2b, 4b \n" \ | 45 | " "__UA_ADDR "\t2b, 4b \n" \ |
46 | " .previous \n" \ | 46 | " .previous \n" \ |
47 | : "=r" (ret), "=&r" (oldval), \ | 47 | : "=r" (ret), "=&r" (oldval), \ |
48 | "=" GCC_OFF12_ASM() (*uaddr) \ | 48 | "=" GCC_OFF_SMALL_ASM() (*uaddr) \ |
49 | : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ | 49 | : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ |
50 | "i" (-EFAULT) \ | 50 | "i" (-EFAULT) \ |
51 | : "memory"); \ | 51 | : "memory"); \ |
52 | } else if (cpu_has_llsc) { \ | 52 | } else if (cpu_has_llsc) { \ |
53 | __asm__ __volatile__( \ | 53 | __asm__ __volatile__( \ |
54 | " .set push \n" \ | 54 | " .set push \n" \ |
55 | " .set noat \n" \ | 55 | " .set noat \n" \ |
56 | " .set arch=r4000 \n" \ | 56 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
57 | "1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \ | 57 | "1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \ |
58 | " .set mips0 \n" \ | 58 | " .set mips0 \n" \ |
59 | " " insn " \n" \ | 59 | " " insn " \n" \ |
60 | " .set arch=r4000 \n" \ | 60 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
61 | "2: "user_sc("$1", "%2")" \n" \ | 61 | "2: "user_sc("$1", "%2")" \n" \ |
62 | " beqz $1, 1b \n" \ | 62 | " beqz $1, 1b \n" \ |
63 | __WEAK_LLSC_MB \ | 63 | __WEAK_LLSC_MB \ |
@@ -74,8 +74,8 @@ | |||
74 | " "__UA_ADDR "\t2b, 4b \n" \ | 74 | " "__UA_ADDR "\t2b, 4b \n" \ |
75 | " .previous \n" \ | 75 | " .previous \n" \ |
76 | : "=r" (ret), "=&r" (oldval), \ | 76 | : "=r" (ret), "=&r" (oldval), \ |
77 | "=" GCC_OFF12_ASM() (*uaddr) \ | 77 | "=" GCC_OFF_SMALL_ASM() (*uaddr) \ |
78 | : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ | 78 | : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ |
79 | "i" (-EFAULT) \ | 79 | "i" (-EFAULT) \ |
80 | : "memory"); \ | 80 | : "memory"); \ |
81 | } else \ | 81 | } else \ |
@@ -174,8 +174,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, | |||
174 | " "__UA_ADDR "\t1b, 4b \n" | 174 | " "__UA_ADDR "\t1b, 4b \n" |
175 | " "__UA_ADDR "\t2b, 4b \n" | 175 | " "__UA_ADDR "\t2b, 4b \n" |
176 | " .previous \n" | 176 | " .previous \n" |
177 | : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) | 177 | : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) |
178 | : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), | 178 | : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), |
179 | "i" (-EFAULT) | 179 | "i" (-EFAULT) |
180 | : "memory"); | 180 | : "memory"); |
181 | } else if (cpu_has_llsc) { | 181 | } else if (cpu_has_llsc) { |
@@ -183,12 +183,12 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, | |||
183 | "# futex_atomic_cmpxchg_inatomic \n" | 183 | "# futex_atomic_cmpxchg_inatomic \n" |
184 | " .set push \n" | 184 | " .set push \n" |
185 | " .set noat \n" | 185 | " .set noat \n" |
186 | " .set arch=r4000 \n" | 186 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
187 | "1: "user_ll("%1", "%3")" \n" | 187 | "1: "user_ll("%1", "%3")" \n" |
188 | " bne %1, %z4, 3f \n" | 188 | " bne %1, %z4, 3f \n" |
189 | " .set mips0 \n" | 189 | " .set mips0 \n" |
190 | " move $1, %z5 \n" | 190 | " move $1, %z5 \n" |
191 | " .set arch=r4000 \n" | 191 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
192 | "2: "user_sc("$1", "%2")" \n" | 192 | "2: "user_sc("$1", "%2")" \n" |
193 | " beqz $1, 1b \n" | 193 | " beqz $1, 1b \n" |
194 | __WEAK_LLSC_MB | 194 | __WEAK_LLSC_MB |
@@ -203,8 +203,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, | |||
203 | " "__UA_ADDR "\t1b, 4b \n" | 203 | " "__UA_ADDR "\t1b, 4b \n" |
204 | " "__UA_ADDR "\t2b, 4b \n" | 204 | " "__UA_ADDR "\t2b, 4b \n" |
205 | " .previous \n" | 205 | " .previous \n" |
206 | : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) | 206 | : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) |
207 | : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), | 207 | : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), |
208 | "i" (-EFAULT) | 208 | "i" (-EFAULT) |
209 | : "memory"); | 209 | : "memory"); |
210 | } else | 210 | } else |
diff --git a/arch/mips/include/asm/hazards.h b/arch/mips/include/asm/hazards.h index e3ee92d4dbe7..4087b47ad1cb 100644 --- a/arch/mips/include/asm/hazards.h +++ b/arch/mips/include/asm/hazards.h | |||
@@ -11,6 +11,7 @@ | |||
11 | #define _ASM_HAZARDS_H | 11 | #define _ASM_HAZARDS_H |
12 | 12 | ||
13 | #include <linux/stringify.h> | 13 | #include <linux/stringify.h> |
14 | #include <asm/compiler.h> | ||
14 | 15 | ||
15 | #define ___ssnop \ | 16 | #define ___ssnop \ |
16 | sll $0, $0, 1 | 17 | sll $0, $0, 1 |
@@ -21,7 +22,7 @@ | |||
21 | /* | 22 | /* |
22 | * TLB hazards | 23 | * TLB hazards |
23 | */ | 24 | */ |
24 | #if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON) | 25 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) && !defined(CONFIG_CPU_CAVIUM_OCTEON) |
25 | 26 | ||
26 | /* | 27 | /* |
27 | * MIPSR2 defines ehb for hazard avoidance | 28 | * MIPSR2 defines ehb for hazard avoidance |
@@ -58,7 +59,7 @@ do { \ | |||
58 | unsigned long tmp; \ | 59 | unsigned long tmp; \ |
59 | \ | 60 | \ |
60 | __asm__ __volatile__( \ | 61 | __asm__ __volatile__( \ |
61 | " .set mips64r2 \n" \ | 62 | " .set "MIPS_ISA_LEVEL" \n" \ |
62 | " dla %0, 1f \n" \ | 63 | " dla %0, 1f \n" \ |
63 | " jr.hb %0 \n" \ | 64 | " jr.hb %0 \n" \ |
64 | " .set mips0 \n" \ | 65 | " .set mips0 \n" \ |
@@ -132,7 +133,7 @@ do { \ | |||
132 | 133 | ||
133 | #define instruction_hazard() \ | 134 | #define instruction_hazard() \ |
134 | do { \ | 135 | do { \ |
135 | if (cpu_has_mips_r2) \ | 136 | if (cpu_has_mips_r2_r6) \ |
136 | __instruction_hazard(); \ | 137 | __instruction_hazard(); \ |
137 | } while (0) | 138 | } while (0) |
138 | 139 | ||
@@ -240,7 +241,7 @@ do { \ | |||
240 | 241 | ||
241 | #define __disable_fpu_hazard | 242 | #define __disable_fpu_hazard |
242 | 243 | ||
243 | #elif defined(CONFIG_CPU_MIPSR2) | 244 | #elif defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
244 | 245 | ||
245 | #define __enable_fpu_hazard \ | 246 | #define __enable_fpu_hazard \ |
246 | ___ehb | 247 | ___ehb |
diff --git a/arch/mips/include/asm/irqflags.h b/arch/mips/include/asm/irqflags.h index 0fa5fdcd1f01..d60cc68fa31e 100644 --- a/arch/mips/include/asm/irqflags.h +++ b/arch/mips/include/asm/irqflags.h | |||
@@ -15,9 +15,10 @@ | |||
15 | 15 | ||
16 | #include <linux/compiler.h> | 16 | #include <linux/compiler.h> |
17 | #include <linux/stringify.h> | 17 | #include <linux/stringify.h> |
18 | #include <asm/compiler.h> | ||
18 | #include <asm/hazards.h> | 19 | #include <asm/hazards.h> |
19 | 20 | ||
20 | #ifdef CONFIG_CPU_MIPSR2 | 21 | #if defined(CONFIG_CPU_MIPSR2) || defined (CONFIG_CPU_MIPSR6) |
21 | 22 | ||
22 | static inline void arch_local_irq_disable(void) | 23 | static inline void arch_local_irq_disable(void) |
23 | { | 24 | { |
@@ -118,7 +119,7 @@ void arch_local_irq_disable(void); | |||
118 | unsigned long arch_local_irq_save(void); | 119 | unsigned long arch_local_irq_save(void); |
119 | void arch_local_irq_restore(unsigned long flags); | 120 | void arch_local_irq_restore(unsigned long flags); |
120 | void __arch_local_irq_restore(unsigned long flags); | 121 | void __arch_local_irq_restore(unsigned long flags); |
121 | #endif /* CONFIG_CPU_MIPSR2 */ | 122 | #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */ |
122 | 123 | ||
123 | static inline void arch_local_irq_enable(void) | 124 | static inline void arch_local_irq_enable(void) |
124 | { | 125 | { |
@@ -126,7 +127,7 @@ static inline void arch_local_irq_enable(void) | |||
126 | " .set push \n" | 127 | " .set push \n" |
127 | " .set reorder \n" | 128 | " .set reorder \n" |
128 | " .set noat \n" | 129 | " .set noat \n" |
129 | #if defined(CONFIG_CPU_MIPSR2) | 130 | #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
130 | " ei \n" | 131 | " ei \n" |
131 | #else | 132 | #else |
132 | " mfc0 $1,$12 \n" | 133 | " mfc0 $1,$12 \n" |
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h index 46dfc3c1fd49..8feaed62a2ab 100644 --- a/arch/mips/include/asm/local.h +++ b/arch/mips/include/asm/local.h | |||
@@ -5,6 +5,7 @@ | |||
5 | #include <linux/bitops.h> | 5 | #include <linux/bitops.h> |
6 | #include <linux/atomic.h> | 6 | #include <linux/atomic.h> |
7 | #include <asm/cmpxchg.h> | 7 | #include <asm/cmpxchg.h> |
8 | #include <asm/compiler.h> | ||
8 | #include <asm/war.h> | 9 | #include <asm/war.h> |
9 | 10 | ||
10 | typedef struct | 11 | typedef struct |
@@ -47,7 +48,7 @@ static __inline__ long local_add_return(long i, local_t * l) | |||
47 | unsigned long temp; | 48 | unsigned long temp; |
48 | 49 | ||
49 | __asm__ __volatile__( | 50 | __asm__ __volatile__( |
50 | " .set arch=r4000 \n" | 51 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
51 | "1:" __LL "%1, %2 # local_add_return \n" | 52 | "1:" __LL "%1, %2 # local_add_return \n" |
52 | " addu %0, %1, %3 \n" | 53 | " addu %0, %1, %3 \n" |
53 | __SC "%0, %2 \n" | 54 | __SC "%0, %2 \n" |
@@ -92,7 +93,7 @@ static __inline__ long local_sub_return(long i, local_t * l) | |||
92 | unsigned long temp; | 93 | unsigned long temp; |
93 | 94 | ||
94 | __asm__ __volatile__( | 95 | __asm__ __volatile__( |
95 | " .set arch=r4000 \n" | 96 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
96 | "1:" __LL "%1, %2 # local_sub_return \n" | 97 | "1:" __LL "%1, %2 # local_sub_return \n" |
97 | " subu %0, %1, %3 \n" | 98 | " subu %0, %1, %3 \n" |
98 | __SC "%0, %2 \n" | 99 | __SC "%0, %2 \n" |
diff --git a/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h b/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h index 2e54b4bff5cf..90dbe43c8d27 100644 --- a/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h +++ b/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h | |||
@@ -85,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr, | |||
85 | " "__beqz"%0, 1b \n" | 85 | " "__beqz"%0, 1b \n" |
86 | " nop \n" | 86 | " nop \n" |
87 | " .set pop \n" | 87 | " .set pop \n" |
88 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 88 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
89 | : "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr)); | 89 | : "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr)); |
90 | } | 90 | } |
91 | 91 | ||
92 | /* | 92 | /* |
@@ -106,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr, | |||
106 | " "__beqz"%0, 1b \n" | 106 | " "__beqz"%0, 1b \n" |
107 | " nop \n" | 107 | " nop \n" |
108 | " .set pop \n" | 108 | " .set pop \n" |
109 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 109 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
110 | : "ir" (mask), GCC_OFF12_ASM() (*addr)); | 110 | : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); |
111 | } | 111 | } |
112 | 112 | ||
113 | /* | 113 | /* |
@@ -127,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr, | |||
127 | " "__beqz"%0, 1b \n" | 127 | " "__beqz"%0, 1b \n" |
128 | " nop \n" | 128 | " nop \n" |
129 | " .set pop \n" | 129 | " .set pop \n" |
130 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 130 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
131 | : "ir" (~mask), GCC_OFF12_ASM() (*addr)); | 131 | : "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr)); |
132 | } | 132 | } |
133 | 133 | ||
134 | /* | 134 | /* |
@@ -148,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr, | |||
148 | " "__beqz"%0, 1b \n" | 148 | " "__beqz"%0, 1b \n" |
149 | " nop \n" | 149 | " nop \n" |
150 | " .set pop \n" | 150 | " .set pop \n" |
151 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 151 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
152 | : "ir" (mask), GCC_OFF12_ASM() (*addr)); | 152 | : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); |
153 | } | 153 | } |
154 | 154 | ||
155 | /* | 155 | /* |
@@ -220,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) | |||
220 | " .set arch=r4000 \n" \ | 220 | " .set arch=r4000 \n" \ |
221 | "1: ll %0, %1 #custom_read_reg32 \n" \ | 221 | "1: ll %0, %1 #custom_read_reg32 \n" \ |
222 | " .set pop \n" \ | 222 | " .set pop \n" \ |
223 | : "=r" (tmp), "=" GCC_OFF12_ASM() (*address) \ | 223 | : "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ |
224 | : GCC_OFF12_ASM() (*address)) | 224 | : GCC_OFF_SMALL_ASM() (*address)) |
225 | 225 | ||
226 | #define custom_write_reg32(address, tmp) \ | 226 | #define custom_write_reg32(address, tmp) \ |
227 | __asm__ __volatile__( \ | 227 | __asm__ __volatile__( \ |
@@ -231,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) | |||
231 | " "__beqz"%0, 1b \n" \ | 231 | " "__beqz"%0, 1b \n" \ |
232 | " nop \n" \ | 232 | " nop \n" \ |
233 | " .set pop \n" \ | 233 | " .set pop \n" \ |
234 | : "=&r" (tmp), "=" GCC_OFF12_ASM() (*address) \ | 234 | : "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ |
235 | : "0" (tmp), GCC_OFF12_ASM() (*address)) | 235 | : "0" (tmp), GCC_OFF_SMALL_ASM() (*address)) |
236 | 236 | ||
237 | #endif /* __ASM_REGOPS_H__ */ | 237 | #endif /* __ASM_REGOPS_H__ */ |
diff --git a/arch/mips/include/asm/mips-r2-to-r6-emul.h b/arch/mips/include/asm/mips-r2-to-r6-emul.h new file mode 100644 index 000000000000..60570f2c3ba2 --- /dev/null +++ b/arch/mips/include/asm/mips-r2-to-r6-emul.h | |||
@@ -0,0 +1,96 @@ | |||
1 | /* | ||
2 | * This file is subject to the terms and conditions of the GNU General Public | ||
3 | * License. See the file "COPYING" in the main directory of this archive | ||
4 | * for more details. | ||
5 | * | ||
6 | * Copyright (c) 2014 Imagination Technologies Ltd. | ||
7 | * Author: Markos Chandras <markos.chandras@imgtec.com> | ||
8 | */ | ||
9 | |||
10 | #ifndef __ASM_MIPS_R2_TO_R6_EMUL_H | ||
11 | #define __ASM_MIPS_R2_TO_R6_EMUL_H | ||
12 | |||
13 | struct mips_r2_emulator_stats { | ||
14 | u64 movs; | ||
15 | u64 hilo; | ||
16 | u64 muls; | ||
17 | u64 divs; | ||
18 | u64 dsps; | ||
19 | u64 bops; | ||
20 | u64 traps; | ||
21 | u64 fpus; | ||
22 | u64 loads; | ||
23 | u64 stores; | ||
24 | u64 llsc; | ||
25 | u64 dsemul; | ||
26 | }; | ||
27 | |||
28 | struct mips_r2br_emulator_stats { | ||
29 | u64 jrs; | ||
30 | u64 bltzl; | ||
31 | u64 bgezl; | ||
32 | u64 bltzll; | ||
33 | u64 bgezll; | ||
34 | u64 bltzall; | ||
35 | u64 bgezall; | ||
36 | u64 bltzal; | ||
37 | u64 bgezal; | ||
38 | u64 beql; | ||
39 | u64 bnel; | ||
40 | u64 blezl; | ||
41 | u64 bgtzl; | ||
42 | }; | ||
43 | |||
44 | #ifdef CONFIG_DEBUG_FS | ||
45 | |||
46 | #define MIPS_R2_STATS(M) \ | ||
47 | do { \ | ||
48 | u32 nir; \ | ||
49 | int err; \ | ||
50 | \ | ||
51 | preempt_disable(); \ | ||
52 | __this_cpu_inc(mipsr2emustats.M); \ | ||
53 | err = __get_user(nir, (u32 __user *)regs->cp0_epc); \ | ||
54 | if (!err) { \ | ||
55 | if (nir == BREAK_MATH) \ | ||
56 | __this_cpu_inc(mipsr2bdemustats.M); \ | ||
57 | } \ | ||
58 | preempt_enable(); \ | ||
59 | } while (0) | ||
60 | |||
61 | #define MIPS_R2BR_STATS(M) \ | ||
62 | do { \ | ||
63 | preempt_disable(); \ | ||
64 | __this_cpu_inc(mipsr2bremustats.M); \ | ||
65 | preempt_enable(); \ | ||
66 | } while (0) | ||
67 | |||
68 | #else | ||
69 | |||
70 | #define MIPS_R2_STATS(M) do { } while (0) | ||
71 | #define MIPS_R2BR_STATS(M) do { } while (0) | ||
72 | |||
73 | #endif /* CONFIG_DEBUG_FS */ | ||
74 | |||
75 | struct r2_decoder_table { | ||
76 | u32 mask; | ||
77 | u32 code; | ||
78 | int (*func)(struct pt_regs *regs, u32 inst); | ||
79 | }; | ||
80 | |||
81 | |||
82 | extern void do_trap_or_bp(struct pt_regs *regs, unsigned int code, | ||
83 | const char *str); | ||
84 | |||
85 | #ifndef CONFIG_MIPSR2_TO_R6_EMULATOR | ||
86 | static int __maybe_unused mipsr2_emulation; | ||
87 | static __maybe_unused int mipsr2_decoder(struct pt_regs *regs, u32 inst) { return 0; } | ||
88 | #else | ||
89 | /* MIPS R2 Emulator ON/OFF */ | ||
90 | extern int mipsr2_emulation; | ||
91 | extern int mipsr2_decoder(struct pt_regs *regs, u32 inst); | ||
92 | #endif /* CONFIG_MIPSR2_TO_R6_EMULATOR */ | ||
93 | |||
94 | #define NO_R6EMU (cpu_has_mips_r6 && !mipsr2_emulation) | ||
95 | |||
96 | #endif /* __ASM_MIPS_R2_TO_R6_EMUL_H */ | ||
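The decoder hook declared above is meant to be driven from the reserved-instruction exception path: on an R6 core with the emulator enabled, the handler fetches the trapping opcode and offers it to mipsr2_decoder(), which is what feeds the statistics structures as a side effect. A hypothetical caller, much simpler than the real one in arch/mips/kernel/traps.c (kernel context assumed for struct pt_regs, exception_epc() and __get_user()):

	static int handle_ri_sketch(struct pt_regs *regs)
	{
		u32 inst;

		/* only emulate on an R6 core with the knob switched on */
		if (!cpu_has_mips_r6 || !mipsr2_emulation)
			return -1;

		if (__get_user(inst, (u32 __user *)exception_epc(regs)))
			return -1;	/* could not read the opcode */

		/* 0 => emulated; non-zero => fall through to the SIGILL path */
		return mipsr2_decoder(regs, inst);
	}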
diff --git a/arch/mips/include/asm/mipsregs.h b/arch/mips/include/asm/mipsregs.h index 5e4aef304b02..06346001ee4d 100644 --- a/arch/mips/include/asm/mipsregs.h +++ b/arch/mips/include/asm/mipsregs.h | |||
@@ -653,6 +653,7 @@ | |||
653 | #define MIPS_CONF5_NF (_ULCAST_(1) << 0) | 653 | #define MIPS_CONF5_NF (_ULCAST_(1) << 0) |
654 | #define MIPS_CONF5_UFR (_ULCAST_(1) << 2) | 654 | #define MIPS_CONF5_UFR (_ULCAST_(1) << 2) |
655 | #define MIPS_CONF5_MRP (_ULCAST_(1) << 3) | 655 | #define MIPS_CONF5_MRP (_ULCAST_(1) << 3) |
656 | #define MIPS_CONF5_LLB (_ULCAST_(1) << 4) | ||
656 | #define MIPS_CONF5_MVH (_ULCAST_(1) << 5) | 657 | #define MIPS_CONF5_MVH (_ULCAST_(1) << 5) |
657 | #define MIPS_CONF5_FRE (_ULCAST_(1) << 8) | 658 | #define MIPS_CONF5_FRE (_ULCAST_(1) << 8) |
658 | #define MIPS_CONF5_UFE (_ULCAST_(1) << 9) | 659 | #define MIPS_CONF5_UFE (_ULCAST_(1) << 9) |
@@ -1127,6 +1128,8 @@ do { \ | |||
1127 | #define write_c0_config6(val) __write_32bit_c0_register($16, 6, val) | 1128 | #define write_c0_config6(val) __write_32bit_c0_register($16, 6, val) |
1128 | #define write_c0_config7(val) __write_32bit_c0_register($16, 7, val) | 1129 | #define write_c0_config7(val) __write_32bit_c0_register($16, 7, val) |
1129 | 1130 | ||
1131 | #define read_c0_lladdr() __read_ulong_c0_register($17, 0) | ||
1132 | #define write_c0_lladdr(val) __write_ulong_c0_register($17, 0, val) | ||
1130 | #define read_c0_maar() __read_ulong_c0_register($17, 1) | 1133 | #define read_c0_maar() __read_ulong_c0_register($17, 1) |
1131 | #define write_c0_maar(val) __write_ulong_c0_register($17, 1, val) | 1134 | #define write_c0_maar(val) __write_ulong_c0_register($17, 1, val) |
1132 | #define read_c0_maari() __read_32bit_c0_register($17, 2) | 1135 | #define read_c0_maari() __read_32bit_c0_register($17, 2) |
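Config5.LLB and the LLAddr accessors added here work as a pair: on cores that implement the feature, the load-linked bit is architecturally visible and writing LLAddr clears it. A sketch of a probe plus the context-switch use (hypothetical helpers; the real logic is split between cpu-probe.c and the switch_to.h change later in this merge):

	/* Kernel context assumed: asm/mipsregs.h provides the accessors. */
	static inline int has_rw_llb_sketch(void)
	{
		return (read_c0_config5() & MIPS_CONF5_LLB) != 0;
	}

	static inline void clear_llb_sketch(void)
	{
		/*
		 * Writing LLAddr clears LLB, so an ll issued by the
		 * outgoing task can never pair with an sc executed after
		 * the context switch; no software ll_bit bookkeeping is
		 * needed on such cores.
		 */
		if (has_rw_llb_sketch())
			write_c0_lladdr(0);
	}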
diff --git a/arch/mips/include/asm/module.h b/arch/mips/include/asm/module.h index 800fe578dc99..0aaf9a01ea50 100644 --- a/arch/mips/include/asm/module.h +++ b/arch/mips/include/asm/module.h | |||
@@ -88,10 +88,14 @@ search_module_dbetables(unsigned long addr) | |||
88 | #define MODULE_PROC_FAMILY "MIPS32_R1 " | 88 | #define MODULE_PROC_FAMILY "MIPS32_R1 " |
89 | #elif defined CONFIG_CPU_MIPS32_R2 | 89 | #elif defined CONFIG_CPU_MIPS32_R2 |
90 | #define MODULE_PROC_FAMILY "MIPS32_R2 " | 90 | #define MODULE_PROC_FAMILY "MIPS32_R2 " |
91 | #elif defined CONFIG_CPU_MIPS32_R6 | ||
92 | #define MODULE_PROC_FAMILY "MIPS32_R6 " | ||
91 | #elif defined CONFIG_CPU_MIPS64_R1 | 93 | #elif defined CONFIG_CPU_MIPS64_R1 |
92 | #define MODULE_PROC_FAMILY "MIPS64_R1 " | 94 | #define MODULE_PROC_FAMILY "MIPS64_R1 " |
93 | #elif defined CONFIG_CPU_MIPS64_R2 | 95 | #elif defined CONFIG_CPU_MIPS64_R2 |
94 | #define MODULE_PROC_FAMILY "MIPS64_R2 " | 96 | #define MODULE_PROC_FAMILY "MIPS64_R2 " |
97 | #elif defined CONFIG_CPU_MIPS64_R6 | ||
98 | #define MODULE_PROC_FAMILY "MIPS64_R6 " | ||
95 | #elif defined CONFIG_CPU_R3000 | 99 | #elif defined CONFIG_CPU_R3000 |
96 | #define MODULE_PROC_FAMILY "R3000 " | 100 | #define MODULE_PROC_FAMILY "R3000 " |
97 | #elif defined CONFIG_CPU_TX39XX | 101 | #elif defined CONFIG_CPU_TX39XX |
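The new family strings are not cosmetic: MODULE_PROC_FAMILY is pasted into the module vermagic, so a MIPS64_R6 module is refused by a MIPS64_R2 kernel at load time. A user-space illustration of the effect (the vermagic strings are hypothetical; the real comparison is done by the kernel's module loader):

	#include <stdio.h>
	#include <string.h>

	int main(void)
	{
		const char *kernel_vermagic = "3.20.0 SMP mod_unload MIPS64_R2 64BIT";
		const char *module_vermagic = "3.20.0 SMP mod_unload MIPS64_R6 64BIT";

		if (strcmp(kernel_vermagic, module_vermagic) != 0)
			printf("insmod would fail: invalid module format\n");
		return 0;
	}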
diff --git a/arch/mips/include/asm/octeon/cvmx-cmd-queue.h b/arch/mips/include/asm/octeon/cvmx-cmd-queue.h index 75739c83f07e..8d05d9069823 100644 --- a/arch/mips/include/asm/octeon/cvmx-cmd-queue.h +++ b/arch/mips/include/asm/octeon/cvmx-cmd-queue.h | |||
@@ -275,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id, | |||
275 | " lbu %[ticket], %[now_serving]\n" | 275 | " lbu %[ticket], %[now_serving]\n" |
276 | "4:\n" | 276 | "4:\n" |
277 | ".set pop\n" : | 277 | ".set pop\n" : |
278 | [ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), | 278 | [ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), |
279 | [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), | 279 | [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), |
280 | [my_ticket] "=r"(my_ticket) | 280 | [my_ticket] "=r"(my_ticket) |
281 | ); | 281 | ); |
diff --git a/arch/mips/include/asm/r4kcache.h b/arch/mips/include/asm/r4kcache.h index e293a8d89a6d..1b22d2da88a1 100644 --- a/arch/mips/include/asm/r4kcache.h +++ b/arch/mips/include/asm/r4kcache.h | |||
@@ -14,6 +14,7 @@ | |||
14 | 14 | ||
15 | #include <asm/asm.h> | 15 | #include <asm/asm.h> |
16 | #include <asm/cacheops.h> | 16 | #include <asm/cacheops.h> |
17 | #include <asm/compiler.h> | ||
17 | #include <asm/cpu-features.h> | 18 | #include <asm/cpu-features.h> |
18 | #include <asm/cpu-type.h> | 19 | #include <asm/cpu-type.h> |
19 | #include <asm/mipsmtregs.h> | 20 | #include <asm/mipsmtregs.h> |
@@ -39,7 +40,7 @@ extern void (*r4k_blast_icache)(void); | |||
39 | __asm__ __volatile__( \ | 40 | __asm__ __volatile__( \ |
40 | " .set push \n" \ | 41 | " .set push \n" \ |
41 | " .set noreorder \n" \ | 42 | " .set noreorder \n" \ |
42 | " .set arch=r4000 \n" \ | 43 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
43 | " cache %0, %1 \n" \ | 44 | " cache %0, %1 \n" \ |
44 | " .set pop \n" \ | 45 | " .set pop \n" \ |
45 | : \ | 46 | : \ |
@@ -147,7 +148,7 @@ static inline void flush_scache_line(unsigned long addr) | |||
147 | __asm__ __volatile__( \ | 148 | __asm__ __volatile__( \ |
148 | " .set push \n" \ | 149 | " .set push \n" \ |
149 | " .set noreorder \n" \ | 150 | " .set noreorder \n" \ |
150 | " .set arch=r4000 \n" \ | 151 | " .set "MIPS_ISA_ARCH_LEVEL" \n" \ |
151 | "1: cache %0, (%1) \n" \ | 152 | "1: cache %0, (%1) \n" \ |
152 | "2: .set pop \n" \ | 153 | "2: .set pop \n" \ |
153 | " .section __ex_table,\"a\" \n" \ | 154 | " .section __ex_table,\"a\" \n" \ |
@@ -218,6 +219,7 @@ static inline void invalidate_tcache_page(unsigned long addr) | |||
218 | cache_op(Page_Invalidate_T, addr); | 219 | cache_op(Page_Invalidate_T, addr); |
219 | } | 220 | } |
220 | 221 | ||
222 | #ifndef CONFIG_CPU_MIPSR6 | ||
221 | #define cache16_unroll32(base,op) \ | 223 | #define cache16_unroll32(base,op) \ |
222 | __asm__ __volatile__( \ | 224 | __asm__ __volatile__( \ |
223 | " .set push \n" \ | 225 | " .set push \n" \ |
@@ -322,6 +324,150 @@ static inline void invalidate_tcache_page(unsigned long addr) | |||
322 | : "r" (base), \ | 324 | : "r" (base), \ |
323 | "i" (op)); | 325 | "i" (op)); |
324 | 326 | ||
327 | #else | ||
328 | /* | ||
329 | * MIPS R6 changed the cache opcode and moved to an 8-bit offset field. | ||
330 | * This means we now need to increment the base register before we flush | ||
331 | * more cache lines. | ||
332 | */ | ||
333 | #define cache16_unroll32(base,op) \ | ||
334 | __asm__ __volatile__( \ | ||
335 | " .set push\n" \ | ||
336 | " .set noreorder\n" \ | ||
337 | " .set mips64r6\n" \ | ||
338 | " .set noat\n" \ | ||
339 | " cache %1, 0x000(%0); cache %1, 0x010(%0)\n" \ | ||
340 | " cache %1, 0x020(%0); cache %1, 0x030(%0)\n" \ | ||
341 | " cache %1, 0x040(%0); cache %1, 0x050(%0)\n" \ | ||
342 | " cache %1, 0x060(%0); cache %1, 0x070(%0)\n" \ | ||
343 | " cache %1, 0x080(%0); cache %1, 0x090(%0)\n" \ | ||
344 | " cache %1, 0x0a0(%0); cache %1, 0x0b0(%0)\n" \ | ||
345 | " cache %1, 0x0c0(%0); cache %1, 0x0d0(%0)\n" \ | ||
346 | " cache %1, 0x0e0(%0); cache %1, 0x0f0(%0)\n" \ | ||
347 | " addiu $1, $0, 0x100 \n" \ | ||
348 | " cache %1, 0x000($1); cache %1, 0x010($1)\n" \ | ||
349 | " cache %1, 0x020($1); cache %1, 0x030($1)\n" \ | ||
350 | " cache %1, 0x040($1); cache %1, 0x050($1)\n" \ | ||
351 | " cache %1, 0x060($1); cache %1, 0x070($1)\n" \ | ||
352 | " cache %1, 0x080($1); cache %1, 0x090($1)\n" \ | ||
353 | " cache %1, 0x0a0($1); cache %1, 0x0b0($1)\n" \ | ||
354 | " cache %1, 0x0c0($1); cache %1, 0x0d0($1)\n" \ | ||
355 | " cache %1, 0x0e0($1); cache %1, 0x0f0($1)\n" \ | ||
356 | " .set pop\n" \ | ||
357 | : \ | ||
358 | : "r" (base), \ | ||
359 | "i" (op)); | ||
360 | |||
361 | #define cache32_unroll32(base,op) \ | ||
362 | __asm__ __volatile__( \ | ||
363 | " .set push\n" \ | ||
364 | " .set noreorder\n" \ | ||
365 | " .set mips64r6\n" \ | ||
366 | " .set noat\n" \ | ||
367 | " cache %1, 0x000(%0); cache %1, 0x020(%0)\n" \ | ||
368 | " cache %1, 0x040(%0); cache %1, 0x060(%0)\n" \ | ||
369 | " cache %1, 0x080(%0); cache %1, 0x0a0(%0)\n" \ | ||
370 | " cache %1, 0x0c0(%0); cache %1, 0x0e0(%0)\n" \ | ||
371 | " addiu $1, %0, 0x100\n" \ | ||
372 | " cache %1, 0x000($1); cache %1, 0x020($1)\n" \ | ||
373 | " cache %1, 0x040($1); cache %1, 0x060($1)\n" \ | ||
374 | " cache %1, 0x080($1); cache %1, 0x0a0($1)\n" \ | ||
375 | " cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n" \ | ||
376 | " addiu $1, $1, 0x100\n" \ | ||
377 | " cache %1, 0x000($1); cache %1, 0x020($1)\n" \ | ||
378 | " cache %1, 0x040($1); cache %1, 0x060($1)\n" \ | ||
379 | " cache %1, 0x080($1); cache %1, 0x0a0($1)\n" \ | ||
380 | " cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n" \ | ||
381 | " addiu $1, $1, 0x100\n" \ | ||
382 | " cache %1, 0x000($1); cache %1, 0x020($1)\n" \ | ||
383 | " cache %1, 0x040($1); cache %1, 0x060($1)\n" \ | ||
384 | " cache %1, 0x080($1); cache %1, 0x0a0($1)\n" \ | ||
385 | " cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n" \ | ||
386 | " .set pop\n" \ | ||
387 | : \ | ||
388 | : "r" (base), \ | ||
389 | "i" (op)); | ||
390 | |||
391 | #define cache64_unroll32(base,op) \ | ||
392 | __asm__ __volatile__( \ | ||
393 | " .set push\n" \ | ||
394 | " .set noreorder\n" \ | ||
395 | " .set mips64r6\n" \ | ||
396 | " .set noat\n" \ | ||
397 | " cache %1, 0x000(%0); cache %1, 0x040(%0)\n" \ | ||
398 | " cache %1, 0x080(%0); cache %1, 0x0c0(%0)\n" \ | ||
399 | " addiu $1, %0, 0x100\n" \ | ||
400 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
401 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
402 | " addiu $1, %0, 0x100\n" \ | ||
403 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
404 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
405 | " addiu $1, %0, 0x100\n" \ | ||
406 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
407 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
408 | " addiu $1, %0, 0x100\n" \ | ||
409 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
410 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
411 | " addiu $1, %0, 0x100\n" \ | ||
412 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
413 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
414 | " addiu $1, %0, 0x100\n" \ | ||
415 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
416 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
417 | " addiu $1, %0, 0x100\n" \ | ||
418 | " cache %1, 0x000($1); cache %1, 0x040($1)\n" \ | ||
419 | " cache %1, 0x080($1); cache %1, 0x0c0($1)\n" \ | ||
420 | " .set pop\n" \ | ||
421 | : \ | ||
422 | : "r" (base), \ | ||
423 | "i" (op)); | ||
424 | |||
425 | #define cache128_unroll32(base,op) \ | ||
426 | __asm__ __volatile__( \ | ||
427 | " .set push\n" \ | ||
428 | " .set noreorder\n" \ | ||
429 | " .set mips64r6\n" \ | ||
430 | " .set noat\n" \ | ||
431 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
432 | " addiu $1, %0, 0x100\n" \ | ||
433 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
434 | " addiu $1, %0, 0x100\n" \ | ||
435 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
436 | " addiu $1, %0, 0x100\n" \ | ||
437 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
438 | " addiu $1, %0, 0x100\n" \ | ||
439 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
440 | " addiu $1, %0, 0x100\n" \ | ||
441 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
442 | " addiu $1, %0, 0x100\n" \ | ||
443 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
444 | " addiu $1, %0, 0x100\n" \ | ||
445 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
446 | " addiu $1, %0, 0x100\n" \ | ||
447 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
448 | " addiu $1, %0, 0x100\n" \ | ||
449 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
450 | " addiu $1, %0, 0x100\n" \ | ||
451 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
452 | " addiu $1, %0, 0x100\n" \ | ||
453 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
454 | " addiu $1, %0, 0x100\n" \ | ||
455 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
456 | " addiu $1, %0, 0x100\n" \ | ||
457 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
458 | " addiu $1, %0, 0x100\n" \ | ||
459 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
460 | " addiu $1, %0, 0x100\n" \ | ||
461 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
462 | " addiu $1, %0, 0x100\n" \ | ||
463 | " cache %1, 0x000(%0); cache %1, 0x080(%0)\n" \ | ||
464 | " addiu $1, %0, 0x100\n" \ | ||
465 | " .set pop\n" \ | ||
466 | : \ | ||
467 | : "r" (base), \ | ||
468 | "i" (op)); | ||
469 | #endif /* CONFIG_CPU_MIPSR6 */ | ||
470 | |||
325 | /* | 471 | /* |
326 | * Perform the cache operation specified by op using a user mode virtual | 472 | * Perform the cache operation specified by op using a user mode virtual |
327 | * address while in kernel mode. | 473 | * address while in kernel mode. |
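For scale, this is roughly what consumes the unroll macros: the __BUILD_BLAST_CACHE() generator later in this header expands into loops that step through the index space one unrolled block at a time, 32 lines (0x200 bytes at a 16-byte line size) per iteration. A simplified sketch of the generated blast_dcache16(), with the prologue/epilogue hooks dropped:

	static inline void blast_dcache16_sketch(void)
	{
		unsigned long start = INDEX_BASE;	/* CKSEG0 */
		unsigned long end = start + current_cpu_data.dcache.waysize;
		unsigned long ws_inc = 1UL << current_cpu_data.dcache.waybit;
		unsigned long ws_end = current_cpu_data.dcache.ways <<
				       current_cpu_data.dcache.waybit;
		unsigned long ws, addr;

		/* one cache16_unroll32() invocation covers 32 x 16 bytes */
		for (ws = 0; ws < ws_end; ws += ws_inc)
			for (addr = start; addr < end; addr += 0x200)
				cache16_unroll32(addr | ws, Index_Writeback_Inv_D);
	}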
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h index c6d06d383ef9..b4548690ade9 100644 --- a/arch/mips/include/asm/spinlock.h +++ b/arch/mips/include/asm/spinlock.h | |||
@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) | |||
89 | " subu %[ticket], %[ticket], 1 \n" | 89 | " subu %[ticket], %[ticket], 1 \n" |
90 | " .previous \n" | 90 | " .previous \n" |
91 | " .set pop \n" | 91 | " .set pop \n" |
92 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 92 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
93 | [serving_now_ptr] "+m" (lock->h.serving_now), | 93 | [serving_now_ptr] "+m" (lock->h.serving_now), |
94 | [ticket] "=&r" (tmp), | 94 | [ticket] "=&r" (tmp), |
95 | [my_ticket] "=&r" (my_ticket) | 95 | [my_ticket] "=&r" (my_ticket) |
@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) | |||
122 | " subu %[ticket], %[ticket], 1 \n" | 122 | " subu %[ticket], %[ticket], 1 \n" |
123 | " .previous \n" | 123 | " .previous \n" |
124 | " .set pop \n" | 124 | " .set pop \n" |
125 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 125 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
126 | [serving_now_ptr] "+m" (lock->h.serving_now), | 126 | [serving_now_ptr] "+m" (lock->h.serving_now), |
127 | [ticket] "=&r" (tmp), | 127 | [ticket] "=&r" (tmp), |
128 | [my_ticket] "=&r" (my_ticket) | 128 | [my_ticket] "=&r" (my_ticket) |
@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) | |||
164 | " li %[ticket], 0 \n" | 164 | " li %[ticket], 0 \n" |
165 | " .previous \n" | 165 | " .previous \n" |
166 | " .set pop \n" | 166 | " .set pop \n" |
167 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 167 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
168 | [ticket] "=&r" (tmp), | 168 | [ticket] "=&r" (tmp), |
169 | [my_ticket] "=&r" (tmp2), | 169 | [my_ticket] "=&r" (tmp2), |
170 | [now_serving] "=&r" (tmp3) | 170 | [now_serving] "=&r" (tmp3) |
@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) | |||
188 | " li %[ticket], 0 \n" | 188 | " li %[ticket], 0 \n" |
189 | " .previous \n" | 189 | " .previous \n" |
190 | " .set pop \n" | 190 | " .set pop \n" |
191 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 191 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
192 | [ticket] "=&r" (tmp), | 192 | [ticket] "=&r" (tmp), |
193 | [my_ticket] "=&r" (tmp2), | 193 | [my_ticket] "=&r" (tmp2), |
194 | [now_serving] "=&r" (tmp3) | 194 | [now_serving] "=&r" (tmp3) |
@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) | |||
235 | " beqzl %1, 1b \n" | 235 | " beqzl %1, 1b \n" |
236 | " nop \n" | 236 | " nop \n" |
237 | " .set reorder \n" | 237 | " .set reorder \n" |
238 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 238 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
239 | : GCC_OFF12_ASM() (rw->lock) | 239 | : GCC_OFF_SMALL_ASM() (rw->lock) |
240 | : "memory"); | 240 | : "memory"); |
241 | } else { | 241 | } else { |
242 | do { | 242 | do { |
@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) | |||
245 | " bltz %1, 1b \n" | 245 | " bltz %1, 1b \n" |
246 | " addu %1, 1 \n" | 246 | " addu %1, 1 \n" |
247 | "2: sc %1, %0 \n" | 247 | "2: sc %1, %0 \n" |
248 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 248 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
249 | : GCC_OFF12_ASM() (rw->lock) | 249 | : GCC_OFF_SMALL_ASM() (rw->lock) |
250 | : "memory"); | 250 | : "memory"); |
251 | } while (unlikely(!tmp)); | 251 | } while (unlikely(!tmp)); |
252 | } | 252 | } |
@@ -254,9 +254,6 @@ static inline void arch_read_lock(arch_rwlock_t *rw) | |||
254 | smp_llsc_mb(); | 254 | smp_llsc_mb(); |
255 | } | 255 | } |
256 | 256 | ||
257 | /* Note the use of sub, not subu which will make the kernel die with an | ||
258 | overflow exception if we ever try to unlock an rwlock that is already | ||
259 | unlocked or is being held by a writer. */ | ||
260 | static inline void arch_read_unlock(arch_rwlock_t *rw) | 257 | static inline void arch_read_unlock(arch_rwlock_t *rw) |
261 | { | 258 | { |
262 | unsigned int tmp; | 259 | unsigned int tmp; |
@@ -266,20 +263,20 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) | |||
266 | if (R10000_LLSC_WAR) { | 263 | if (R10000_LLSC_WAR) { |
267 | __asm__ __volatile__( | 264 | __asm__ __volatile__( |
268 | "1: ll %1, %2 # arch_read_unlock \n" | 265 | "1: ll %1, %2 # arch_read_unlock \n" |
269 | " sub %1, 1 \n" | 266 | " addiu %1, 1 \n" |
270 | " sc %1, %0 \n" | 267 | " sc %1, %0 \n" |
271 | " beqzl %1, 1b \n" | 268 | " beqzl %1, 1b \n" |
272 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 269 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
273 | : GCC_OFF12_ASM() (rw->lock) | 270 | : GCC_OFF_SMALL_ASM() (rw->lock) |
274 | : "memory"); | 271 | : "memory"); |
275 | } else { | 272 | } else { |
276 | do { | 273 | do { |
277 | __asm__ __volatile__( | 274 | __asm__ __volatile__( |
278 | "1: ll %1, %2 # arch_read_unlock \n" | 275 | "1: ll %1, %2 # arch_read_unlock \n" |
279 | " sub %1, 1 \n" | 276 | " addiu %1, -1 \n" |
280 | " sc %1, %0 \n" | 277 | " sc %1, %0 \n" |
281 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 278 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
282 | : GCC_OFF12_ASM() (rw->lock) | 279 | : GCC_OFF_SMALL_ASM() (rw->lock) |
283 | : "memory"); | 280 | : "memory"); |
284 | } while (unlikely(!tmp)); | 281 | } while (unlikely(!tmp)); |
285 | } | 282 | } |
@@ -299,8 +296,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) | |||
299 | " beqzl %1, 1b \n" | 296 | " beqzl %1, 1b \n" |
300 | " nop \n" | 297 | " nop \n" |
301 | " .set reorder \n" | 298 | " .set reorder \n" |
302 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 299 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
303 | : GCC_OFF12_ASM() (rw->lock) | 300 | : GCC_OFF_SMALL_ASM() (rw->lock) |
304 | : "memory"); | 301 | : "memory"); |
305 | } else { | 302 | } else { |
306 | do { | 303 | do { |
@@ -309,8 +306,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) | |||
309 | " bnez %1, 1b \n" | 306 | " bnez %1, 1b \n" |
310 | " lui %1, 0x8000 \n" | 307 | " lui %1, 0x8000 \n" |
311 | "2: sc %1, %0 \n" | 308 | "2: sc %1, %0 \n" |
312 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 309 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
313 | : GCC_OFF12_ASM() (rw->lock) | 310 | : GCC_OFF_SMALL_ASM() (rw->lock) |
314 | : "memory"); | 311 | : "memory"); |
315 | } while (unlikely(!tmp)); | 312 | } while (unlikely(!tmp)); |
316 | } | 313 | } |
@@ -349,8 +346,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) | |||
349 | __WEAK_LLSC_MB | 346 | __WEAK_LLSC_MB |
350 | " li %2, 1 \n" | 347 | " li %2, 1 \n" |
351 | "2: \n" | 348 | "2: \n" |
352 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 349 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
353 | : GCC_OFF12_ASM() (rw->lock) | 350 | : GCC_OFF_SMALL_ASM() (rw->lock) |
354 | : "memory"); | 351 | : "memory"); |
355 | } else { | 352 | } else { |
356 | __asm__ __volatile__( | 353 | __asm__ __volatile__( |
@@ -366,8 +363,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) | |||
366 | __WEAK_LLSC_MB | 363 | __WEAK_LLSC_MB |
367 | " li %2, 1 \n" | 364 | " li %2, 1 \n" |
368 | "2: \n" | 365 | "2: \n" |
369 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 366 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
370 | : GCC_OFF12_ASM() (rw->lock) | 367 | : GCC_OFF_SMALL_ASM() (rw->lock) |
371 | : "memory"); | 368 | : "memory"); |
372 | } | 369 | } |
373 | 370 | ||
@@ -393,8 +390,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) | |||
393 | " li %2, 1 \n" | 390 | " li %2, 1 \n" |
394 | " .set reorder \n" | 391 | " .set reorder \n" |
395 | "2: \n" | 392 | "2: \n" |
396 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 393 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
397 | : GCC_OFF12_ASM() (rw->lock) | 394 | : GCC_OFF_SMALL_ASM() (rw->lock) |
398 | : "memory"); | 395 | : "memory"); |
399 | } else { | 396 | } else { |
400 | do { | 397 | do { |
@@ -406,9 +403,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) | |||
406 | " sc %1, %0 \n" | 403 | " sc %1, %0 \n" |
407 | " li %2, 1 \n" | 404 | " li %2, 1 \n" |
408 | "2: \n" | 405 | "2: \n" |
409 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), | 406 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), |
410 | "=&r" (ret) | 407 | "=&r" (ret) |
411 | : GCC_OFF12_ASM() (rw->lock) | 408 | : GCC_OFF_SMALL_ASM() (rw->lock) |
412 | : "memory"); | 409 | : "memory"); |
413 | } while (unlikely(!tmp)); | 410 | } while (unlikely(!tmp)); |
414 | 411 | ||
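Behind the constraint churn, the rwlock encoding these fragments implement is simple: bit 31 of lock marks a writer (the lui %1, 0x8000 above) and the low bits count readers, so read-unlock is a plain decrement. That is also why the overflow-trapping sub gives way to addiu %1, -1. A C-level model with GCC __atomic builtins standing in for the ll/sc retry loops:

	#include <stdint.h>
	#include <stdbool.h>

	#define WRITER_BIT	0x80000000u

	static inline void read_lock_sketch(uint32_t *lock)
	{
		uint32_t old = __atomic_load_n(lock, __ATOMIC_RELAXED);

		do {
			/* only attempt while no writer (the "bltz %1, 1b") */
			old &= ~WRITER_BIT;
		} while (!__atomic_compare_exchange_n(lock, &old, old + 1,
						      false, __ATOMIC_ACQUIRE,
						      __ATOMIC_RELAXED));
	}

	static inline void read_unlock_sketch(uint32_t *lock)
	{
		/* the "addiu %1, -1" in the unlock loop: drop one reader */
		__atomic_fetch_sub(lock, 1, __ATOMIC_RELEASE);
	}

	static inline bool write_trylock_sketch(uint32_t *lock)
	{
		uint32_t expected = 0;	/* no readers, no writer */

		return __atomic_compare_exchange_n(lock, &expected, WRITER_BIT,
						   false, __ATOMIC_ACQUIRE,
						   __ATOMIC_RELAXED);
	}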
diff --git a/arch/mips/include/asm/spram.h b/arch/mips/include/asm/spram.h index 0b89006e4907..0f90d88e464d 100644 --- a/arch/mips/include/asm/spram.h +++ b/arch/mips/include/asm/spram.h | |||
@@ -1,10 +1,10 @@ | |||
1 | #ifndef _MIPS_SPRAM_H | 1 | #ifndef _MIPS_SPRAM_H |
2 | #define _MIPS_SPRAM_H | 2 | #define _MIPS_SPRAM_H |
3 | 3 | ||
4 | #ifdef CONFIG_CPU_MIPSR2 | 4 | #if defined(CONFIG_MIPS_SPRAM) |
5 | extern __init void spram_config(void); | 5 | extern __init void spram_config(void); |
6 | #else | 6 | #else |
7 | static inline void spram_config(void) { } | 7 | static inline void spram_config(void) { } |
8 | #endif /* CONFIG_CPU_MIPSR2 */ | 8 | #endif /* CONFIG_MIPS_SPRAM */ |
9 | 9 | ||
10 | #endif /* _MIPS_SPRAM_H */ | 10 | #endif /* _MIPS_SPRAM_H */ |
diff --git a/arch/mips/include/asm/stackframe.h b/arch/mips/include/asm/stackframe.h index b188c797565c..28d6d9364bd1 100644 --- a/arch/mips/include/asm/stackframe.h +++ b/arch/mips/include/asm/stackframe.h | |||
@@ -40,7 +40,7 @@ | |||
40 | LONG_S v1, PT_HI(sp) | 40 | LONG_S v1, PT_HI(sp) |
41 | mflhxu v1 | 41 | mflhxu v1 |
42 | LONG_S v1, PT_ACX(sp) | 42 | LONG_S v1, PT_ACX(sp) |
43 | #else | 43 | #elif !defined(CONFIG_CPU_MIPSR6) |
44 | mfhi v1 | 44 | mfhi v1 |
45 | #endif | 45 | #endif |
46 | #ifdef CONFIG_32BIT | 46 | #ifdef CONFIG_32BIT |
@@ -50,7 +50,7 @@ | |||
50 | LONG_S $10, PT_R10(sp) | 50 | LONG_S $10, PT_R10(sp) |
51 | LONG_S $11, PT_R11(sp) | 51 | LONG_S $11, PT_R11(sp) |
52 | LONG_S $12, PT_R12(sp) | 52 | LONG_S $12, PT_R12(sp) |
53 | #ifndef CONFIG_CPU_HAS_SMARTMIPS | 53 | #if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6) |
54 | LONG_S v1, PT_HI(sp) | 54 | LONG_S v1, PT_HI(sp) |
55 | mflo v1 | 55 | mflo v1 |
56 | #endif | 56 | #endif |
@@ -58,7 +58,7 @@ | |||
58 | LONG_S $14, PT_R14(sp) | 58 | LONG_S $14, PT_R14(sp) |
59 | LONG_S $15, PT_R15(sp) | 59 | LONG_S $15, PT_R15(sp) |
60 | LONG_S $24, PT_R24(sp) | 60 | LONG_S $24, PT_R24(sp) |
61 | #ifndef CONFIG_CPU_HAS_SMARTMIPS | 61 | #if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6) |
62 | LONG_S v1, PT_LO(sp) | 62 | LONG_S v1, PT_LO(sp) |
63 | #endif | 63 | #endif |
64 | #ifdef CONFIG_CPU_CAVIUM_OCTEON | 64 | #ifdef CONFIG_CPU_CAVIUM_OCTEON |
@@ -226,7 +226,7 @@ | |||
226 | mtlhx $24 | 226 | mtlhx $24 |
227 | LONG_L $24, PT_LO(sp) | 227 | LONG_L $24, PT_LO(sp) |
228 | mtlhx $24 | 228 | mtlhx $24 |
229 | #else | 229 | #elif !defined(CONFIG_CPU_MIPSR6) |
230 | LONG_L $24, PT_LO(sp) | 230 | LONG_L $24, PT_LO(sp) |
231 | mtlo $24 | 231 | mtlo $24 |
232 | LONG_L $24, PT_HI(sp) | 232 | LONG_L $24, PT_HI(sp) |
diff --git a/arch/mips/include/asm/switch_to.h b/arch/mips/include/asm/switch_to.h index b928b6f898cd..e92d6c4b5ed1 100644 --- a/arch/mips/include/asm/switch_to.h +++ b/arch/mips/include/asm/switch_to.h | |||
@@ -75,9 +75,12 @@ do { \ | |||
75 | #endif | 75 | #endif |
76 | 76 | ||
77 | #define __clear_software_ll_bit() \ | 77 | #define __clear_software_ll_bit() \ |
78 | do { \ | 78 | do { if (cpu_has_rw_llb) { \ |
79 | if (!__builtin_constant_p(cpu_has_llsc) || !cpu_has_llsc) \ | 79 | write_c0_lladdr(0); \ |
80 | ll_bit = 0; \ | 80 | } else { \ |
81 | if (!__builtin_constant_p(cpu_has_llsc) || !cpu_has_llsc)\ | ||
82 | ll_bit = 0; \ | ||
83 | } \ | ||
81 | } while (0) | 84 | } while (0) |
82 | 85 | ||
83 | #define switch_to(prev, next, last) \ | 86 | #define switch_to(prev, next, last) \ |
diff --git a/arch/mips/include/asm/thread_info.h b/arch/mips/include/asm/thread_info.h index 99eea59604e9..fb68fd2714fb 100644 --- a/arch/mips/include/asm/thread_info.h +++ b/arch/mips/include/asm/thread_info.h | |||
@@ -28,7 +28,7 @@ struct thread_info { | |||
28 | unsigned long tp_value; /* thread pointer */ | 28 | unsigned long tp_value; /* thread pointer */ |
29 | __u32 cpu; /* current CPU */ | 29 | __u32 cpu; /* current CPU */ |
30 | int preempt_count; /* 0 => preemptable, <0 => BUG */ | 30 | int preempt_count; /* 0 => preemptable, <0 => BUG */ |
31 | 31 | int r2_emul_return; /* 1 => Returning from R2 emulator */ | |
32 | mm_segment_t addr_limit; /* | 32 | mm_segment_t addr_limit; /* |
33 | * thread address space limit: | 33 | * thread address space limit: |
34 | * 0x7fffffff for user-thread | 34 | * 0x7fffffff for user-thread |
diff --git a/arch/mips/include/uapi/asm/inst.h b/arch/mips/include/uapi/asm/inst.h index 89c22433b1c6..fc0cf5ac0cf7 100644 --- a/arch/mips/include/uapi/asm/inst.h +++ b/arch/mips/include/uapi/asm/inst.h | |||
@@ -21,20 +21,20 @@ | |||
21 | enum major_op { | 21 | enum major_op { |
22 | spec_op, bcond_op, j_op, jal_op, | 22 | spec_op, bcond_op, j_op, jal_op, |
23 | beq_op, bne_op, blez_op, bgtz_op, | 23 | beq_op, bne_op, blez_op, bgtz_op, |
24 | addi_op, addiu_op, slti_op, sltiu_op, | 24 | addi_op, cbcond0_op = addi_op, addiu_op, slti_op, sltiu_op, |
25 | andi_op, ori_op, xori_op, lui_op, | 25 | andi_op, ori_op, xori_op, lui_op, |
26 | cop0_op, cop1_op, cop2_op, cop1x_op, | 26 | cop0_op, cop1_op, cop2_op, cop1x_op, |
27 | beql_op, bnel_op, blezl_op, bgtzl_op, | 27 | beql_op, bnel_op, blezl_op, bgtzl_op, |
28 | daddi_op, daddiu_op, ldl_op, ldr_op, | 28 | daddi_op, cbcond1_op = daddi_op, daddiu_op, ldl_op, ldr_op, |
29 | spec2_op, jalx_op, mdmx_op, spec3_op, | 29 | spec2_op, jalx_op, mdmx_op, spec3_op, |
30 | lb_op, lh_op, lwl_op, lw_op, | 30 | lb_op, lh_op, lwl_op, lw_op, |
31 | lbu_op, lhu_op, lwr_op, lwu_op, | 31 | lbu_op, lhu_op, lwr_op, lwu_op, |
32 | sb_op, sh_op, swl_op, sw_op, | 32 | sb_op, sh_op, swl_op, sw_op, |
33 | sdl_op, sdr_op, swr_op, cache_op, | 33 | sdl_op, sdr_op, swr_op, cache_op, |
34 | ll_op, lwc1_op, lwc2_op, pref_op, | 34 | ll_op, lwc1_op, lwc2_op, bc6_op = lwc2_op, pref_op, |
35 | lld_op, ldc1_op, ldc2_op, ld_op, | 35 | lld_op, ldc1_op, ldc2_op, beqzcjic_op = ldc2_op, ld_op, |
36 | sc_op, swc1_op, swc2_op, major_3b_op, | 36 | sc_op, swc1_op, swc2_op, balc6_op = swc2_op, major_3b_op, |
37 | scd_op, sdc1_op, sdc2_op, sd_op | 37 | scd_op, sdc1_op, sdc2_op, bnezcjialc_op = sdc2_op, sd_op |
38 | }; | 38 | }; |
39 | 39 | ||
40 | /* | 40 | /* |
@@ -83,9 +83,12 @@ enum spec3_op { | |||
83 | swe_op = 0x1f, bshfl_op = 0x20, | 83 | swe_op = 0x1f, bshfl_op = 0x20, |
84 | swle_op = 0x21, swre_op = 0x22, | 84 | swle_op = 0x21, swre_op = 0x22, |
85 | prefe_op = 0x23, dbshfl_op = 0x24, | 85 | prefe_op = 0x23, dbshfl_op = 0x24, |
86 | lbue_op = 0x28, lhue_op = 0x29, | 86 | cache6_op = 0x25, sc6_op = 0x26, |
87 | lbe_op = 0x2c, lhe_op = 0x2d, | 87 | scd6_op = 0x27, lbue_op = 0x28, |
88 | lle_op = 0x2e, lwe_op = 0x2f, | 88 | lhue_op = 0x29, lbe_op = 0x2c, |
89 | lhe_op = 0x2d, lle_op = 0x2e, | ||
90 | lwe_op = 0x2f, pref6_op = 0x35, | ||
91 | ll6_op = 0x36, lld6_op = 0x37, | ||
89 | rdhwr_op = 0x3b | 92 | rdhwr_op = 0x3b |
90 | }; | 93 | }; |
91 | 94 | ||
@@ -112,7 +115,8 @@ enum cop_op { | |||
112 | mfhc_op = 0x03, mtc_op = 0x04, | 115 | mfhc_op = 0x03, mtc_op = 0x04, |
113 | dmtc_op = 0x05, ctc_op = 0x06, | 116 | dmtc_op = 0x05, ctc_op = 0x06, |
114 | mthc0_op = 0x06, mthc_op = 0x07, | 117 | mthc0_op = 0x06, mthc_op = 0x07, |
115 | bc_op = 0x08, cop_op = 0x10, | 118 | bc_op = 0x08, bc1eqz_op = 0x09, |
119 | bc1nez_op = 0x0d, cop_op = 0x10, | ||
116 | copm_op = 0x18 | 120 | copm_op = 0x18 |
117 | }; | 121 | }; |
118 | 122 | ||
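The new enumerator aliases document how R6 recycles the opcodes of removed instructions: the COP2 load/store slots become compact branches (lwc2 becomes bc, swc2 becomes balc, ldc2 becomes beqzc/jic, sdc2 becomes bnezc/jialc) and the addi/daddi slots become the cbcond compact-compare-and-branch groups. A small user-space sketch of ISA-dependent classification (simplified; a real decoder also inspects the rs/rt fields to tell beqzc from jic and so on):

	#include <stdint.h>
	#include <stdio.h>

	/* Major opcode values matching the enum above: lwc2 = 0x32,
	 * ldc2 = 0x36, swc2 = 0x3a, sdc2 = 0x3e. */
	static const char *classify(uint32_t insn, int is_r6)
	{
		switch (insn >> 26) {
		case 0x32: return is_r6 ? "bc"          : "lwc2";
		case 0x36: return is_r6 ? "beqzc/jic"   : "ldc2";
		case 0x3a: return is_r6 ? "balc"        : "swc2";
		case 0x3e: return is_r6 ? "bnezc/jialc" : "sdc2";
		default:   return "other";
		}
	}

	int main(void)
	{
		uint32_t word = 0xc8000010;	/* major opcode 0x32 */

		printf("pre-R6: %s, R6: %s\n",
		       classify(word, 0), classify(word, 1));
		return 0;
	}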