about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorPaul Burton <paul.burton@imgtec.com>2014-02-13 06:27:42 -0500
committerRalf Baechle <ralf@linux-mips.org>2014-03-26 18:09:11 -0400
commiteec43a224cf198c7e3538fca16f689e4d17d4471 (patch)
tree24d9236c0cd33dcf7862c09799910e8bbf5abde9
parenta8ad136789b4256fa372d59daaddb91b72aa0753 (diff)
MIPS: Save/restore MSA context around signals
This patch extends sigcontext in order to hold the most significant 64
bits of each vector register in addition to the MSA control & status
register. The least significant 64 bits are already saved as the scalar
FP context. This makes things a little awkward since the least & most
significant 64 bits of each vector register are not contiguous in memory.
Thus the copy_u & insert instructions are used to transfer the values of
the most significant 64 bits via GP registers.

Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/6533/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
-rw-r--r--arch/mips/include/asm/sigcontext.h2
-rw-r--r--arch/mips/include/uapi/asm/sigcontext.h8
-rw-r--r--arch/mips/kernel/asm-offsets.c3
-rw-r--r--arch/mips/kernel/r4k_fpu.S213
-rw-r--r--arch/mips/kernel/signal.c73
-rw-r--r--arch/mips/kernel/signal32.c74
6 files changed, 357 insertions, 16 deletions
diff --git a/arch/mips/include/asm/sigcontext.h b/arch/mips/include/asm/sigcontext.h
index eeeb0f48c767..f54bdbe85c0d 100644
--- a/arch/mips/include/asm/sigcontext.h
+++ b/arch/mips/include/asm/sigcontext.h
@@ -32,6 +32,8 @@ struct sigcontext32 {
32 __u32 sc_lo2; 32 __u32 sc_lo2;
33 __u32 sc_hi3; 33 __u32 sc_hi3;
34 __u32 sc_lo3; 34 __u32 sc_lo3;
35 __u64 sc_msaregs[32]; /* Most significant 64 bits */
36 __u32 sc_msa_csr;
35}; 37};
36#endif /* _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32 */ 38#endif /* _MIPS_SIM == _MIPS_SIM_ABI64 || _MIPS_SIM == _MIPS_SIM_NABI32 */
37#endif /* _ASM_SIGCONTEXT_H */ 39#endif /* _ASM_SIGCONTEXT_H */
diff --git a/arch/mips/include/uapi/asm/sigcontext.h b/arch/mips/include/uapi/asm/sigcontext.h
index 6c9906f59c6e..681c17603a48 100644
--- a/arch/mips/include/uapi/asm/sigcontext.h
+++ b/arch/mips/include/uapi/asm/sigcontext.h
@@ -12,6 +12,10 @@
12#include <linux/types.h> 12#include <linux/types.h>
13#include <asm/sgidefs.h> 13#include <asm/sgidefs.h>
14 14
15/* Bits which may be set in sc_used_math */
16#define USEDMATH_FP (1 << 0)
17#define USEDMATH_MSA (1 << 1)
18
15#if _MIPS_SIM == _MIPS_SIM_ABI32 19#if _MIPS_SIM == _MIPS_SIM_ABI32
16 20
17/* 21/*
@@ -37,6 +41,8 @@ struct sigcontext {
37 unsigned long sc_lo2; 41 unsigned long sc_lo2;
38 unsigned long sc_hi3; 42 unsigned long sc_hi3;
39 unsigned long sc_lo3; 43 unsigned long sc_lo3;
44 unsigned long long sc_msaregs[32]; /* Most significant 64 bits */
45 unsigned long sc_msa_csr;
40}; 46};
41 47
42#endif /* _MIPS_SIM == _MIPS_SIM_ABI32 */ 48#endif /* _MIPS_SIM == _MIPS_SIM_ABI32 */
@@ -70,6 +76,8 @@ struct sigcontext {
70 __u32 sc_used_math; 76 __u32 sc_used_math;
71 __u32 sc_dsp; 77 __u32 sc_dsp;
72 __u32 sc_reserved; 78 __u32 sc_reserved;
79 __u64 sc_msaregs[32];
80 __u32 sc_msa_csr;
73}; 81};
74 82
75 83
diff --git a/arch/mips/kernel/asm-offsets.c b/arch/mips/kernel/asm-offsets.c
index 7ff80622c8d9..0ea75c244b48 100644
--- a/arch/mips/kernel/asm-offsets.c
+++ b/arch/mips/kernel/asm-offsets.c
@@ -295,6 +295,7 @@ void output_sc_defines(void)
295 OFFSET(SC_LO2, sigcontext, sc_lo2); 295 OFFSET(SC_LO2, sigcontext, sc_lo2);
296 OFFSET(SC_HI3, sigcontext, sc_hi3); 296 OFFSET(SC_HI3, sigcontext, sc_hi3);
297 OFFSET(SC_LO3, sigcontext, sc_lo3); 297 OFFSET(SC_LO3, sigcontext, sc_lo3);
298 OFFSET(SC_MSAREGS, sigcontext, sc_msaregs);
298 BLANK(); 299 BLANK();
299} 300}
300#endif 301#endif
@@ -309,6 +310,7 @@ void output_sc_defines(void)
309 OFFSET(SC_MDLO, sigcontext, sc_mdlo); 310 OFFSET(SC_MDLO, sigcontext, sc_mdlo);
310 OFFSET(SC_PC, sigcontext, sc_pc); 311 OFFSET(SC_PC, sigcontext, sc_pc);
311 OFFSET(SC_FPC_CSR, sigcontext, sc_fpc_csr); 312 OFFSET(SC_FPC_CSR, sigcontext, sc_fpc_csr);
313 OFFSET(SC_MSAREGS, sigcontext, sc_msaregs);
312 BLANK(); 314 BLANK();
313} 315}
314#endif 316#endif
@@ -320,6 +322,7 @@ void output_sc32_defines(void)
320 OFFSET(SC32_FPREGS, sigcontext32, sc_fpregs); 322 OFFSET(SC32_FPREGS, sigcontext32, sc_fpregs);
321 OFFSET(SC32_FPC_CSR, sigcontext32, sc_fpc_csr); 323 OFFSET(SC32_FPC_CSR, sigcontext32, sc_fpc_csr);
322 OFFSET(SC32_FPC_EIR, sigcontext32, sc_fpc_eir); 324 OFFSET(SC32_FPC_EIR, sigcontext32, sc_fpc_eir);
325 OFFSET(SC32_MSAREGS, sigcontext32, sc_msaregs);
323 BLANK(); 326 BLANK();
324} 327}
325#endif 328#endif
diff --git a/arch/mips/kernel/r4k_fpu.S b/arch/mips/kernel/r4k_fpu.S
index 253b2fb52026..752b50a69264 100644
--- a/arch/mips/kernel/r4k_fpu.S
+++ b/arch/mips/kernel/r4k_fpu.S
@@ -13,6 +13,7 @@
13 * Copyright (C) 1999, 2001 Silicon Graphics, Inc. 13 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
14 */ 14 */
15#include <asm/asm.h> 15#include <asm/asm.h>
16#include <asm/asmmacro.h>
16#include <asm/errno.h> 17#include <asm/errno.h>
17#include <asm/fpregdef.h> 18#include <asm/fpregdef.h>
18#include <asm/mipsregs.h> 19#include <asm/mipsregs.h>
@@ -245,6 +246,218 @@ LEAF(_restore_fp_context32)
245 END(_restore_fp_context32) 246 END(_restore_fp_context32)
246#endif 247#endif
247 248
249#ifdef CONFIG_CPU_HAS_MSA
250
251 .macro save_sc_msareg wr, off, sc, tmp
252#ifdef CONFIG_64BIT
253 copy_u_d \tmp, \wr, 1
254 EX sd \tmp, (\off+(\wr*8))(\sc)
255#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
256 copy_u_w \tmp, \wr, 2
257 EX sw \tmp, (\off+(\wr*8)+0)(\sc)
258 copy_u_w \tmp, \wr, 3
259 EX sw \tmp, (\off+(\wr*8)+4)(\sc)
260#else /* CONFIG_CPU_BIG_ENDIAN */
261 copy_u_w \tmp, \wr, 2
262 EX sw \tmp, (\off+(\wr*8)+4)(\sc)
263 copy_u_w \tmp, \wr, 3
264 EX sw \tmp, (\off+(\wr*8)+0)(\sc)
265#endif
266 .endm
267
268/*
269 * int _save_msa_context(struct sigcontext *sc)
270 *
271 * Save the upper 64 bits of each vector register along with the MSA_CSR
272 * register into sc. Returns zero on success, else non-zero.
273 */
274LEAF(_save_msa_context)
275 save_sc_msareg 0, SC_MSAREGS, a0, t0
276 save_sc_msareg 1, SC_MSAREGS, a0, t0
277 save_sc_msareg 2, SC_MSAREGS, a0, t0
278 save_sc_msareg 3, SC_MSAREGS, a0, t0
279 save_sc_msareg 4, SC_MSAREGS, a0, t0
280 save_sc_msareg 5, SC_MSAREGS, a0, t0
281 save_sc_msareg 6, SC_MSAREGS, a0, t0
282 save_sc_msareg 7, SC_MSAREGS, a0, t0
283 save_sc_msareg 8, SC_MSAREGS, a0, t0
284 save_sc_msareg 9, SC_MSAREGS, a0, t0
285 save_sc_msareg 10, SC_MSAREGS, a0, t0
286 save_sc_msareg 11, SC_MSAREGS, a0, t0
287 save_sc_msareg 12, SC_MSAREGS, a0, t0
288 save_sc_msareg 13, SC_MSAREGS, a0, t0
289 save_sc_msareg 14, SC_MSAREGS, a0, t0
290 save_sc_msareg 15, SC_MSAREGS, a0, t0
291 save_sc_msareg 16, SC_MSAREGS, a0, t0
292 save_sc_msareg 17, SC_MSAREGS, a0, t0
293 save_sc_msareg 18, SC_MSAREGS, a0, t0
294 save_sc_msareg 19, SC_MSAREGS, a0, t0
295 save_sc_msareg 20, SC_MSAREGS, a0, t0
296 save_sc_msareg 21, SC_MSAREGS, a0, t0
297 save_sc_msareg 22, SC_MSAREGS, a0, t0
298 save_sc_msareg 23, SC_MSAREGS, a0, t0
299 save_sc_msareg 24, SC_MSAREGS, a0, t0
300 save_sc_msareg 25, SC_MSAREGS, a0, t0
301 save_sc_msareg 26, SC_MSAREGS, a0, t0
302 save_sc_msareg 27, SC_MSAREGS, a0, t0
303 save_sc_msareg 28, SC_MSAREGS, a0, t0
304 save_sc_msareg 29, SC_MSAREGS, a0, t0
305 save_sc_msareg 30, SC_MSAREGS, a0, t0
306 save_sc_msareg 31, SC_MSAREGS, a0, t0
307 jr ra
308 li v0, 0
309 END(_save_msa_context)
310
311#ifdef CONFIG_MIPS32_COMPAT
312
313/*
314 * int _save_msa_context32(struct sigcontext32 *sc)
315 *
316 * Save the upper 64 bits of each vector register along with the MSA_CSR
317 * register into sc. Returns zero on success, else non-zero.
318 */
319LEAF(_save_msa_context32)
320 save_sc_msareg 0, SC32_MSAREGS, a0, t0
321 save_sc_msareg 1, SC32_MSAREGS, a0, t0
322 save_sc_msareg 2, SC32_MSAREGS, a0, t0
323 save_sc_msareg 3, SC32_MSAREGS, a0, t0
324 save_sc_msareg 4, SC32_MSAREGS, a0, t0
325 save_sc_msareg 5, SC32_MSAREGS, a0, t0
326 save_sc_msareg 6, SC32_MSAREGS, a0, t0
327 save_sc_msareg 7, SC32_MSAREGS, a0, t0
328 save_sc_msareg 8, SC32_MSAREGS, a0, t0
329 save_sc_msareg 9, SC32_MSAREGS, a0, t0
330 save_sc_msareg 10, SC32_MSAREGS, a0, t0
331 save_sc_msareg 11, SC32_MSAREGS, a0, t0
332 save_sc_msareg 12, SC32_MSAREGS, a0, t0
333 save_sc_msareg 13, SC32_MSAREGS, a0, t0
334 save_sc_msareg 14, SC32_MSAREGS, a0, t0
335 save_sc_msareg 15, SC32_MSAREGS, a0, t0
336 save_sc_msareg 16, SC32_MSAREGS, a0, t0
337 save_sc_msareg 17, SC32_MSAREGS, a0, t0
338 save_sc_msareg 18, SC32_MSAREGS, a0, t0
339 save_sc_msareg 19, SC32_MSAREGS, a0, t0
340 save_sc_msareg 20, SC32_MSAREGS, a0, t0
341 save_sc_msareg 21, SC32_MSAREGS, a0, t0
342 save_sc_msareg 22, SC32_MSAREGS, a0, t0
343 save_sc_msareg 23, SC32_MSAREGS, a0, t0
344 save_sc_msareg 24, SC32_MSAREGS, a0, t0
345 save_sc_msareg 25, SC32_MSAREGS, a0, t0
346 save_sc_msareg 26, SC32_MSAREGS, a0, t0
347 save_sc_msareg 27, SC32_MSAREGS, a0, t0
348 save_sc_msareg 28, SC32_MSAREGS, a0, t0
349 save_sc_msareg 29, SC32_MSAREGS, a0, t0
350 save_sc_msareg 30, SC32_MSAREGS, a0, t0
351 save_sc_msareg 31, SC32_MSAREGS, a0, t0
352 jr ra
353 li v0, 0
354 END(_save_msa_context32)
355
356#endif /* CONFIG_MIPS32_COMPAT */
357
358 .macro restore_sc_msareg wr, off, sc, tmp
359#ifdef CONFIG_64BIT
360 EX ld \tmp, (\off+(\wr*8))(\sc)
361 insert_d \wr, 1, \tmp
362#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
363 EX lw \tmp, (\off+(\wr*8)+0)(\sc)
364 insert_w \wr, 2, \tmp
365 EX lw \tmp, (\off+(\wr*8)+4)(\sc)
366 insert_w \wr, 3, \tmp
367#else /* CONFIG_CPU_BIG_ENDIAN */
368 EX lw \tmp, (\off+(\wr*8)+4)(\sc)
369 insert_w \wr, 2, \tmp
370 EX lw \tmp, (\off+(\wr*8)+0)(\sc)
371 insert_w \wr, 3, \tmp
372#endif
373 .endm
374
375/*
376 * int _restore_msa_context(struct sigcontext *sc)
377 */
378LEAF(_restore_msa_context)
379 restore_sc_msareg 0, SC_MSAREGS, a0, t0
380 restore_sc_msareg 1, SC_MSAREGS, a0, t0
381 restore_sc_msareg 2, SC_MSAREGS, a0, t0
382 restore_sc_msareg 3, SC_MSAREGS, a0, t0
383 restore_sc_msareg 4, SC_MSAREGS, a0, t0
384 restore_sc_msareg 5, SC_MSAREGS, a0, t0
385 restore_sc_msareg 6, SC_MSAREGS, a0, t0
386 restore_sc_msareg 7, SC_MSAREGS, a0, t0
387 restore_sc_msareg 8, SC_MSAREGS, a0, t0
388 restore_sc_msareg 9, SC_MSAREGS, a0, t0
389 restore_sc_msareg 10, SC_MSAREGS, a0, t0
390 restore_sc_msareg 11, SC_MSAREGS, a0, t0
391 restore_sc_msareg 12, SC_MSAREGS, a0, t0
392 restore_sc_msareg 13, SC_MSAREGS, a0, t0
393 restore_sc_msareg 14, SC_MSAREGS, a0, t0
394 restore_sc_msareg 15, SC_MSAREGS, a0, t0
395 restore_sc_msareg 16, SC_MSAREGS, a0, t0
396 restore_sc_msareg 17, SC_MSAREGS, a0, t0
397 restore_sc_msareg 18, SC_MSAREGS, a0, t0
398 restore_sc_msareg 19, SC_MSAREGS, a0, t0
399 restore_sc_msareg 20, SC_MSAREGS, a0, t0
400 restore_sc_msareg 21, SC_MSAREGS, a0, t0
401 restore_sc_msareg 22, SC_MSAREGS, a0, t0
402 restore_sc_msareg 23, SC_MSAREGS, a0, t0
403 restore_sc_msareg 24, SC_MSAREGS, a0, t0
404 restore_sc_msareg 25, SC_MSAREGS, a0, t0
405 restore_sc_msareg 26, SC_MSAREGS, a0, t0
406 restore_sc_msareg 27, SC_MSAREGS, a0, t0
407 restore_sc_msareg 28, SC_MSAREGS, a0, t0
408 restore_sc_msareg 29, SC_MSAREGS, a0, t0
409 restore_sc_msareg 30, SC_MSAREGS, a0, t0
410 restore_sc_msareg 31, SC_MSAREGS, a0, t0
411 jr ra
412 li v0, 0
413 END(_restore_msa_context)
414
415#ifdef CONFIG_MIPS32_COMPAT
416
417/*
418 * int _restore_msa_context32(struct sigcontext32 *sc)
419 */
420LEAF(_restore_msa_context32)
421 restore_sc_msareg 0, SC32_MSAREGS, a0, t0
422 restore_sc_msareg 1, SC32_MSAREGS, a0, t0
423 restore_sc_msareg 2, SC32_MSAREGS, a0, t0
424 restore_sc_msareg 3, SC32_MSAREGS, a0, t0
425 restore_sc_msareg 4, SC32_MSAREGS, a0, t0
426 restore_sc_msareg 5, SC32_MSAREGS, a0, t0
427 restore_sc_msareg 6, SC32_MSAREGS, a0, t0
428 restore_sc_msareg 7, SC32_MSAREGS, a0, t0
429 restore_sc_msareg 8, SC32_MSAREGS, a0, t0
430 restore_sc_msareg 9, SC32_MSAREGS, a0, t0
431 restore_sc_msareg 10, SC32_MSAREGS, a0, t0
432 restore_sc_msareg 11, SC32_MSAREGS, a0, t0
433 restore_sc_msareg 12, SC32_MSAREGS, a0, t0
434 restore_sc_msareg 13, SC32_MSAREGS, a0, t0
435 restore_sc_msareg 14, SC32_MSAREGS, a0, t0
436 restore_sc_msareg 15, SC32_MSAREGS, a0, t0
437 restore_sc_msareg 16, SC32_MSAREGS, a0, t0
438 restore_sc_msareg 17, SC32_MSAREGS, a0, t0
439 restore_sc_msareg 18, SC32_MSAREGS, a0, t0
440 restore_sc_msareg 19, SC32_MSAREGS, a0, t0
441 restore_sc_msareg 20, SC32_MSAREGS, a0, t0
442 restore_sc_msareg 21, SC32_MSAREGS, a0, t0
443 restore_sc_msareg 22, SC32_MSAREGS, a0, t0
444 restore_sc_msareg 23, SC32_MSAREGS, a0, t0
445 restore_sc_msareg 24, SC32_MSAREGS, a0, t0
446 restore_sc_msareg 25, SC32_MSAREGS, a0, t0
447 restore_sc_msareg 26, SC32_MSAREGS, a0, t0
448 restore_sc_msareg 27, SC32_MSAREGS, a0, t0
449 restore_sc_msareg 28, SC32_MSAREGS, a0, t0
450 restore_sc_msareg 29, SC32_MSAREGS, a0, t0
451 restore_sc_msareg 30, SC32_MSAREGS, a0, t0
452 restore_sc_msareg 31, SC32_MSAREGS, a0, t0
453 jr ra
454 li v0, 0
455 END(_restore_msa_context32)
456
457#endif /* CONFIG_MIPS32_COMPAT */
458
459#endif /* CONFIG_CPU_HAS_MSA */
460
248 .set reorder 461 .set reorder
249 462
250 .type fault@function 463 .type fault@function
diff --git a/arch/mips/kernel/signal.c b/arch/mips/kernel/signal.c
index 0f97c7dc54e6..fd61700409bc 100644
--- a/arch/mips/kernel/signal.c
+++ b/arch/mips/kernel/signal.c
@@ -30,6 +30,7 @@
30#include <linux/bitops.h> 30#include <linux/bitops.h>
31#include <asm/cacheflush.h> 31#include <asm/cacheflush.h>
32#include <asm/fpu.h> 32#include <asm/fpu.h>
33#include <asm/msa.h>
33#include <asm/sim.h> 34#include <asm/sim.h>
34#include <asm/ucontext.h> 35#include <asm/ucontext.h>
35#include <asm/cpu-features.h> 36#include <asm/cpu-features.h>
@@ -46,6 +47,9 @@ static int (*restore_fp_context)(struct sigcontext __user *sc);
46extern asmlinkage int _save_fp_context(struct sigcontext __user *sc); 47extern asmlinkage int _save_fp_context(struct sigcontext __user *sc);
47extern asmlinkage int _restore_fp_context(struct sigcontext __user *sc); 48extern asmlinkage int _restore_fp_context(struct sigcontext __user *sc);
48 49
50extern asmlinkage int _save_msa_context(struct sigcontext __user *sc);
51extern asmlinkage int _restore_msa_context(struct sigcontext __user *sc);
52
49struct sigframe { 53struct sigframe {
50 u32 sf_ass[4]; /* argument save space for o32 */ 54 u32 sf_ass[4]; /* argument save space for o32 */
51 u32 sf_pad[2]; /* Was: signal trampoline */ 55 u32 sf_pad[2]; /* Was: signal trampoline */
@@ -95,19 +99,59 @@ static int copy_fp_from_sigcontext(struct sigcontext __user *sc)
95} 99}
96 100
97/* 101/*
102 * These functions will save only the upper 64 bits of the vector registers,
103 * since the lower 64 bits have already been saved as the scalar FP context.
104 */
105static int copy_msa_to_sigcontext(struct sigcontext __user *sc)
106{
107 int i;
108 int err = 0;
109
110 for (i = 0; i < NUM_FPU_REGS; i++) {
111 err |=
112 __put_user(get_fpr64(&current->thread.fpu.fpr[i], 1),
113 &sc->sc_msaregs[i]);
114 }
115 err |= __put_user(current->thread.fpu.msacsr, &sc->sc_msa_csr);
116
117 return err;
118}
119
120static int copy_msa_from_sigcontext(struct sigcontext __user *sc)
121{
122 int i;
123 int err = 0;
124 u64 val;
125
126 for (i = 0; i < NUM_FPU_REGS; i++) {
127 err |= __get_user(val, &sc->sc_msaregs[i]);
128 set_fpr64(&current->thread.fpu.fpr[i], 1, val);
129 }
130 err |= __get_user(current->thread.fpu.msacsr, &sc->sc_msa_csr);
131
132 return err;
133}
134
135/*
98 * Helper routines 136 * Helper routines
99 */ 137 */
100static int protected_save_fp_context(struct sigcontext __user *sc) 138static int protected_save_fp_context(struct sigcontext __user *sc,
139 unsigned used_math)
101{ 140{
102 int err; 141 int err;
142 bool save_msa = cpu_has_msa && (used_math & USEDMATH_MSA);
103 while (1) { 143 while (1) {
104 lock_fpu_owner(); 144 lock_fpu_owner();
105 if (is_fpu_owner()) { 145 if (is_fpu_owner()) {
106 err = save_fp_context(sc); 146 err = save_fp_context(sc);
147 if (save_msa && !err)
148 err = _save_msa_context(sc);
107 unlock_fpu_owner(); 149 unlock_fpu_owner();
108 } else { 150 } else {
109 unlock_fpu_owner(); 151 unlock_fpu_owner();
110 err = copy_fp_to_sigcontext(sc); 152 err = copy_fp_to_sigcontext(sc);
153 if (save_msa && !err)
154 err = copy_msa_to_sigcontext(sc);
111 } 155 }
112 if (likely(!err)) 156 if (likely(!err))
113 break; 157 break;
@@ -121,17 +165,28 @@ static int protected_save_fp_context(struct sigcontext __user *sc)
121 return err; 165 return err;
122} 166}
123 167
124static int protected_restore_fp_context(struct sigcontext __user *sc) 168static int protected_restore_fp_context(struct sigcontext __user *sc,
169 unsigned used_math)
125{ 170{
126 int err, tmp __maybe_unused; 171 int err, tmp __maybe_unused;
172 bool restore_msa = cpu_has_msa && (used_math & USEDMATH_MSA);
127 while (1) { 173 while (1) {
128 lock_fpu_owner(); 174 lock_fpu_owner();
129 if (is_fpu_owner()) { 175 if (is_fpu_owner()) {
130 err = restore_fp_context(sc); 176 err = restore_fp_context(sc);
177 if (restore_msa && !err) {
178 enable_msa();
179 err = _restore_msa_context(sc);
180 } else {
181 /* signal handler may have used MSA */
182 disable_msa();
183 }
131 unlock_fpu_owner(); 184 unlock_fpu_owner();
132 } else { 185 } else {
133 unlock_fpu_owner(); 186 unlock_fpu_owner();
134 err = copy_fp_from_sigcontext(sc); 187 err = copy_fp_from_sigcontext(sc);
188 if (!err && (used_math & USEDMATH_MSA))
189 err = copy_msa_from_sigcontext(sc);
135 } 190 }
136 if (likely(!err)) 191 if (likely(!err))
137 break; 192 break;
@@ -172,7 +227,8 @@ int setup_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
172 err |= __put_user(rddsp(DSP_MASK), &sc->sc_dsp); 227 err |= __put_user(rddsp(DSP_MASK), &sc->sc_dsp);
173 } 228 }
174 229
175 used_math = !!used_math(); 230 used_math = used_math() ? USEDMATH_FP : 0;
231 used_math |= thread_msa_context_live() ? USEDMATH_MSA : 0;
176 err |= __put_user(used_math, &sc->sc_used_math); 232 err |= __put_user(used_math, &sc->sc_used_math);
177 233
178 if (used_math) { 234 if (used_math) {
@@ -180,7 +236,7 @@ int setup_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
180 * Save FPU state to signal context. Signal handler 236 * Save FPU state to signal context. Signal handler
181 * will "inherit" current FPU state. 237 * will "inherit" current FPU state.
182 */ 238 */
183 err |= protected_save_fp_context(sc); 239 err |= protected_save_fp_context(sc, used_math);
184 } 240 }
185 return err; 241 return err;
186} 242}
@@ -205,14 +261,14 @@ int fpcsr_pending(unsigned int __user *fpcsr)
205} 261}
206 262
207static int 263static int
208check_and_restore_fp_context(struct sigcontext __user *sc) 264check_and_restore_fp_context(struct sigcontext __user *sc, unsigned used_math)
209{ 265{
210 int err, sig; 266 int err, sig;
211 267
212 err = sig = fpcsr_pending(&sc->sc_fpc_csr); 268 err = sig = fpcsr_pending(&sc->sc_fpc_csr);
213 if (err > 0) 269 if (err > 0)
214 err = 0; 270 err = 0;
215 err |= protected_restore_fp_context(sc); 271 err |= protected_restore_fp_context(sc, used_math);
216 return err ?: sig; 272 return err ?: sig;
217} 273}
218 274
@@ -252,9 +308,10 @@ int restore_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
252 if (used_math) { 308 if (used_math) {
253 /* restore fpu context if we have used it before */ 309 /* restore fpu context if we have used it before */
254 if (!err) 310 if (!err)
255 err = check_and_restore_fp_context(sc); 311 err = check_and_restore_fp_context(sc, used_math);
256 } else { 312 } else {
257 /* signal handler may have used FPU. Give it up. */ 313 /* signal handler may have used FPU or MSA. Disable them. */
314 disable_msa();
258 lose_fpu(0); 315 lose_fpu(0);
259 } 316 }
260 317
diff --git a/arch/mips/kernel/signal32.c b/arch/mips/kernel/signal32.c
index bae2e6ee2109..299f956e4db3 100644
--- a/arch/mips/kernel/signal32.c
+++ b/arch/mips/kernel/signal32.c
@@ -30,6 +30,7 @@
30#include <asm/sim.h> 30#include <asm/sim.h>
31#include <asm/ucontext.h> 31#include <asm/ucontext.h>
32#include <asm/fpu.h> 32#include <asm/fpu.h>
33#include <asm/msa.h>
33#include <asm/war.h> 34#include <asm/war.h>
34#include <asm/vdso.h> 35#include <asm/vdso.h>
35#include <asm/dsp.h> 36#include <asm/dsp.h>
@@ -42,6 +43,9 @@ static int (*restore_fp_context32)(struct sigcontext32 __user *sc);
42extern asmlinkage int _save_fp_context32(struct sigcontext32 __user *sc); 43extern asmlinkage int _save_fp_context32(struct sigcontext32 __user *sc);
43extern asmlinkage int _restore_fp_context32(struct sigcontext32 __user *sc); 44extern asmlinkage int _restore_fp_context32(struct sigcontext32 __user *sc);
44 45
46extern asmlinkage int _save_msa_context32(struct sigcontext32 __user *sc);
47extern asmlinkage int _restore_msa_context32(struct sigcontext32 __user *sc);
48
45/* 49/*
46 * Including <asm/unistd.h> would give use the 64-bit syscall numbers ... 50 * Including <asm/unistd.h> would give use the 64-bit syscall numbers ...
47 */ 51 */
@@ -111,19 +115,59 @@ static int copy_fp_from_sigcontext32(struct sigcontext32 __user *sc)
111} 115}
112 116
113/* 117/*
118 * These functions will save only the upper 64 bits of the vector registers,
119 * since the lower 64 bits have already been saved as the scalar FP context.
120 */
121static int copy_msa_to_sigcontext32(struct sigcontext32 __user *sc)
122{
123 int i;
124 int err = 0;
125
126 for (i = 0; i < NUM_FPU_REGS; i++) {
127 err |=
128 __put_user(get_fpr64(&current->thread.fpu.fpr[i], 1),
129 &sc->sc_msaregs[i]);
130 }
131 err |= __put_user(current->thread.fpu.msacsr, &sc->sc_msa_csr);
132
133 return err;
134}
135
136static int copy_msa_from_sigcontext32(struct sigcontext32 __user *sc)
137{
138 int i;
139 int err = 0;
140 u64 val;
141
142 for (i = 0; i < NUM_FPU_REGS; i++) {
143 err |= __get_user(val, &sc->sc_msaregs[i]);
144 set_fpr64(&current->thread.fpu.fpr[i], 1, val);
145 }
146 err |= __get_user(current->thread.fpu.msacsr, &sc->sc_msa_csr);
147
148 return err;
149}
150
151/*
114 * sigcontext handlers 152 * sigcontext handlers
115 */ 153 */
116static int protected_save_fp_context32(struct sigcontext32 __user *sc) 154static int protected_save_fp_context32(struct sigcontext32 __user *sc,
155 unsigned used_math)
117{ 156{
118 int err; 157 int err;
158 bool save_msa = cpu_has_msa && (used_math & USEDMATH_MSA);
119 while (1) { 159 while (1) {
120 lock_fpu_owner(); 160 lock_fpu_owner();
121 if (is_fpu_owner()) { 161 if (is_fpu_owner()) {
122 err = save_fp_context32(sc); 162 err = save_fp_context32(sc);
163 if (save_msa && !err)
164 err = _save_msa_context32(sc);
123 unlock_fpu_owner(); 165 unlock_fpu_owner();
124 } else { 166 } else {
125 unlock_fpu_owner(); 167 unlock_fpu_owner();
126 err = copy_fp_to_sigcontext32(sc); 168 err = copy_fp_to_sigcontext32(sc);
169 if (save_msa && !err)
170 err = copy_msa_to_sigcontext32(sc);
127 } 171 }
128 if (likely(!err)) 172 if (likely(!err))
129 break; 173 break;
@@ -137,17 +181,28 @@ static int protected_save_fp_context32(struct sigcontext32 __user *sc)
137 return err; 181 return err;
138} 182}
139 183
140static int protected_restore_fp_context32(struct sigcontext32 __user *sc) 184static int protected_restore_fp_context32(struct sigcontext32 __user *sc,
185 unsigned used_math)
141{ 186{
142 int err, tmp __maybe_unused; 187 int err, tmp __maybe_unused;
188 bool restore_msa = cpu_has_msa && (used_math & USEDMATH_MSA);
143 while (1) { 189 while (1) {
144 lock_fpu_owner(); 190 lock_fpu_owner();
145 if (is_fpu_owner()) { 191 if (is_fpu_owner()) {
146 err = restore_fp_context32(sc); 192 err = restore_fp_context32(sc);
193 if (restore_msa && !err) {
194 enable_msa();
195 err = _restore_msa_context32(sc);
196 } else {
197 /* signal handler may have used MSA */
198 disable_msa();
199 }
147 unlock_fpu_owner(); 200 unlock_fpu_owner();
148 } else { 201 } else {
149 unlock_fpu_owner(); 202 unlock_fpu_owner();
150 err = copy_fp_from_sigcontext32(sc); 203 err = copy_fp_from_sigcontext32(sc);
204 if (restore_msa && !err)
205 err = copy_msa_from_sigcontext32(sc);
151 } 206 }
152 if (likely(!err)) 207 if (likely(!err))
153 break; 208 break;
@@ -186,7 +241,8 @@ static int setup_sigcontext32(struct pt_regs *regs,
186 err |= __put_user(mflo3(), &sc->sc_lo3); 241 err |= __put_user(mflo3(), &sc->sc_lo3);
187 } 242 }
188 243
189 used_math = !!used_math(); 244 used_math = used_math() ? USEDMATH_FP : 0;
245 used_math |= thread_msa_context_live() ? USEDMATH_MSA : 0;
190 err |= __put_user(used_math, &sc->sc_used_math); 246 err |= __put_user(used_math, &sc->sc_used_math);
191 247
192 if (used_math) { 248 if (used_math) {
@@ -194,20 +250,21 @@ static int setup_sigcontext32(struct pt_regs *regs,
194 * Save FPU state to signal context. Signal handler 250 * Save FPU state to signal context. Signal handler
195 * will "inherit" current FPU state. 251 * will "inherit" current FPU state.
196 */ 252 */
197 err |= protected_save_fp_context32(sc); 253 err |= protected_save_fp_context32(sc, used_math);
198 } 254 }
199 return err; 255 return err;
200} 256}
201 257
202static int 258static int
203check_and_restore_fp_context32(struct sigcontext32 __user *sc) 259check_and_restore_fp_context32(struct sigcontext32 __user *sc,
260 unsigned used_math)
204{ 261{
205 int err, sig; 262 int err, sig;
206 263
207 err = sig = fpcsr_pending(&sc->sc_fpc_csr); 264 err = sig = fpcsr_pending(&sc->sc_fpc_csr);
208 if (err > 0) 265 if (err > 0)
209 err = 0; 266 err = 0;
210 err |= protected_restore_fp_context32(sc); 267 err |= protected_restore_fp_context32(sc, used_math);
211 return err ?: sig; 268 return err ?: sig;
212} 269}
213 270
@@ -244,9 +301,10 @@ static int restore_sigcontext32(struct pt_regs *regs,
244 if (used_math) { 301 if (used_math) {
245 /* restore fpu context if we have used it before */ 302 /* restore fpu context if we have used it before */
246 if (!err) 303 if (!err)
247 err = check_and_restore_fp_context32(sc); 304 err = check_and_restore_fp_context32(sc, used_math);
248 } else { 305 } else {
249 /* signal handler may have used FPU. Give it up. */ 306 /* signal handler may have used FPU or MSA. Disable them. */
307 disable_msa();
250 lose_fpu(0); 308 lose_fpu(0);
251 } 309 }
252 310