Diffstat (limited to 'arch/powerpc/kernel/traps.c')
-rw-r--r-- | arch/powerpc/kernel/traps.c | 45
1 file changed, 36 insertions(+), 9 deletions(-)
diff --git a/arch/powerpc/kernel/traps.c b/arch/powerpc/kernel/traps.c
index 26e1358ff0bc..33cd7a0b8e73 100644
--- a/arch/powerpc/kernel/traps.c
+++ b/arch/powerpc/kernel/traps.c
@@ -1416,11 +1416,19 @@ void fp_unavailable_tm(struct pt_regs *regs)
 	/* This loads and recheckpoints the FP registers from
 	 * thread.fpr[]. They will remain in registers after the
 	 * checkpoint so we don't need to reload them after.
+	 * If VMX is in use, the VRs now hold checkpointed values,
+	 * so we don't want to load the VRs from the thread_struct.
 	 */
-	tm_recheckpoint(&current->thread, regs->msr);
+	tm_recheckpoint(&current->thread, MSR_FP);
+
+	/* If VMX is in use, get the transactional values back */
+	if (regs->msr & MSR_VEC) {
+		do_load_up_transact_altivec(&current->thread);
+		/* At this point all the VSX state is loaded, so enable it */
+		regs->msr |= MSR_VSX;
+	}
 }
 
-#ifdef CONFIG_ALTIVEC
 void altivec_unavailable_tm(struct pt_regs *regs)
 {
 	/* See the comments in fp_unavailable_tm(). This function operates
@@ -1432,14 +1440,19 @@ void altivec_unavailable_tm(struct pt_regs *regs)
 		 regs->nip, regs->msr);
 	tm_reclaim_current(TM_CAUSE_FAC_UNAV);
 	regs->msr |= MSR_VEC;
-	tm_recheckpoint(&current->thread, regs->msr);
+	tm_recheckpoint(&current->thread, MSR_VEC);
 	current->thread.used_vr = 1;
+
+	if (regs->msr & MSR_FP) {
+		do_load_up_transact_fpu(&current->thread);
+		regs->msr |= MSR_VSX;
+	}
 }
-#endif
 
-#ifdef CONFIG_VSX
 void vsx_unavailable_tm(struct pt_regs *regs)
 {
+	unsigned long orig_msr = regs->msr;
+
 	/* See the comments in fp_unavailable_tm(). This works similarly,
 	 * though we're loading both FP and VEC registers in here.
 	 *
@@ -1451,16 +1464,30 @@ void vsx_unavailable_tm(struct pt_regs *regs)
 		 "MSR=%lx\n",
 		 regs->nip, regs->msr);
 
+	current->thread.used_vsr = 1;
+
+	/* If FP and VMX are already loaded, we have all the state we need */
+	if ((orig_msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC)) {
+		regs->msr |= MSR_VSX;
+		return;
+	}
+
 	/* This reclaims FP and/or VR regs if they're already enabled */
 	tm_reclaim_current(TM_CAUSE_FAC_UNAV);
 
 	regs->msr |= MSR_VEC | MSR_FP | current->thread.fpexc_mode |
 		MSR_VSX;
-	/* This loads & recheckpoints FP and VRs. */
-	tm_recheckpoint(&current->thread, regs->msr);
-	current->thread.used_vsr = 1;
+
+	/* This loads & recheckpoints FP and VRs; but we have
+	 * to be sure not to overwrite previously-valid state.
+	 */
+	tm_recheckpoint(&current->thread, regs->msr & ~orig_msr);
+
+	if (orig_msr & MSR_FP)
+		do_load_up_transact_fpu(&current->thread);
+	if (orig_msr & MSR_VEC)
+		do_load_up_transact_altivec(&current->thread);
 }
-#endif
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
 
 void performance_monitor_exception(struct pt_regs *regs)
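
Not part of the patch above: the following is a small, self-contained user-space C sketch that only models the facility-selection logic the new vsx_unavailable_tm() uses, i.e. which facilities are loaded from the checkpointed image via the regs->msr & ~orig_msr mask versus reloaded with transactional values. The model_vsx_unavailable_tm() helper and the MSR_* bit positions are made up for illustration and are not the real PowerPC definitions.

/*
 * Illustration only -- NOT kernel code. Models the new vsx_unavailable_tm()
 * decision flow: facilities that were not live at the trap come from the
 * checkpointed image (tm_recheckpoint mask), facilities that were live get
 * their transactional values reloaded (do_load_up_transact_*).
 */
#include <stdio.h>

#define MSR_FP  (1u << 0)	/* placeholder: FP facility bit  */
#define MSR_VEC (1u << 1)	/* placeholder: VMX facility bit */
#define MSR_VSX (1u << 2)	/* placeholder: VSX facility bit */

static void model_vsx_unavailable_tm(unsigned int orig_msr)
{
	unsigned int msr = orig_msr;

	printf("live at trap: FP=%d VEC=%d\n",
	       !!(orig_msr & MSR_FP), !!(orig_msr & MSR_VEC));

	/* If FP and VMX are already loaded, only MSR_VSX needs to be set */
	if ((orig_msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC)) {
		msr |= MSR_VSX;
		printf("  -> enable VSX only, nothing to reload (msr=%#x)\n", msr);
		return;
	}

	msr |= MSR_FP | MSR_VEC | MSR_VSX;

	/* Only facilities that were NOT live come from the checkpointed
	 * image; this is the regs->msr & ~orig_msr mask in the patch. */
	printf("  -> recheckpoint mask: FP=%d VEC=%d\n",
	       !!((msr & ~orig_msr) & MSR_FP),
	       !!((msr & ~orig_msr) & MSR_VEC));

	/* Facilities that WERE live get their transactional values back */
	if (orig_msr & MSR_FP)
		printf("  -> do_load_up_transact_fpu()\n");
	if (orig_msr & MSR_VEC)
		printf("  -> do_load_up_transact_altivec()\n");
}

int main(void)
{
	unsigned int cases[] = { 0, MSR_FP, MSR_VEC, MSR_FP | MSR_VEC };

	for (unsigned int i = 0; i < sizeof(cases) / sizeof(cases[0]); i++)
		model_vsx_unavailable_tm(cases[i]);
	return 0;
}

Running the four cases shows why the patch splits the work: recheckpointing with the full regs->msr (the old behaviour) would overwrite registers that already hold valid transactional state.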