Diffstat (limited to 'arch/powerpc/kernel/head_32.S')
-rw-r--r--  arch/powerpc/kernel/head_32.S  101
1 file changed, 4 insertions, 97 deletions
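
The diff below changes the AltiVecUnavailable handler to call load_up_altivec as a subroutine (bl) and return through fast_exception_return, and removes the local load_up_altivec/giveup_altivec implementations from head_32.S. For orientation only, here is a rough C sketch (not kernel code) of the lazy AltiVec ownership scheme that the removed assembly implements: the names last_task_used_altivec and MSR_VEC and the thread fields mirror the diff, while the struct layout, the MSR_VEC value, and the save/restore helpers are simplified assumptions.

#include <stddef.h>

#define MSR_VEC (1u << 25)   /* vector-enable bit in the MSR (assumed value) */

struct thread_state {
	unsigned int vr[32][4];  /* saved vector registers VR0-VR31 */
	unsigned int vscr;       /* saved vector status and control register */
	unsigned int msr;        /* saved MSR image for the task */
	int used_vr;             /* task has used AltiVec at least once */
};

/* UP-only: which task's vector state currently lives in the hardware. */
static struct thread_state *last_task_used_altivec;

/* Stand-ins for the SAVE_32VRS/mfvscr and REST_32VRS/mtvscr sequences. */
static void save_vector_regs(struct thread_state *t) { (void)t; }
static void load_vector_regs(struct thread_state *t) { (void)t; }

/*
 * Rough equivalent of load_up_altivec: runs when a task takes an
 * "AltiVec unavailable" fault from user mode.
 */
static void load_up_altivec_sketch(struct thread_state *current_thread)
{
	/* Lazily flush the previous owner's state and revoke its access. */
	if (last_task_used_altivec) {
		save_vector_regs(last_task_used_altivec);
		last_task_used_altivec->msr &= ~MSR_VEC;
	}

	/* Hand the unit to the faulting task and restore its saved state. */
	current_thread->used_vr = 1;
	current_thread->msr |= MSR_VEC;
	load_vector_regs(current_thread);
	last_task_used_altivec = current_thread;
}

/*
 * Rough equivalent of giveup_altivec(tsk): flush a task's live vector
 * state back to its thread struct, e.g. before a context switch on SMP.
 */
static void giveup_altivec_sketch(struct thread_state *tsk)
{
	if (!tsk)
		return;
	save_vector_regs(tsk);
	tsk->msr &= ~MSR_VEC;
	last_task_used_altivec = NULL;
}
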
diff --git a/arch/powerpc/kernel/head_32.S b/arch/powerpc/kernel/head_32.S
index c01467f952d3..48469463f89e 100644
--- a/arch/powerpc/kernel/head_32.S
+++ b/arch/powerpc/kernel/head_32.S
@@ -733,9 +733,11 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
 AltiVecUnavailable:
 	EXCEPTION_PROLOG
 #ifdef CONFIG_ALTIVEC
-	bne	load_up_altivec		/* if from user, just load it up */
+	beq	1f
+	bl	load_up_altivec		/* if from user, just load it up */
+	b	fast_exception_return
 #endif /* CONFIG_ALTIVEC */
-	addi	r3,r1,STACK_FRAME_OVERHEAD
+1:	addi	r3,r1,STACK_FRAME_OVERHEAD
 	EXC_XFER_EE_LITE(0xf20, altivec_unavailable_exception)
 
 PerformanceMonitor:
@@ -743,101 +745,6 @@ PerformanceMonitor:
 	addi	r3,r1,STACK_FRAME_OVERHEAD
 	EXC_XFER_STD(0xf00, performance_monitor_exception)
 
-#ifdef CONFIG_ALTIVEC
-/* Note that the AltiVec support is closely modeled after the FP
- * support.  Changes to one are likely to be applicable to the
- * other!  */
-load_up_altivec:
-/*
- * Disable AltiVec for the task which had AltiVec previously,
- * and save its AltiVec registers in its thread_struct.
- * Enables AltiVec for use in the kernel on return.
- * On SMP we know the AltiVec units are free, since we give it up every
- * switch.  -- Kumar
- */
-	mfmsr	r5
-	oris	r5,r5,MSR_VEC@h
-	MTMSRD(r5)			/* enable use of AltiVec now */
-	isync
-/*
- * For SMP, we don't do lazy AltiVec switching because it just gets too
- * horrendously complex, especially when a task switches from one CPU
- * to another.  Instead we call giveup_altivec in switch_to.
- */
-#ifndef CONFIG_SMP
-	tophys(r6,0)
-	addis	r3,r6,last_task_used_altivec@ha
-	lwz	r4,last_task_used_altivec@l(r3)
-	cmpwi	0,r4,0
-	beq	1f
-	add	r4,r4,r6
-	addi	r4,r4,THREAD	/* want THREAD of last_task_used_altivec */
-	SAVE_32VRS(0,r10,r4)
-	mfvscr	vr0
-	li	r10,THREAD_VSCR
-	stvx	vr0,r10,r4
-	lwz	r5,PT_REGS(r4)
-	add	r5,r5,r6
-	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-	lis	r10,MSR_VEC@h
-	andc	r4,r4,r10	/* disable altivec for previous task */
-	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-1:
-#endif /* CONFIG_SMP */
-	/* enable use of AltiVec after return */
-	oris	r9,r9,MSR_VEC@h
-	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
-	li	r4,1
-	li	r10,THREAD_VSCR
-	stw	r4,THREAD_USED_VR(r5)
-	lvx	vr0,r10,r5
-	mtvscr	vr0
-	REST_32VRS(0,r10,r5)
-#ifndef CONFIG_SMP
-	subi	r4,r5,THREAD
-	sub	r4,r4,r6
-	stw	r4,last_task_used_altivec@l(r3)
-#endif /* CONFIG_SMP */
-	/* restore registers and return */
-	/* we haven't used ctr or xer or lr */
-	b	fast_exception_return
-
-/*
- * giveup_altivec(tsk)
- * Disable AltiVec for the task given as the argument,
- * and save the AltiVec registers in its thread_struct.
- * Enables AltiVec for use in the kernel on return.
- */
-
-	.globl	giveup_altivec
-giveup_altivec:
-	mfmsr	r5
-	oris	r5,r5,MSR_VEC@h
-	SYNC
-	MTMSRD(r5)			/* enable use of AltiVec now */
-	isync
-	cmpwi	0,r3,0
-	beqlr-				/* if no previous owner, done */
-	addi	r3,r3,THREAD		/* want THREAD of task */
-	lwz	r5,PT_REGS(r3)
-	cmpwi	0,r5,0
-	SAVE_32VRS(0, r4, r3)
-	mfvscr	vr0
-	li	r4,THREAD_VSCR
-	stvx	vr0,r4,r3
-	beq	1f
-	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-	lis	r3,MSR_VEC@h
-	andc	r4,r4,r3		/* disable AltiVec for previous task */
-	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-1:
-#ifndef CONFIG_SMP
-	li	r5,0
-	lis	r4,last_task_used_altivec@ha
-	stw	r5,last_task_used_altivec@l(r4)
-#endif /* CONFIG_SMP */
-	blr
-#endif /* CONFIG_ALTIVEC */
-
 /*
  * This code is jumped to from the startup code to copy