Diffstat (limited to 'arch/powerpc/kernel/align.c')
 arch/powerpc/kernel/align.c | 75 ++++++++++++++++++++++++++++++---------------
 1 file changed, 52 insertions(+), 23 deletions(-)
diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index a5b632e52fae..b876e989220b 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -642,10 +642,14 @@ static int emulate_spe(struct pt_regs *regs, unsigned int reg,
  */
 static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
 		       unsigned int areg, struct pt_regs *regs,
-		       unsigned int flags, unsigned int length)
+		       unsigned int flags, unsigned int length,
+		       unsigned int elsize)
 {
 	char *ptr;
+	unsigned long *lptr;
 	int ret = 0;
+	int sw = 0;
+	int i, j;
 
 	flush_vsx_to_thread(current);
 
@@ -654,19 +658,35 @@ static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
 	else
 		ptr = (char *) &current->thread.vr[reg - 32];
 
-	if (flags & ST)
-		ret = __copy_to_user(addr, ptr, length);
-	else {
-		if (flags & SPLT){
-			ret = __copy_from_user(ptr, addr, length);
-			ptr += length;
+	lptr = (unsigned long *) ptr;
+
+	if (flags & SW)
+		sw = elsize-1;
+
+	for (j = 0; j < length; j += elsize) {
+		for (i = 0; i < elsize; ++i) {
+			if (flags & ST)
+				ret |= __put_user(ptr[i^sw], addr + i);
+			else
+				ret |= __get_user(ptr[i^sw], addr + i);
 		}
-		ret |= __copy_from_user(ptr, addr, length);
+		ptr += elsize;
+		addr += elsize;
 	}
-	if (flags & U)
-		regs->gpr[areg] = regs->dar;
-	if (ret)
+
+	if (!ret) {
+		if (flags & U)
+			regs->gpr[areg] = regs->dar;
+
+		/* Splat load copies the same data to top and bottom 8 bytes */
+		if (flags & SPLT)
+			lptr[1] = lptr[0];
+		/* For 8 byte loads, zero the top 8 bytes */
+		else if (!(flags & ST) && (8 == length))
+			lptr[1] = 0;
+	} else
 		return -EFAULT;
+
 	return 1;
 }
 #endif
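The rewritten emulate_vsx() copies one element at a time and byte-reverses within each element via the index ptr[i^sw]: with SW set, sw is elsize-1, and for a power-of-two elsize, i^sw equals elsize-1-i; with SW clear, sw is 0 and the copy is straight through. Below is a minimal userspace sketch of that loop, assuming power-of-two element sizes; the names copy_elements/src/dst are illustrative, and plain byte assignments stand in for __put_user/__get_user.

/*
 * Sketch of the element copy loop above.  Not kernel code: no user
 * access checks, just the indexing trick.
 */
#include <stdio.h>

static void copy_elements(unsigned char *dst, const unsigned char *src,
			  unsigned int length, unsigned int elsize,
			  int byteswap)
{
	unsigned int i, j;
	unsigned int sw = byteswap ? elsize - 1 : 0;

	/* i ^ sw maps i to elsize-1-i when sw == elsize-1 (one less
	 * than a power of two), reversing each element; it is the
	 * identity when sw == 0 */
	for (j = 0; j < length; j += elsize)
		for (i = 0; i < elsize; ++i)
			dst[j + (i ^ sw)] = src[j + i];
}

int main(void)
{
	unsigned char src[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	unsigned char dst[8];
	unsigned int k;

	copy_elements(dst, src, 8, 4, 1);
	for (k = 0; k < 8; k++)
		printf("%d ", dst[k]);	/* prints: 4 3 2 1 8 7 6 5 */
	printf("\n");
	return 0;
}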
@@ -732,7 +752,7 @@ int fix_alignment(struct pt_regs *regs)
 
 #ifdef CONFIG_SPE
 	if ((instr >> 26) == 0x4) {
-		PPC_WARN_EMULATED(spe);
+		PPC_WARN_ALIGNMENT(spe, regs);
 		return emulate_spe(regs, reg, instr);
 	}
 #endif
@@ -767,16 +787,25 @@ int fix_alignment(struct pt_regs *regs)
 
 #ifdef CONFIG_VSX
 	if ((instruction & 0xfc00003e) == 0x7c000018) {
-		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR */
+		unsigned int elsize;
+
+		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
 		reg |= (instruction & 0x1) << 5;
 		/* Simple inline decoder instead of a table */
+		/* VSX has only 8 and 16 byte memory accesses */
+		nb = 8;
 		if (instruction & 0x200)
 			nb = 16;
-		else if (instruction & 0x080)
-			nb = 8;
-		else
-			nb = 4;
+
+		/* Vector stores in little-endian mode swap individual
+		   elements, so process them separately */
+		elsize = 4;
+		if (instruction & 0x80)
+			elsize = 8;
+
 		flags = 0;
+		if (regs->msr & MSR_LE)
+			flags |= SW;
 		if (instruction & 0x100)
 			flags |= ST;
 		if (instruction & 0x040)
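The inline decoder now derives everything from fixed opcode bits: 0x200 selects a 16-byte access (otherwise 8), 0x80 selects 8-byte elements (otherwise 4), and MSR_LE in regs->msr turns on byte swapping. A standalone sketch of the same bit tests follows; the flag values and the decode_vsx helper are illustrative stand-ins, not the kernel's definitions in align.c.

#include <stdio.h>

/* Illustrative flag values, not the kernel's #defines */
#define ST 0x01
#define U  0x04
#define SW 0x20

/* Hypothetical helper mirroring the inline decode above for a raw
 * VSX load/store opcode word */
static void decode_vsx(unsigned int instruction, int msr_le)
{
	unsigned int nb, elsize, flags = 0;

	/* VSX has only 8 and 16 byte memory accesses */
	nb = 8;
	if (instruction & 0x200)
		nb = 16;

	/* 4- or 8-byte elements within the access */
	elsize = 4;
	if (instruction & 0x80)
		elsize = 8;

	if (msr_le)
		flags |= SW;	/* little-endian: swap element bytes */
	if (instruction & 0x100)
		flags |= ST;	/* store rather than load */
	if (instruction & 0x040)
		flags |= U;	/* update form: EA written back */

	printf("nb=%u elsize=%u flags=%#x\n", nb, elsize, flags);
}

int main(void)
{
	/* e.g. 0x7c000618, the lxvw4x pattern: decodes as a 16-byte
	 * load with 4-byte elements */
	decode_vsx(0x7c000618, 0);
	return 0;
}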
@@ -786,15 +815,15 @@ int fix_alignment(struct pt_regs *regs)
 			flags |= SPLT;
 			nb = 8;
 		}
-		PPC_WARN_EMULATED(vsx);
-		return emulate_vsx(addr, reg, areg, regs, flags, nb);
+		PPC_WARN_ALIGNMENT(vsx, regs);
+		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
 	}
 #endif
 	/* A size of 0 indicates an instruction we don't support, with
 	 * the exception of DCBZ which is handled as a special case here
 	 */
 	if (instr == DCBZ) {
-		PPC_WARN_EMULATED(dcbz);
+		PPC_WARN_ALIGNMENT(dcbz, regs);
 		return emulate_dcbz(regs, addr);
 	}
 	if (unlikely(nb == 0))
@@ -804,7 +833,7 @@ int fix_alignment(struct pt_regs *regs)
 	 * function
 	 */
 	if (flags & M) {
-		PPC_WARN_EMULATED(multiple);
+		PPC_WARN_ALIGNMENT(multiple, regs);
 		return emulate_multiple(regs, addr, reg, nb,
 					flags, instr, swiz);
 	}
@@ -825,11 +854,11 @@ int fix_alignment(struct pt_regs *regs)
 
 	/* Special case for 16-byte FP loads and stores */
 	if (nb == 16) {
-		PPC_WARN_EMULATED(fp_pair);
+		PPC_WARN_ALIGNMENT(fp_pair, regs);
 		return emulate_fp_pair(addr, reg, flags);
 	}
 
-	PPC_WARN_EMULATED(unaligned);
+	PPC_WARN_ALIGNMENT(unaligned, regs);
 
 	/* If we are loading, get the data from user space, else
 	 * get it from register values
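With the copy loop working byte by byte, the post-copy fixups now live in one place in emulate_vsx(): on success, a splat load duplicates the low 8 bytes of the register image into the high 8, and a plain 8-byte load zeroes the top half. A sketch of just that step, where vsr stands in for the thread's 16-byte register slot and the flag values are illustrative:

#include <stdint.h>

/* Illustrative flag values, not the kernel's #defines */
#define ST   0x01
#define SPLT 0x80

static void finish_vsx_access(uint64_t vsr[2], unsigned int flags,
			      unsigned int length)
{
	/* Splat load copies the same data to top and bottom 8 bytes */
	if (flags & SPLT)
		vsr[1] = vsr[0];
	/* For 8 byte loads, zero the top 8 bytes */
	else if (!(flags & ST) && length == 8)
		vsr[1] = 0;
}

int main(void)
{
	uint64_t vsr[2] = { 0x1122334455667788ULL, 0 };

	finish_vsx_access(vsr, SPLT, 16);	/* duplicates low half */
	return vsr[1] == vsr[0] ? 0 : 1;
}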