Diffstat (limited to 'arch/powerpc/kernel/align.c')
 arch/powerpc/kernel/align.c | 41 +++++++++++++++++++++++++++++++---------
 1 file changed, 33 insertions(+), 8 deletions(-)
diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index cce82b1e0374..59f70adcbcd9 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -630,7 +630,7 @@ static int emulate_spe(struct pt_regs *regs, unsigned int reg,
 }
 #endif /* CONFIG_SPE */
 
-#if defined(CONFIG_VSX) && defined(__BIG_ENDIAN__)
+#ifdef CONFIG_VSX
 /*
  * Emulate VSX instructions...
  */
@@ -658,8 +658,25 @@ static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
 
 	lptr = (unsigned long *) ptr;
 
+#ifdef __LITTLE_ENDIAN__
+	if (flags & SW) {
+		elsize = length;
+		sw = length-1;
+	} else {
+		/*
+		 * The elements are BE ordered, even in LE mode, so process
+		 * them in reverse order.
+		 */
+		addr += length - elsize;
+
+		/* 8 byte memory accesses go in the top 8 bytes of the VR */
+		if (length == 8)
+			ptr += 8;
+	}
+#else
 	if (flags & SW)
 		sw = elsize-1;
+#endif
 
 	for (j = 0; j < length; j += elsize) {
 		for (i = 0; i < elsize; ++i) {
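A note on the ptr[i^sw] indexing used by this copy loop: when sw is
length-1 (one less than a power-of-two length), XOR-ing the index
mirrors the byte order, which lets the handler byte-reverse a swapped
access without a second loop. A minimal userspace sketch of the trick,
using plain C assignment in place of the kernel's __get_user():

#include <stdio.h>

int main(void)
{
	unsigned char src[8] = {1, 2, 3, 4, 5, 6, 7, 8};
	unsigned char dst[8];
	unsigned int length = 8, sw = length - 1, i;

	for (i = 0; i < length; i++)
		dst[i ^ sw] = src[i];	/* mirrors ptr[i^sw] in the loop above */

	for (i = 0; i < length; i++)
		printf("%u ", dst[i]);	/* prints: 8 7 6 5 4 3 2 1 */
	printf("\n");
	return 0;
}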
@@ -669,19 +686,31 @@ static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
 			ret |= __get_user(ptr[i^sw], addr + i);
 		}
 		ptr += elsize;
+#ifdef __LITTLE_ENDIAN__
+		addr -= elsize;
+#else
 		addr += elsize;
+#endif
 	}
 
+#ifdef __BIG_ENDIAN__
+#define VSX_HI 0
+#define VSX_LO 1
+#else
+#define VSX_HI 1
+#define VSX_LO 0
+#endif
+
 	if (!ret) {
 		if (flags & U)
 			regs->gpr[areg] = regs->dar;
 
 		/* Splat load copies the same data to top and bottom 8 bytes */
 		if (flags & SPLT)
-			lptr[1] = lptr[0];
-		/* For 8 byte loads, zero the top 8 bytes */
+			lptr[VSX_LO] = lptr[VSX_HI];
+		/* For 8 byte loads, zero the low 8 bytes */
 		else if (!(flags & ST) && (8 == length))
-			lptr[1] = 0;
+			lptr[VSX_LO] = 0;
 	} else
 		return -EFAULT;
 
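The VSX_HI/VSX_LO macros introduced above exist because lptr views the
16-byte register image as two unsigned longs, and the doubleword holding
the "top" half of the VR sits at index 0 on big endian but index 1 on
little endian. A standalone sketch of how the splat and the 8-byte
zeroing use those indices (an illustration only; it assumes a compiler
that defines __BIG_ENDIAN__ on BE targets, as GCC does on powerpc):

#include <stdio.h>

#ifdef __BIG_ENDIAN__
#define VSX_HI 0
#define VSX_LO 1
#else
#define VSX_HI 1
#define VSX_LO 0
#endif

int main(void)
{
	unsigned long vr[2];

	vr[VSX_HI] = 0x1122334455667788UL;	/* an 8-byte load fills the high half */
	vr[VSX_LO] = vr[VSX_HI];		/* splat: copy high half into low half */
	printf("hi=%016lx lo=%016lx\n", vr[VSX_HI], vr[VSX_LO]);

	vr[VSX_LO] = 0;				/* non-splat 8-byte load: zero the low half */
	printf("hi=%016lx lo=%016lx\n", vr[VSX_HI], vr[VSX_LO]);
	return 0;
}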
@@ -805,7 +834,6 @@ int fix_alignment(struct pt_regs *regs)
 	/* DAR has the operand effective address */
 	addr = (unsigned char __user *)regs->dar;
 
-#ifdef __BIG_ENDIAN__
 #ifdef CONFIG_VSX
 	if ((instruction & 0xfc00003e) == 0x7c000018) {
 		unsigned int elsize;
@@ -840,9 +868,6 @@ int fix_alignment(struct pt_regs *regs)
 		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
 	}
 #endif
-#else
-	return -EFAULT;
-#endif
 	/* A size of 0 indicates an instruction we don't support, with
 	 * the exception of DCBZ which is handled as a special case here
 	 */
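For context on the test in fix_alignment() above: the mask/match pair
(instruction & 0xfc00003e) == 0x7c000018 keeps the primary opcode bits
plus part of the extended opcode field, so a single comparison covers
the family of VSX load/store instructions the handler emulates. A small
sketch with a hypothetical helper, not from the kernel (the encodings
below are lxvd2x/stxvd2x/lwz with zeroed register fields):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool is_vsx_ldst(uint32_t insn)
{
	return (insn & 0xfc00003e) == 0x7c000018;
}

int main(void)
{
	printf("%d\n", is_vsx_ldst(0x7c000698));	/* lxvd2x  -> 1 */
	printf("%d\n", is_vsx_ldst(0x7c000798));	/* stxvd2x -> 1 */
	printf("%d\n", is_vsx_ldst(0x80030000));	/* lwz     -> 0 */
	return 0;
}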