powerpc: Handle VSX alignment faults correctly in little-endian mode
author Neil Campbell <neilc@linux.vnet.ibm.com>
Mon, 14 Dec 2009 04:08:57 +0000 (04:08 +0000)
committer Greg Kroah-Hartman <gregkh@suse.de>
Mon, 18 Jan 2010 18:34:03 +0000 (10:34 -0800)
commit bb7f20b1c639606def3b91f4e4aca6daeee5d80a upstream.

This patch fixes the handling of VSX alignment faults in little-endian
mode (the current code assumes the processor is in big-endian mode).

The patch also makes the handlers clear the top 8 bytes of the register
when handling an 8 byte VSX load.
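
As an illustration only (this sketch is not part of the patch; the buffer
names are invented and a 64-bit unsigned long is assumed, as on ppc64),
the i ^ sw indexing used below to reverse the bytes of each element in
little-endian mode, and the clearing of the top half for an 8 byte load,
work like this:

  #include <stdio.h>

  int main(void)
  {
          /* 'mem' stands in for the misaligned memory operand of an
           * 8 byte VSX load; 'vsr' is the 16 byte register image seen
           * as two 8 byte halves, with 'ptr' giving the byte view,
           * much as emulate_vsx() does. (Illustrative names only.) */
          unsigned char mem[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
          unsigned long vsr[2];
          unsigned char *ptr = (unsigned char *) vsr;
          unsigned int length = 8, elsize = 8;
          unsigned int sw = elsize - 1;   /* non-zero only for LE mode */
          unsigned int i, j;

          for (j = 0; j < length; j += elsize)
                  for (i = 0; i < elsize; ++i)
                          ptr[j + (i ^ sw)] = mem[j + i];

          vsr[1] = 0;     /* plain 8 byte load: clear the top 8 bytes */

          for (i = 0; i < 16; ++i)
                  printf("%d ", ptr[i]);  /* 7 6 5 4 3 2 1 0, then eight 0s */
          printf("\n");
          return 0;
  }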

This is based on 2.6.32.

Signed-off-by: Neil Campbell <neilc@linux.vnet.ibm.com>
Acked-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@suse.de>
diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index 73cb6a3..15a369e 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -641,10 +641,14 @@ static int emulate_spe(struct pt_regs *regs, unsigned int reg,
  */
 static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
                       unsigned int areg, struct pt_regs *regs,
-                      unsigned int flags, unsigned int length)
+                      unsigned int flags, unsigned int length,
+                      unsigned int elsize)
 {
        char *ptr;
+       unsigned long *lptr;
        int ret = 0;
+       int sw = 0;
+       int i, j;
 
        flush_vsx_to_thread(current);
 
@@ -653,19 +657,35 @@ static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
        else
                ptr = (char *) &current->thread.vr[reg - 32];
 
-       if (flags & ST)
-               ret = __copy_to_user(addr, ptr, length);
-        else {
-               if (flags & SPLT){
-                       ret = __copy_from_user(ptr, addr, length);
-                       ptr += length;
+       lptr = (unsigned long *) ptr;
+
+       if (flags & SW)
+               sw = elsize-1;
+
+       for (j = 0; j < length; j += elsize) {
+               for (i = 0; i < elsize; ++i) {
+                       if (flags & ST)
+                               ret |= __put_user(ptr[i^sw], addr + i);
+                       else
+                               ret |= __get_user(ptr[i^sw], addr + i);
                }
-               ret |= __copy_from_user(ptr, addr, length);
+               ptr  += elsize;
+               addr += elsize;
        }
-       if (flags & U)
-               regs->gpr[areg] = regs->dar;
-       if (ret)
+
+       if (!ret) {
+               if (flags & U)
+                       regs->gpr[areg] = regs->dar;
+
+               /* Splat load copies the same data to top and bottom 8 bytes */
+               if (flags & SPLT)
+                       lptr[1] = lptr[0];
+               /* For 8 byte loads, zero the top 8 bytes */
+               else if (!(flags & ST) && (8 == length))
+                       lptr[1] = 0;
+       } else
                return -EFAULT;
+
        return 1;
 }
 #endif
@@ -764,16 +784,25 @@ int fix_alignment(struct pt_regs *regs)
 
 #ifdef CONFIG_VSX
        if ((instruction & 0xfc00003e) == 0x7c000018) {
-               /* Additional register addressing bit (64 VSX vs 32 FPR/GPR */
+               unsigned int elsize;
+
+               /* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
                reg |= (instruction & 0x1) << 5;
                /* Simple inline decoder instead of a table */
+               /* VSX has only 8 and 16 byte memory accesses */
+               nb = 8;
                if (instruction & 0x200)
                        nb = 16;
-               else if (instruction & 0x080)
-                       nb = 8;
-               else
-                       nb = 4;
+
+               /* Vector stores in little-endian mode swap individual
+                  elements, so process them separately */
+               elsize = 4;
+               if (instruction & 0x80)
+                       elsize = 8;
+
                flags = 0;
+               if (regs->msr & MSR_LE)
+                       flags |= SW;
                if (instruction & 0x100)
                        flags |= ST;
                if (instruction & 0x040)
@@ -783,7 +812,7 @@ int fix_alignment(struct pt_regs *regs)
                        flags |= SPLT;
                        nb = 8;
                }
-               return emulate_vsx(addr, reg, areg, regs, flags, nb);
+               return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
        }
 #endif
        /* A size of 0 indicates an instruction we don't support, with