author		Ralf Baechle <ralf@linux-mips.org>	Fri, 19 Aug 2005 14:29:15 +0000 (14:29 +0000)
committer	Ralf Baechle <ralf@linux-mips.org>	Sat, 29 Oct 2005 18:32:11 +0000 (19:32 +0100)

Remove the workaround for the binutils 2.15 assembler bug; that binutils
version is no longer suitable for reliably building kernels anyway, and
2.16 has the bug fixed.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
include/asm-mips/system.h
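
The hunks below drop the ROT_IN_PIECES macro and the four places that emitted it
after a beqzl.  As a reference point, here is a minimal sketch of how the LL/SC
exchange loop reads once the workaround is gone.  It follows the __xchg_u32 hunk,
but the "1: ll" line, the ".set mips0" epilogue and the operand constraints are
reconstructed from context rather than quoted from this patch, and the function
name sketch_xchg_u32 is purely illustrative.

	/*
	 * Sketch of the LL/SC exchange loop after this patch, modelled on
	 * the __xchg_u32 hunk.  The "1: ll" line, the ".set mips0" epilogue
	 * and the operand constraints are reconstructed from context, not
	 * quoted from the patch; the sync is shown unconditionally where
	 * the kernel wraps it in #ifdef CONFIG_SMP.
	 */
	static inline unsigned int sketch_xchg_u32(volatile int *m, unsigned int val)
	{
		unsigned int retval, dummy;

		__asm__ __volatile__(
		"	.set	mips3			\n"
		"1:	ll	%0, %3			\n"	/* load-linked old value  */
		"	move	%2, %z4			\n"	/* stage the new value    */
		"	sc	%2, %1			\n"	/* store-conditional      */
		"	beqzl	%2, 1b			\n"	/* retry if the SC failed */
		"	sync				\n"	/* memory barrier (SMP)   */
		"	.set	mips0			\n"
		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");

		return retval;
	}

The only difference from the pre-patch code is the missing ".set noreorder" /
".set reorder" pair that ROT_IN_PIECES used to emit between the beqzl and the
sync.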

index 0f8caf3..b1ac3f5 100644
@@ -164,10 +164,6 @@ do {                                                                       \
                __restore_dsp(current);                                 \
 } while(0)
 
-#define ROT_IN_PIECES                                                  \
-       "       .set    noreorder       \n"                             \
-       "       .set    reorder         \n"
-
 static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
        __u32 retval;
@@ -183,7 +179,6 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                "       .set    mips3                                   \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
-               ROT_IN_PIECES
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
@@ -235,7 +230,6 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
-               ROT_IN_PIECES
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
@@ -311,7 +305,6 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
                "       move    $1, %z4                                 \n"
                "       sc      $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
-               ROT_IN_PIECES
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif
@@ -367,7 +360,6 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
                "       move    $1, %z4                                 \n"
                "       scd     $1, %1                                  \n"
                "       beqzl   $1, 1b                                  \n"
-               ROT_IN_PIECES
 #ifdef CONFIG_SMP
                "       sync                                            \n"
 #endif