[PATCH] ARM SMP: Fix ARMv6 spinlock and semaphore implementations
author		Russell King <rmk@dyn-67.arm.linux.org.uk>
		Sun, 24 Jul 2005 11:13:40 +0000 (12:13 +0100)
committer	Russell King <rmk+kernel@arm.linux.org.uk>
		Sun, 24 Jul 2005 11:13:40 +0000 (12:13 +0100)
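
The ARMv6 ldrex/strex-based semaphore and rwlock implementations had
several problems:

 - In the semaphore up paths in locks.h, the new count was tested with
   "teq lr, #0" before the conditional "movle ip, %0; blle wake".  teq
   updates only the N and Z flags and leaves the overflow (V) flag
   untouched, so the signed "le" condition could be evaluated against
   a stale V flag, waking sleepers spuriously or failing to wake them
   at all.  "cmp lr, #0" sets all of the arithmetic flags.  Likewise,
   __up_op_read needs "adds" rather than "add" so that the increment
   itself sets the condition flags.

 - rwlock_init() used "+" where "=" was intended, so it never assigned
   RW_LOCK_UNLOCKED to the lock.

 - _raw_read_unlock() used scratch asm operands (tmp, tmp2) without
   declaring the backing variables; the declarations are added.

 - _raw_write_trylock() moves up to sit with the other write-lock
   operations, and a rwlock_is_locked() macro is added.
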
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
include/asm-arm/locks.h
include/asm-arm/spinlock.h

diff --git a/include/asm-arm/locks.h b/include/asm-arm/locks.h
index c26298f..9cb33fc 100644
--- a/include/asm-arm/locks.h
+++ b/include/asm-arm/locks.h
@@ -61,7 +61,7 @@
 "      strex   ip, lr, [%0]\n"                 \
 "      teq     ip, #0\n"                       \
 "      bne     1b\n"                           \
-"      teq     lr, #0\n"                       \
+"      cmp     lr, #0\n"                       \
 "      movle   ip, %0\n"                       \
 "      blle    " #wake                         \
        :                                       \
        __asm__ __volatile__(                   \
        "@ up_op_read\n"                        \
 "1:    ldrex   lr, [%0]\n"                     \
-"      add     lr, lr, %1\n"                   \
+"      adds    lr, lr, %1\n"                   \
 "      strex   ip, lr, [%0]\n"                 \
 "      teq     ip, #0\n"                       \
 "      bne     1b\n"                           \
diff --git a/include/asm-arm/spinlock.h b/include/asm-arm/spinlock.h
index 1823236..9705d5e 100644
--- a/include/asm-arm/spinlock.h
+++ b/include/asm-arm/spinlock.h
@@ -79,7 +79,8 @@ typedef struct {
 } rwlock_t;
 
 #define RW_LOCK_UNLOCKED       (rwlock_t) { 0 }
-#define rwlock_init(x)         do { *(x) + RW_LOCK_UNLOCKED; } while (0)
+#define rwlock_init(x)         do { *(x) = RW_LOCK_UNLOCKED; } while (0)
+#define rwlock_is_locked(x)    (*((volatile unsigned int *)(x)) != 0)
 
 /*
  * Write locks are easy - we just set bit 31.  When unlocking, we can
@@ -100,6 +101,21 @@ static inline void _raw_write_lock(rwlock_t *rw)
        : "cc", "memory");
 }
 
+static inline int _raw_write_trylock(rwlock_t *rw)
+{
+       unsigned long tmp;
+
+       __asm__ __volatile__(
+"1:    ldrex   %0, [%1]\n"
+"      teq     %0, #0\n"
+"      strexeq %0, %2, [%1]"
+       : "=&r" (tmp)
+       : "r" (&rw->lock), "r" (0x80000000)
+       : "cc", "memory");
+
+       return tmp == 0;
+}
+
 static inline void _raw_write_unlock(rwlock_t *rw)
 {
        __asm__ __volatile__(
@@ -138,6 +154,8 @@ static inline void _raw_read_lock(rwlock_t *rw)
 
 static inline void _raw_read_unlock(rwlock_t *rw)
 {
+       unsigned long tmp, tmp2;
+
        __asm__ __volatile__(
 "1:    ldrex   %0, [%2]\n"
 "      sub     %0, %0, #1\n"
@@ -151,19 +169,4 @@ static inline void _raw_read_unlock(rwlock_t *rw)
 
 #define _raw_read_trylock(lock) generic_raw_read_trylock(lock)
 
-static inline int _raw_write_trylock(rwlock_t *rw)
-{
-       unsigned long tmp;
-
-       __asm__ __volatile__(
-"1:    ldrex   %0, [%1]\n"
-"      teq     %0, #0\n"
-"      strexeq %0, %2, [%1]"
-       : "=&r" (tmp)
-       : "r" (&rw->lock), "r" (0x80000000)
-       : "cc", "memory");
-
-       return tmp == 0;
-}
-
 #endif /* __ASM_SPINLOCK_H */
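
For illustration, the ldrex/teq/strexeq sequence in the new
_raw_write_trylock() is a one-shot compare-and-swap: claim the write
lock by moving the word from 0 to 0x80000000, and fail without
spinning if it is already nonzero.  A rough equivalent built on the
GCC __atomic_compare_exchange_n() builtin (the function name and the
memory orderings here are illustrative assumptions, not part of this
patch):

	#include <stdbool.h>

	static inline bool write_trylock_sketch(unsigned int *lock)
	{
		unsigned int expected = 0;

		/*
		 * Weak CAS: strex may fail spuriously, and the asm
		 * version does not loop on strex failure either.
		 */
		return __atomic_compare_exchange_n(lock, &expected,
						   0x80000000U, true,
						   __ATOMIC_ACQUIRE,
						   __ATOMIC_RELAXED);
	}

A true return matches "return tmp == 0" above.  Readers count in the
low bits and a writer sets bit 31, so any nonzero value at all blocks
a would-be writer.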