None of the weakly ordered processors supported in-tree need this, but it
seems like this could change ...
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
config SYS_HAS_CPU_SB1
	bool
+#
+# CPU may reorder R->R, R->W, W->R, W->W
+# Reordering beyond LL and SC is handled in WEAK_REORDERING_BEYOND_LLSC
+#
config WEAK_ORDERING
	bool
+
+#
+# CPU may reorder reads and writes beyond LL/SC
+# CPU may reorder R->LL, R->LL, W->LL, W->LL, R->SC, R->SC, W->SC, W->SC
+#
+config WEAK_REORDERING_BEYOND_LLSC
+ bool
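
For illustration only, not part of the patch: a hypothetical platform whose
CPU reorders accesses past SC would select WEAK_REORDERING_BEYOND_LLSC, and
code publishing data after an atomic update would then get a real sync from
the smp_llsc_mb() helper introduced further down. The function and its
arguments below are made up for the sketch; atomic_inc() and smp_llsc_mb()
are the real interfaces.

/*
 * Hypothetical sketch of the hazard this option describes: without a
 * barrier after the LL/SC sequence, a CPU that reorders beyond SC could
 * make the flag store visible before the atomic update.
 */
static inline void publish_after_atomic(atomic_t *v, int *flag)
{
	atomic_inc(v);		/* LL/SC loop on MIPS SMP */
	smp_llsc_mb();		/* sync only if WEAK_REORDERING_BEYOND_LLSC */
	*flag = 1;		/* safe to publish now */
}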
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
if (cpu_has_llsc && R10000_LLSC_WAR) {
	unsigned long temp;
	raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
-#define smp_mb__before_atomic_dec() smp_mb()
-#define smp_mb__after_atomic_dec() smp_mb()
-#define smp_mb__before_atomic_inc() smp_mb()
-#define smp_mb__after_atomic_inc() smp_mb()
+#define smp_mb__before_atomic_dec() smp_llsc_mb()
+#define smp_mb__after_atomic_dec() smp_llsc_mb()
+#define smp_mb__before_atomic_inc() smp_llsc_mb()
+#define smp_mb__after_atomic_inc() smp_llsc_mb()
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */
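
These wrappers let generic code pair a barrier with a non-returning atomic
op while MIPS relaxes the barrier to what LL/SC reordering actually
requires. A hedged sketch of the usual caller pattern; the struct and
function are invented for illustration:

struct obj {
	atomic_t refcnt;
	int data;
};

/* Illustrative only: stores before the dec must be visible before the
 * counter drops. After this patch the barrier costs a sync only on
 * CPUs that reorder beyond LL/SC.
 */
static void obj_put(struct obj *o)
{
	o->data = 0;
	smp_mb__before_atomic_dec();
	atomic_dec(&o->refcnt);
}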
#else
#define __WEAK_ORDERING_MB " \n"
#endif
+#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
+#define __WEAK_LLSC_MB " sync \n"
+#else
+#define __WEAK_LLSC_MB " \n"
+#endif
#define smp_mb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define smp_rmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define smp_wmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define set_mb(var, value) \
	do { var = value; smp_mb(); } while (0)
+#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_rmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_wmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+
#endif /* __ASM_BARRIER_H */
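
Spelled out by hand, the two possible expansions of the new helper: with
both options set smp_llsc_mb() emits a sync, otherwise it degenerates to a
pure compiler barrier.

/* CONFIG_WEAK_REORDERING_BEYOND_LLSC && CONFIG_SMP: a real sync */
__asm__ __volatile__(" sync \n" : : : "memory");

/* otherwise: no instruction emitted, just a compiler barrier */
__asm__ __volatile__(" \n" : : : "memory");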
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
-#define smp_mb__before_clear_bit() smp_mb()
-#define smp_mb__after_clear_bit() smp_mb()
+#define smp_mb__before_clear_bit() smp_llsc_mb()
+#define smp_mb__after_clear_bit() smp_llsc_mb()
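
Same idea as the atomic wrappers above: an unlock-style caller clears a bit
to release something and needs its prior stores ordered first. A sketch
with an invented bitmap and helper:

/* Illustrative only: the payload store must not pass the bit clear. */
static void release_slot(unsigned long *bitmap, int nr, int *payload)
{
	*payload = 0;			/* work done while holding the bit */
	smp_mb__before_clear_bit();	/* order it before the release */
	clear_bit(nr, bitmap);
}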
/*
 * set_bit - Atomically set a bit in memory
raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
raw_local_irq_restore(flags);
}

-	smp_mb();
+	smp_llsc_mb();
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqzl $1, 1b \n" \
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqzl $1, 1b \n" \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqz $1, 1b \n" \
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqz $1, 1b \n" \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqzl $1, 1b \n"
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqzl $1, 1b \n"
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqz $1, 1b \n"
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqz $1, 1b \n"
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
}

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
}

/* Note the use of sub, not subu which will make the kernel die with an
if (R10000_LLSC_WAR) {
	__asm__ __volatile__(
}

static inline void __raw_write_unlock(raw_rwlock_t *rw)
" .set reorder \n"
" beqzl %1, 1b \n"
" nop \n"
" .set reorder \n"
" beqzl %1, 1b \n"
" nop \n"
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" beqz %1, 1b \n"
" nop \n"
" .set reorder \n"
" beqz %1, 1b \n"
" nop \n"
" .set reorder \n"
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
" sc %1, %0 \n"
" beqzl %1, 1b \n"
" nop \n"
" sc %1, %0 \n"
" beqzl %1, 1b \n"
" nop \n"
" li %2, 1 \n"
" .set reorder \n"
"2: \n"
" li %2, 1 \n"
" .set reorder \n"
"2: \n"
" beqz %1, 3f \n"
" li %2, 1 \n"
"2: \n"
" beqz %1, 3f \n"
" li %2, 1 \n"
"2: \n"
" .subsection 2 \n"
"3: b 1b \n"
" li %2, 0 \n"
" .subsection 2 \n"
"3: b 1b \n"
" li %2, 0 \n"
raw_local_irq_restore(flags); /* implies memory barrier */
}

-	smp_mb();
+	smp_llsc_mb();
raw_local_irq_restore(flags); /* implies memory barrier */
}

-	smp_mb();
+	smp_llsc_mb();
raw_local_irq_restore(flags); /* implies memory barrier */
}

-	smp_mb();
+	smp_llsc_mb();
raw_local_irq_restore(flags); /* implies memory barrier */
}

-	smp_mb();
+	smp_llsc_mb();
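
The comment repeated above is why the fallback paths need nothing new:
when LL/SC is unavailable the update runs with interrupts disabled and
raw_local_irq_restore() already acts as the memory barrier. A sketch of
that shape, simplified from the real __xchg_u32 else branch:

/* Illustrative only: non-LL/SC fallback; the irq restore is the barrier. */
static inline unsigned long xchg_u32_fallback(volatile int *m, unsigned long val)
{
	unsigned long flags, retval;

	raw_local_irq_save(flags);
	retval = *m;
	*m = val;
	raw_local_irq_restore(flags);	/* implies memory barrier */

	return retval;
}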