From f252ffd50c97dae87b45f1dbad24f71358ccfbd6 Mon Sep 17 00:00:00 2001
From: David Daney
Date: Fri, 8 Jan 2010 17:17:43 -0800
Subject: MIPS: New macro smp_mb__before_llsc.

Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write barrier semantics.

The default implementation of smp_mb__before_llsc() is just
smp_llsc_mb(), so there are no changes in semantics.

Also simplify definition of smp_mb(), smp_rmb(), and smp_wmb() to be
just barrier() in the non-SMP case.

Signed-off-by: David Daney
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle
---
 arch/mips/include/asm/spinlock.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'arch/mips/include/asm/spinlock.h')

diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 21ef9efbde43..5f16696eaa00 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__ (
@@ -305,7 +305,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
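
A note on the pattern (not part of the commit): both hunks above implement a
"release" sequence, where a full barrier is issued before the ll/sc loop that
drops the lock, so every access inside the critical section becomes visible
to other CPUs before the lock appears free. The minimal, self-contained C
sketch below illustrates that ordering under stated assumptions:
__sync_synchronize() is used as a portable stand-in for the MIPS "sync"
instruction that smp_llsc_mb() emits, and toy_lock_word/toy_unlock() are
hypothetical names invented for this example, not kernel identifiers.

/* Sketch only -- portable stand-in for the "sync"-based smp_llsc_mb(). */
#define smp_llsc_mb()		__sync_synchronize()

/*
 * The default introduced by the patch: the barrier placed before an
 * ll/sc sequence is simply the existing full barrier.
 */
#define smp_mb__before_llsc()	smp_llsc_mb()

static volatile int toy_lock_word;	/* hypothetical lock word */

static void toy_unlock(void)
{
	/* Make all prior critical-section accesses visible first... */
	smp_mb__before_llsc();
	/* ...then release; this plain store stands in for the ll/sc loop. */
	toy_lock_word = 0;
}

int main(void)
{
	toy_lock_word = 1;	/* pretend the lock is held */
	toy_unlock();
	return toy_lock_word;	/* 0 == lock released */
}

The value of the rename is documentation: call sites now state that they only
need write-barrier semantics ahead of an ll/sc sequence, which presumably
leaves room for an implementation to override smp_mb__before_llsc() with
something cheaper than a full smp_llsc_mb() without touching the call sites.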