summary | refs | log | tree | commit | diff
path: root/arch/arm
diff options
context:
space:
mode:
author    Gary King <gking@nvidia.com>  2009-12-07 22:05:28 -0800
committer Gary King <gking@nvidia.com>  2009-12-08 13:53:07 -0800
commit    9044d18f56010ded3c0ec396aa2b630b6a9920b2 (patch)
tree      49b8cdf61d66ad7e285fa4569ef42d8e13951073 /arch/arm
parent    74b76bad6200c1d9ce781056fa86fb2c32366201 (diff)
ARM: add memory barriers around atomic operations
Diffstat (limited to 'arch/arm')
-rw-r--r--  arch/arm/include/asm/atomic.h  | 16
-rw-r--r--  arch/arm/include/asm/system.h  |  4
-rw-r--r--  arch/arm/lib/bitops.h          |  2
3 files changed, 22 insertions, 0 deletions
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index ce438d119a33..906dd727a5d3 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -54,6 +54,8 @@ static inline int atomic_add_return(int i, atomic_t *v)
unsigned long tmp;
int result;
+ smp_mb();
+
do {
__asm__ __volatile__("@ atomic_add_return\n"
"1: ldrex %0, [%2]\n"
@@ -64,6 +66,8 @@ static inline int atomic_add_return(int i, atomic_t *v)
: "cc");
} while (tmp && atomic_backoff_delay());
+ smp_mb();
+
return result;
}
@@ -72,6 +76,8 @@ static inline int atomic_sub_return(int i, atomic_t *v)
unsigned long tmp;
int result;
+ smp_mb();
+
do {
__asm__ __volatile__("@ atomic_sub_return\n"
"1: ldrex %0, [%2]\n"
@@ -82,6 +88,8 @@ static inline int atomic_sub_return(int i, atomic_t *v)
: "cc");
} while (tmp && atomic_backoff_delay());
+ smp_mb();
+
return result;
}
@@ -89,6 +97,8 @@ static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
{
unsigned long oldval, res;
+ smp_mb();
+
do {
__asm__ __volatile__("@ atomic_cmpxchg\n"
"ldrex %1, [%2]\n"
@@ -101,6 +111,8 @@ static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
: "cc");
} while (res && atomic_backoff_delay());
+ smp_mb();
+
return oldval;
}
@@ -108,6 +120,8 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
unsigned long tmp, tmp2;
+ smp_mb();
+
do {
__asm__ __volatile__("@ atomic_clear_mask\n"
"1: ldrex %0, [%2]\n"
@@ -117,6 +131,8 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
: "r" (addr), "Ir" (mask)
: "cc");
} while (tmp && atomic_backoff_delay());
+
+ smp_mb();
}
#else /* ARM_ARCH_6 */
diff --git a/arch/arm/include/asm/system.h b/arch/arm/include/asm/system.h
index 9eeda3c177de..f149e9ffab30 100644
--- a/arch/arm/include/asm/system.h
+++ b/arch/arm/include/asm/system.h
@@ -245,6 +245,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
unsigned int tmp;
#endif
+ smp_mb();
+
switch (size) {
#if __LINUX_ARM_ARCH__ >= 6
case 1:
@@ -305,6 +307,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
break;
}
+ smp_mb();
+
return ret;
}
diff --git a/arch/arm/lib/bitops.h b/arch/arm/lib/bitops.h
index 28c7e4e1ef76..3200d82671b8 100644
--- a/arch/arm/lib/bitops.h
+++ b/arch/arm/lib/bitops.h
@@ -12,11 +12,13 @@
3: subs r0, r0, #1
bpl 3b
#endif
+ smp_dmb
1: ldrexb r2, [r1]
\instr r2, r2, r3
strexb r0, r2, [r1]
cmp r0, #0
bne 1b
+ smp_dmb
mov pc, lr
.endm