Diffstat (limited to 'include/asm-ia64/atomic.h')
-rw-r--r--  include/asm-ia64/atomic.h  59
1 file changed, 31 insertions(+), 28 deletions(-)
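
The change below converts the atomic_add_unless()/atomic64_add_unless() macros into inline functions built around the same atomic_cmpxchg() retry loop. As a rough illustration of that loop's semantics (not the kernel code itself), here is a minimal userspace sketch that substitutes GCC's __atomic builtins for the kernel's atomic_cmpxchg(); the add_unless() helper and the refcount scenario are purely illustrative:

    #include <stdio.h>

    /* Userspace analogue of the kernel's atomic_add_unless() loop:
     * add 'a' to *v unless *v == u; return non-zero if the add happened. */
    static int add_unless(int *v, int a, int u)
    {
            int c = __atomic_load_n(v, __ATOMIC_RELAXED);

            for (;;) {
                    if (c == u)
                            break;
                    /* On failure, the builtin reloads the current value
                     * of *v into c and we retry, just like the c = old
                     * step in the kernel loop. */
                    if (__atomic_compare_exchange_n(v, &c, c + a, 0,
                                                    __ATOMIC_SEQ_CST,
                                                    __ATOMIC_RELAXED))
                            break;
            }
            return c != u;
    }

    int main(void)
    {
            int refcount = 1;

            /* "inc_not_zero": take a reference only if the object is live. */
            if (add_unless(&refcount, 1, 0))
                    printf("got reference, refcount = %d\n", refcount);

            refcount = 0;
            if (!add_unless(&refcount, 1, 0))
                    printf("object already dead, refcount stays 0\n");

            return 0;
    }

As in the kernel version, the function returns non-zero exactly when the addition took place, which is what makes the atomic_inc_not_zero() wrapper suitable for "grab a reference unless the count has already dropped to zero" patterns.
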
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index b16ad235c7ee..1fc3b83325da 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -15,6 +15,7 @@
#include <linux/types.h>
#include <asm/intrinsics.h>
+#include <asm/system.h>
/*
* On IA-64, counter must always be volatile to ensure that that the
@@ -95,36 +96,38 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
-#define atomic_add_unless(v, a, u) \
-({ \
- __typeof__(v->counter) c, old; \
- c = atomic_read(v); \
- for (;;) { \
- if (unlikely(c == (u))) \
- break; \
- old = atomic_cmpxchg((v), c, c + (a)); \
- if (likely(old == c)) \
- break; \
- c = old; \
- } \
- c != (u); \
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+ int c, old;
+ c = atomic_read(v);
+ for (;;) {
+ if (unlikely(c == (u)))
+ break;
+ old = atomic_cmpxchg((v), c, c + (a));
+ if (likely(old == c))
+ break;
+ c = old;
+ }
+ return c != (u);
+}
+
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-#define atomic64_add_unless(v, a, u) \
-({ \
- __typeof__(v->counter) c, old; \
- c = atomic64_read(v); \
- for (;;) { \
- if (unlikely(c == (u))) \
- break; \
- old = atomic64_cmpxchg((v), c, c + (a)); \
- if (likely(old == c)) \
- break; \
- c = old; \
- } \
- c != (u); \
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+ long c, old;
+ c = atomic64_read(v);
+ for (;;) {
+ if (unlikely(c == (u)))
+ break;
+ old = atomic64_cmpxchg((v), c, c + (a));
+ if (likely(old == c))
+ break;
+ c = old;
+ }
+ return c != (u);
+}
+
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#define atomic_add_return(i,v) \