atomic.h: complete atomic_long operations in asm-generic
author Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Tue, 8 May 2007 07:34:19 +0000 (00:34 -0700)
committer Linus Torvalds <torvalds@woody.linux-foundation.org>
Tue, 8 May 2007 18:15:19 +0000 (11:15 -0700)
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
include/asm-generic/atomic.h

index b7e4a04..5ae6dce 100644
@@ -9,6 +9,7 @@
  */
 
 #include <asm/types.h>
+#include <asm/system.h>
 
 /*
  * Support for atomic_long_t
@@ -66,6 +67,72 @@ static inline void atomic_long_sub(long i, atomic_long_t *l)
        atomic64_sub(i, v);
 }
 
+static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return atomic64_sub_and_test(i, v);
+}
+
+static inline int atomic_long_dec_and_test(atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return atomic64_dec_and_test(v);
+}
+
+static inline int atomic_long_inc_and_test(atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return atomic64_inc_and_test(v);
+}
+
+static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return atomic64_add_negative(i, v);
+}
+
+static inline long atomic_long_add_return(long i, atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return (long)atomic64_add_return(i, v);
+}
+
+static inline long atomic_long_sub_return(long i, atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return (long)atomic64_sub_return(i, v);
+}
+
+static inline long atomic_long_inc_return(atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return (long)atomic64_inc_return(v);
+}
+
+static inline long atomic_long_dec_return(atomic_long_t *l)
+{
+       atomic64_t *v = (atomic64_t *)l;
+
+       return (long)atomic64_dec_return(v);
+}
+
+#define atomic_long_add_unless(l, a, u) \
+       atomic64_add_unless((atomic64_t *)(l), (a), (u))
+
+#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
+
+#define atomic_long_cmpxchg(l, old, new) \
+       (atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
+#define atomic_long_xchg(l, new) \
+       (atomic64_xchg((atomic64_t *)(l), (new)))
+
 #else  /*  BITS_PER_LONG == 64  */
 
 typedef atomic_t atomic_long_t;
@@ -113,6 +180,72 @@ static inline void atomic_long_sub(long i, atomic_long_t *l)
        atomic_sub(i, v);
 }
 
+static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return atomic_sub_and_test(i, v);
+}
+
+static inline int atomic_long_dec_and_test(atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return atomic_dec_and_test(v);
+}
+
+static inline int atomic_long_inc_and_test(atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return atomic_inc_and_test(v);
+}
+
+static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return atomic_add_negative(i, v);
+}
+
+static inline long atomic_long_add_return(long i, atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return (long)atomic_add_return(i, v);
+}
+
+static inline long atomic_long_sub_return(long i, atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return (long)atomic_sub_return(i, v);
+}
+
+static inline long atomic_long_inc_return(atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return (long)atomic_inc_return(v);
+}
+
+static inline long atomic_long_dec_return(atomic_long_t *l)
+{
+       atomic_t *v = (atomic_t *)l;
+
+       return (long)atomic_dec_return(v);
+}
+
+#define atomic_long_add_unless(l, a, u) \
+       atomic_add_unless((atomic_t *)(l), (a), (u))
+
+#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
+
+#define atomic_long_cmpxchg(l, old, new) \
+       (atomic_cmpxchg((atomic_t *)(l), (old), (new)))
+#define atomic_long_xchg(l, new) \
+       (atomic_xchg((atomic_t *)(l), (new)))
+
 #endif  /*  BITS_PER_LONG == 64  */
 
 #endif  /*  _ASM_GENERIC_ATOMIC_H  */
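
A minimal usage sketch of the completed API (the nr_refs counter and the helper
functions below are illustrative, not part of the patch): the same atomic_long_*
calls map to atomic_* on 32-bit kernels and to atomic64_* on 64-bit ones, so
callers never need to check BITS_PER_LONG themselves.

    #include <asm/atomic.h>

    /* hypothetical reference counter sized to a native long */
    static atomic_long_t nr_refs = ATOMIC_LONG_INIT(0);

    static void get_ref(void)
    {
            atomic_long_inc(&nr_refs);      /* pre-existing helper */
    }

    static int put_ref(void)
    {
            /* newly added helper: returns nonzero when the count reaches zero */
            return atomic_long_dec_and_test(&nr_refs);
    }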