X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=include%2Fasm-cris%2Fatomic.h;h=0b51a87e5532e9025111b6c5177bf8c964d58f22;hb=a5c43dae7ae38c2a6b3e9a819bcf45f010bf6a4a;hp=70605b09e8b73a324affede0a8887807275fe1c1;hpb=2fca877b68b2b4fc5b94277858a1bedd46017cde;p=powerpc.git

diff --git a/include/asm-cris/atomic.h b/include/asm-cris/atomic.h
index 70605b09e8..0b51a87e55 100644
--- a/include/asm-cris/atomic.h
+++ b/include/asm-cris/atomic.h
@@ -20,7 +20,7 @@ typedef struct { volatile int counter; } atomic_t;
 
 /* These should be written in asm but we do it in C for now. */
 
-extern __inline__ void atomic_add(int i, volatile atomic_t *v)
+static inline void atomic_add(int i, volatile atomic_t *v)
 {
 	unsigned long flags;
 	cris_atomic_save(v, flags);
@@ -28,7 +28,7 @@ extern __inline__ void atomic_add(int i, volatile atomic_t *v)
 	cris_atomic_restore(v, flags);
 }
 
-extern __inline__ void atomic_sub(int i, volatile atomic_t *v)
+static inline void atomic_sub(int i, volatile atomic_t *v)
 {
 	unsigned long flags;
 	cris_atomic_save(v, flags);
@@ -36,7 +36,7 @@ extern __inline__ void atomic_sub(int i, volatile atomic_t *v)
 	cris_atomic_restore(v, flags);
 }
 
-extern __inline__ int atomic_add_return(int i, volatile atomic_t *v)
+static inline int atomic_add_return(int i, volatile atomic_t *v)
 {
 	unsigned long flags;
 	int retval;
@@ -48,7 +48,7 @@ extern __inline__ int atomic_add_return(int i, volatile atomic_t *v)
 
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 
-extern __inline__ int atomic_sub_return(int i, volatile atomic_t *v)
+static inline int atomic_sub_return(int i, volatile atomic_t *v)
 {
 	unsigned long flags;
 	int retval;
@@ -58,7 +58,7 @@ extern __inline__ int atomic_sub_return(int i, volatile atomic_t *v)
 	return retval;
 }
 
-extern __inline__ int atomic_sub_and_test(int i, volatile atomic_t *v)
+static inline int atomic_sub_and_test(int i, volatile atomic_t *v)
 {
 	int retval;
 	unsigned long flags;
@@ -68,7 +68,7 @@ extern __inline__ int atomic_sub_and_test(int i, volatile atomic_t *v)
 	return retval;
 }
 
-extern __inline__ void atomic_inc(volatile atomic_t *v)
+static inline void atomic_inc(volatile atomic_t *v)
 {
 	unsigned long flags;
 	cris_atomic_save(v, flags);
@@ -76,7 +76,7 @@ extern __inline__ void atomic_inc(volatile atomic_t *v)
 	cris_atomic_restore(v, flags);
 }
 
-extern __inline__ void atomic_dec(volatile atomic_t *v)
+static inline void atomic_dec(volatile atomic_t *v)
 {
 	unsigned long flags;
 	cris_atomic_save(v, flags);
@@ -84,7 +84,7 @@ extern __inline__ void atomic_dec(volatile atomic_t *v)
 	cris_atomic_restore(v, flags);
 }
 
-extern __inline__ int atomic_inc_return(volatile atomic_t *v)
+static inline int atomic_inc_return(volatile atomic_t *v)
 {
 	unsigned long flags;
 	int retval;
@@ -94,7 +94,7 @@ extern __inline__ int atomic_inc_return(volatile atomic_t *v)
 	return retval;
 }
 
-extern __inline__ int atomic_dec_return(volatile atomic_t *v)
+static inline int atomic_dec_return(volatile atomic_t *v)
 {
 	unsigned long flags;
 	int retval;
@@ -103,7 +103,7 @@ extern __inline__ int atomic_dec_return(volatile atomic_t *v)
 	cris_atomic_restore(v, flags);
 	return retval;
 }
-extern __inline__ int atomic_dec_and_test(volatile atomic_t *v)
+static inline int atomic_dec_and_test(volatile atomic_t *v)
 {
 	int retval;
 	unsigned long flags;
@@ -113,7 +113,7 @@ extern __inline__ int atomic_dec_and_test(volatile atomic_t *v)
 	return retval;
 }
 
-extern __inline__ int atomic_inc_and_test(volatile atomic_t *v)
+static inline int atomic_inc_and_test(volatile atomic_t *v)
 {
 	int retval;
 	unsigned long flags;
@@ -123,10 +123,40 @@ extern __inline__ int atomic_inc_and_test(volatile atomic_t *v)
 	return retval;
 }
 
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	int ret;
+	unsigned long flags;
+
+	cris_atomic_save(v, flags);
+	ret = v->counter;
+	if (likely(ret == old))
+		v->counter = new;
+	cris_atomic_restore(v, flags);
+	return ret;
+}
+
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int ret;
+	unsigned long flags;
+
+	cris_atomic_save(v, flags);
+	ret = v->counter;
+	if (ret != u)
+		v->counter += a;
+	cris_atomic_restore(v, flags);
+	return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()    barrier()
 #define smp_mb__after_atomic_dec()     barrier()
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
+#include <asm-generic/atomic.h>
 #endif
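
Below is a minimal usage sketch (not part of the patch) of the primitives this change adds for CRIS. The names struct foo, try_get_foo, put_foo and mark_once are hypothetical; the assumed semantics are the generic atomic_t ones this header provides: atomic_add_unless() adds only while the counter differs from u and reports whether it did, atomic_inc_not_zero() is the usual guard for taking a reference that may already be dropping to zero, and atomic_cmpxchg() returns the value it found.

/* Illustration only -- hypothetical caller of the atomic_t API above. */
struct foo {
	atomic_t refcount;
};

static int try_get_foo(struct foo *f)
{
	/* Non-zero only if refcount was not already zero. */
	return atomic_inc_not_zero(&f->refcount);
}

static void put_foo(struct foo *f)
{
	/* True when this call dropped the count to zero. */
	if (atomic_dec_and_test(&f->refcount)) {
		/* last reference gone: release f here */
	}
}

static int mark_once(atomic_t *state)
{
	/* Transition 0 -> 1 exactly once; returns 1 only to the winner. */
	return atomic_cmpxchg(state, 0, 1) == 0;
}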