X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=include%2Fasm-powerpc%2Fatomic.h;h=53283e2540b381ff086508d69052dd1b188bc6d1;hb=dd0ec16fa6cf2498b831663a543e1b67fce6e155;hp=ec4b14468959e295dc29ebbd0b212b6899597f70;hpb=97f2aab6698f3ab2552c41c1024a65ffd0763a6d;p=powerpc.git

diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ec4b144689..53283e2540 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -8,6 +8,7 @@
 typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
+#include <linux/compiler.h>
 #include <asm/synch.h>
 #include <asm/asm-compat.h>
 
@@ -26,8 +27,8 @@ static __inline__ void atomic_add(int a, atomic_t *v)
 	PPC405_ERR77(0,%3)
 "	stwcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
 	: "cc");
 }
 
@@ -36,7 +37,7 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_add_return\n\
 	add	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
@@ -62,8 +63,8 @@ static __inline__ void atomic_sub(int a, atomic_t *v)
 	PPC405_ERR77(0,%3)
 "	stwcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
 	: "cc");
 }
 
@@ -72,7 +73,7 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_sub_return\n\
 	subf	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
@@ -96,8 +97,8 @@ static __inline__ void atomic_inc(atomic_t *v)
 	PPC405_ERR77(0,%2)
 "	stwcx.	%0,0,%2 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
 	: "cc");
 }
 
@@ -106,7 +107,7 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_inc_return\n\
 	addic	%0,%0,1\n"
 	PPC405_ERR77(0,%1)
@@ -140,8 +141,8 @@ static __inline__ void atomic_dec(atomic_t *v)
 	PPC405_ERR77(0,%2)\
 "	stwcx.	%0,0,%2\n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
 	: "cc");
 }
 
@@ -150,7 +151,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_return\n\
 	addic	%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
@@ -165,6 +166,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
@@ -175,20 +177,29 @@ static __inline__ int atomic_dec_return(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	int c, old;					\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int t;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
+	cmpw	0,%0,%3 \n\
+	beq-	2f \n\
+	add	%0,%2,%0 \n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%0,0,%1 \n\
+	bne-	1b \n"
+	ISYNC_ON_SMP
+"	subf	%0,%2,%0 \n\
+2:"
+	: "=&r" (t)
+	: "r" (&v->counter), "r" (a), "r" (u)
+	: "cc", "memory");
+
+	return t != u;
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
@@ -203,7 +214,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n"
@@ -242,8 +253,8 @@ static __inline__ void atomic64_add(long a, atomic64_t *v)
 	add	%0,%2,%0\n\
 	stdcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
 	: "cc");
 }
 
@@ -252,7 +263,7 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_add_return\n\
 	add	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -276,8 +287,8 @@ static __inline__ void atomic64_sub(long a, atomic64_t *v)
 	subf	%0,%2,%0\n\
 	stdcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
 	: "cc");
 }
 
@@ -286,7 +297,7 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
 	subf	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -308,8 +319,8 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%2 \n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
 	: "cc");
 }
 
@@ -318,7 +329,7 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%1 \n\
@@ -350,8 +361,8 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%2\n\
 	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
 	: "cc");
 }
 
@@ -360,7 +371,7 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%1\n\
@@ -385,7 +396,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n\
@@ -402,5 +413,6 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 
 #endif /* __powerpc64__ */
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
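
A note on the constraint change that recurs throughout this diff: the old asm bodies declared "=m" (v->counter) as a write-only output and then passed the same location again as a separate "m" input, while the new ones use the single read-write operand "+m" (v->counter). The toy program below is a sketch, not part of the patch (userspace, PowerPC only; the asm_inc() name is hypothetical). It shows the same lwarx/stwcx. pattern with the corrected constraint: "+m" tells the compiler that the asm both reads and writes *p, so pending stores to *p must be flushed before the asm and any cached value discarded afterwards, with no duplicated operand.

	#include <stdio.h>

	/* Hypothetical illustration: atomically increment *p with lwarx/stwcx. */
	static inline void asm_inc(int *p)
	{
		int t;

		__asm__ __volatile__(
	"1:	lwarx	%0,0,%2\n"	/* t = *p, and set the reservation */
	"	addic	%0,%0,1\n"	/* t += 1 */
	"	stwcx.	%0,0,%2\n"	/* store t iff the reservation still holds */
	"	bne-	1b"		/* reservation lost: retry */
		: "=&r" (t), "+m" (*p)	/* "+m": *p is both read and written */
		: "r" (p)
		: "cc");
	}

	int main(void)
	{
		int x = 41;

		asm_inc(&x);
		printf("%d\n", x);	/* prints 42 */
		return 0;
	}

The same reasoning applies to every atomic_*() and atomic64_*() routine touched above; only the access width (lwarx/stwcx. versus ldarx/stdcx.) differs.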
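Since the kernel-doc comment above atomic_add_unless() is terse about why the primitive exists, here is a minimal usage sketch, again not part of the patch (struct obj, obj_get() and obj_put() are hypothetical names): a reference count that must never be revived once it has reached zero, which is the pattern atomic_inc_not_zero() serves.

	#include <linux/slab.h>		/* kfree() */
	#include <asm/atomic.h>		/* atomic_t API, as spelled in this era's tree */

	/* Hypothetical refcounted object. */
	struct obj {
		atomic_t refcount;	/* holds 1 while the object is live */
		/* ... payload ... */
	};

	/*
	 * Take a reference only if the object is still live.
	 * atomic_inc_not_zero(v) is atomic_add_unless(v, 1, 0): it increments
	 * and returns non-zero only when the count was not already zero, so a
	 * dead object can never be resurrected by a racing lookup.
	 */
	static inline struct obj *obj_get(struct obj *o)
	{
		return atomic_inc_not_zero(&o->refcount) ? o : NULL;
	}

	/* Drop a reference; free the object on the last put. */
	static inline void obj_put(struct obj *o)
	{
		if (atomic_dec_return(&o->refcount) == 0)
			kfree(o);
	}

The LWSYNC_ON_SMP/ISYNC_ON_SMP pair bracketing the new lwarx/stwcx. loop is what gives a successful obj_get() its barrier semantics: accesses to the payload cannot be reordered before the count check succeeds.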