X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=include%2Fasm-powerpc%2Fatomic.h;h=147a38dcc766ed20129a497080bbcb42cd1026b2;hb=e6fdde8aad83f83e942f741df8907c6c45e58866;hp=53283e2540b381ff086508d69052dd1b188bc6d1;hpb=3944158a6d33f94668dbd6bdc32ff5c67bb53ec2;p=linux-2.6.git

diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index 53283e254..147a38dcc 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -8,7 +8,6 @@ typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
-#include
 #include
 #include
 
@@ -27,8 +26,8 @@ static __inline__ void atomic_add(int a, atomic_t *v)
 	PPC405_ERR77(0,%3)
 "	stwcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -63,8 +62,8 @@ static __inline__ void atomic_sub(int a, atomic_t *v)
 	PPC405_ERR77(0,%3)
 "	stwcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -97,8 +96,8 @@ static __inline__ void atomic_inc(atomic_t *v)
 	PPC405_ERR77(0,%2)
 "	stwcx.	%0,0,%2 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -141,8 +140,8 @@ static __inline__ void atomic_dec(atomic_t *v)
 	PPC405_ERR77(0,%2)\
 "	stwcx.	%0,0,%2\n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -177,29 +176,20 @@ static __inline__ int atomic_dec_return(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
-{
-	int t;
-
-	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
-"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
-	cmpw	0,%0,%3 \n\
-	beq-	2f \n\
-	add	%0,%2,%0 \n"
-	PPC405_ERR77(0,%2)
-"	stwcx.	%0,0,%1 \n\
-	bne-	1b \n"
-	ISYNC_ON_SMP
-"	subf	%0,%2,%0 \n\
-2:"
-	: "=&r" (t)
-	: "r" (&v->counter), "r" (a), "r" (u)
-	: "cc", "memory");
-
-	return t != u;
-}
-
+#define atomic_add_unless(v, a, u)			\
+({							\
+	int c, old;					\
+	c = atomic_read(v);				\
+	for (;;) {					\
+		if (unlikely(c == (u)))			\
+			break;				\
+		old = atomic_cmpxchg((v), c, c + (a));	\
+		if (likely(old == c))			\
+			break;				\
+		c = old;				\
+	}						\
+	c != (u);					\
+})
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
@@ -253,8 +243,8 @@ static __inline__ void atomic64_add(long a, atomic64_t *v)
 	add	%0,%2,%0\n\
 	stdcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -287,8 +277,8 @@ static __inline__ void atomic64_sub(long a, atomic64_t *v)
 	subf	%0,%2,%0\n\
 	stdcx.	%0,0,%3 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -319,8 +309,8 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%2 \n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
@@ -361,8 +351,8 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%2\n\
 	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
 	: "cc");
 }
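
Note on the diff above (commentary and sketch, not part of the patch). Most hunks only change how the inline asm describes v->counter to the compiler: the "+m" operand marks the counter as read and written in place, while the form on the "+" side of this diff lists it twice, once as an "=m" output and again as an "m" input. The larger hunk swaps the hand-written lwarx/stwcx. implementation of atomic_add_unless() for a compare-and-swap retry loop built on atomic_cmpxchg(). The following standalone userspace sketch shows that same retry loop under stated assumptions: GCC's __sync_val_compare_and_swap() builtin stands in for the kernel's atomic_cmpxchg(), and the function name add_unless() plus the values in main() are invented for illustration only.

/*
 * Illustrative userspace sketch of the retry loop used by the
 * cmpxchg-based atomic_add_unless() macro above.  Add `a' to
 * *counter unless it currently equals `u'; return non-zero if
 * the add happened.  GCC's __sync_val_compare_and_swap() stands
 * in here for the kernel's atomic_cmpxchg().
 */
#include <stdio.h>

static int add_unless(volatile int *counter, int a, int u)
{
	int c, old;

	c = *counter;			/* snapshot the current value */
	for (;;) {
		if (c == u)		/* forbidden value: do not add */
			break;
		/* try to swap c -> c + a; returns the value actually found */
		old = __sync_val_compare_and_swap(counter, c, c + a);
		if (old == c)		/* nobody raced us: add succeeded */
			break;
		c = old;		/* lost the race: retry with new value */
	}
	return c != u;			/* non-zero iff the add was performed */
}

int main(void)
{
	volatile int v = 5;

	printf("%d %d\n", add_unless(&v, 1, 0), (int)v);	/* 1 6: added */
	v = 0;
	printf("%d %d\n", add_unless(&v, 1, 0), (int)v);	/* 0 0: v was u */
	return 0;
}

The sketch builds with a plain gcc -O2 on any GCC recent enough to provide the __sync builtins; in the kernel itself the macro relies on atomic_read() and atomic_cmpxchg() as shown in the "+" lines of the hunk.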