X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=include%2Fasm-i386%2Fatomic.h;h=509720be772a4121aad122b2ec5fd9cf83a7cdf0;hb=6a77f38946aaee1cd85eeec6cf4229b204c15071;hp=4df45c5e4b167a9916f7364baa07ddf8100d9ab8;hpb=5273a3df6485dc2ad6aa7ddd441b9a21970f003b;p=linux-2.6.git

diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index 4df45c5e4..509720be7 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -2,6 +2,8 @@
 #define __ARCH_I386_ATOMIC__
 
 #include <linux/config.h>
+#include <linux/compiler.h>
+#include <asm/processor.h>
 
 /*
  * Atomic operations that C can't guarantee us. Useful for
@@ -176,6 +178,46 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 	return c;
 }
 
+/**
+ * atomic_add_return - add and return
+ * @v: pointer of type atomic_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static __inline__ int atomic_add_return(int i, atomic_t *v)
+{
+	int __i;
+#ifdef CONFIG_M386
+	if(unlikely(boot_cpu_data.x86==3))
+		goto no_xadd;
+#endif
+	/* Modern 486+ processor */
+	__i = i;
+	__asm__ __volatile__(
+		LOCK "xaddl %0, %1;"
+		:"=r"(i)
+		:"m"(v->counter), "0"(i));
+	return i + __i;
+
+#ifdef CONFIG_M386
+no_xadd: /* Legacy 386 processor */
+	local_irq_disable();
+	__i = atomic_read(v);
+	atomic_set(v, i + __i);
+	local_irq_enable();
+	return i + __i;
+#endif
+}
+
+static __inline__ int atomic_sub_return(int i, atomic_t *v)
+{
+	return atomic_add_return(-i,v);
+}
+
+#define atomic_inc_return(v)  (atomic_add_return(1,v))
+#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
 __asm__ __volatile__(LOCK "andl %0,%1" \
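
The new atomic_add_return() above relies on the x86 xadd (exchange-and-add) instruction: a lock-prefixed xaddl adds its register operand into the memory operand and leaves the memory operand's previous value in the register, so the new counter value is simply the old value plus the increment. The stand-alone sketch below is not part of the patch; it reproduces the same technique in user space, assumes GCC or Clang on an x86 machine, and the names demo_add_return and counter are hypothetical, introduced only for this illustration.

#include <stdio.h>

/*
 * Sketch of the xadd technique used by atomic_add_return():
 * atomically add i to *v and return the resulting value.
 */
static int demo_add_return(int i, int *v)
{
	int old = i;

	/* lock xaddl: *v += old, and old receives the previous *v */
	__asm__ __volatile__(
		"lock; xaddl %0, %1"
		: "+r" (old), "+m" (*v)
		: : "memory");

	return old + i;	/* previous value + increment = new value */
}

int main(void)
{
	int counter = 40;

	printf("%d\n", demo_add_return(2, &counter));	/* 42 */
	printf("%d\n", demo_add_return(-2, &counter));	/* 40, like atomic_sub_return(2, ...) */
	return 0;
}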