#define atomic_set(v, i) (((v)->counter) = i)
#define atomic64_set(v, i) (((v)->counter) = i)
-extern int __atomic_add(int, atomic_t *);
-extern int __atomic64_add(__s64, atomic64_t *);
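+/*
+ * The plain add/sub routines return void; the _ret variants return
+ * the new value of the counter and are what the *_return and
+ * *_and_test wrappers below are built from.
+ */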
+extern void atomic_add(int, atomic_t *);
+extern void atomic64_add(int, atomic64_t *);
+extern void atomic_sub(int, atomic_t *);
+extern void atomic64_sub(int, atomic64_t *);
-extern int __atomic_sub(int, atomic_t *);
-extern int __atomic64_sub(__s64, atomic64_t *);
+extern int atomic_add_ret(int, atomic_t *);
+extern int atomic64_add_ret(int, atomic64_t *);
+extern int atomic_sub_ret(int, atomic_t *);
+extern int atomic64_sub_ret(int, atomic64_t *);
-#define atomic_add(i, v) ((void)__atomic_add(i, v))
-#define atomic64_add(i, v) ((void)__atomic64_add(i, v))
+#define atomic_dec_return(v) atomic_sub_ret(1, v)
+#define atomic64_dec_return(v) atomic64_sub_ret(1, v)
-#define atomic_sub(i, v) ((void)__atomic_sub(i, v))
-#define atomic64_sub(i, v) ((void)__atomic64_sub(i, v))
+#define atomic_inc_return(v) atomic_add_ret(1, v)
+#define atomic64_inc_return(v) atomic64_add_ret(1, v)
-#define atomic_dec_return(v) __atomic_sub(1, v)
-#define atomic64_dec_return(v) __atomic64_sub(1, v)
+#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
+#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)
-#define atomic_inc_return(v) __atomic_add(1, v)
-#define atomic64_inc_return(v) __atomic64_add(1, v)
+#define atomic_add_return(i, v) atomic_add_ret(i, v)
+#define atomic64_add_return(i, v) atomic64_add_ret(i, v)
-#define atomic_sub_and_test(i, v) (__atomic_sub(i, v) == 0)
-#define atomic64_sub_and_test(i, v) (__atomic64_sub(i, v) == 0)
-
-#define atomic_dec_and_test(v) (__atomic_sub(1, v) == 0)
-#define atomic64_dec_and_test(v) (__atomic64_sub(1, v) == 0)
-
-#define atomic_inc(v) ((void)__atomic_add(1, v))
-#define atomic64_inc(v) ((void)__atomic64_add(1, v))
-
-#define atomic_dec(v) ((void)__atomic_sub(1, v))
-#define atomic64_dec(v) ((void)__atomic64_sub(1, v))
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
+#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+
+#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
+#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)
+
+#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
+#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)
+
+#define atomic_inc(v) atomic_add(1, v)
+#define atomic64_inc(v) atomic64_add(1, v)
+
+#define atomic_dec(v) atomic_sub(1, v)
+#define atomic64_dec(v) atomic64_sub(1, v)
+
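+/*
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true if the result is
+ * negative, or false when the result is greater than or equal to
+ * zero.
+ */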
+#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
+#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
+
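+/*
+ * atomic_cmpxchg atomically sets @v to @n only if it currently holds
+ * @o, and returns the value @v held beforehand; the caller can tell
+ * whether the exchange happened by comparing the result with @o.
+ * atomic_xchg unconditionally installs @new and returns the old value.
+ */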
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
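+/*
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to @v...
+ * @u: ...unless @v is equal to @u.
+ *
+ * Atomically adds @a to @v, so long as @v was not @u, retrying the
+ * cmpxchg until it observes a stable value.  Returns non-zero if @v
+ * was not @u, and zero otherwise.
+ */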
+#define atomic_add_unless(v, a, u) \
+({ \
+ int c, old; \
+ c = atomic_read(v); \
+ for (;;) { \
+ if (unlikely(c == (u))) \
+ break; \
+ old = atomic_cmpxchg((v), c, c + (a)); \
+ if (likely(old == c)) \
+ break; \
+ c = old; \
+ } \
+ likely(c != (u)); \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
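+
+/*
+ * atomic_inc_not_zero() is the usual "take a reference only if the
+ * object is still live" primitive: it fails (returns zero) once the
+ * count has already dropped to zero.
+ */
+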
/* Atomic operations are already serializing */
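+/*
+ * ...except that on SMP the CPU may still reorder an atomic against
+ * the loads and stores around it.  The smp_mb__* macros therefore
+ * expand to SPARC-V9 membar instructions (#StoreLoad | #LoadLoad
+ * before, #StoreLoad | #StoreStore after, going by the helper names)
+ * so that a plain atomic_dec()/atomic_inc() can serve as an ordering
+ * point between CPUs; on UP a compiler barrier suffices.
+ */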
+#ifdef CONFIG_SMP
+#define smp_mb__before_atomic_dec() membar_storeload_loadload()
+#define smp_mb__after_atomic_dec() membar_storeload_storestore()
+#define smp_mb__before_atomic_inc() membar_storeload_loadload()
+#define smp_mb__after_atomic_inc() membar_storeload_storestore()
+#else
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
+#endif
+
+#include <asm-generic/atomic.h>
#endif /* !(__ARCH_SPARC64_ATOMIC__) */