1 #ifndef __ARCH_I386_ATOMIC__
2 #define __ARCH_I386_ATOMIC__
4 #include <linux/config.h>
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */
12 #define LOCK "lock ; "
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
/* The counter is volatile so every access really touches memory. */
typedef struct { volatile int counter; } atomic_t;
/* Static initializer for an atomic_t, e.g. atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) ((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i) (((v)->counter) = (i))
44 * atomic_add - add integer to atomic variable
45 * @i: integer value to add
46 * @v: pointer of type atomic_t
48 * Atomically adds @i to @v.
50 static __inline__ void atomic_add(int i, atomic_t *v)
55 :"ir" (i), "m" (v->counter));
59 * atomic_sub - subtract the atomic variable
60 * @i: integer value to subtract
61 * @v: pointer of type atomic_t
63 * Atomically subtracts @i from @v.
65 static __inline__ void atomic_sub(int i, atomic_t *v)
70 :"ir" (i), "m" (v->counter));
74 * atomic_sub_and_test - subtract value from variable and test result
75 * @i: integer value to subtract
76 * @v: pointer of type atomic_t
78 * Atomically subtracts @i from @v and returns
79 * true if the result is zero, or false for all
82 static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
87 LOCK "subl %2,%0; sete %1"
88 :"=m" (v->counter), "=qm" (c)
89 :"ir" (i), "m" (v->counter) : "memory");
94 * atomic_inc - increment atomic variable
95 * @v: pointer of type atomic_t
97 * Atomically increments @v by 1.
99 static __inline__ void atomic_inc(atomic_t *v)
101 __asm__ __volatile__(
108 * atomic_dec - decrement atomic variable
109 * @v: pointer of type atomic_t
111 * Atomically decrements @v by 1.
113 static __inline__ void atomic_dec(atomic_t *v)
115 __asm__ __volatile__(
122 * atomic_dec_and_test - decrement and test
123 * @v: pointer of type atomic_t
125 * Atomically decrements @v by 1 and
126 * returns true if the result is 0, or false for all other
129 static __inline__ int atomic_dec_and_test(atomic_t *v)
133 __asm__ __volatile__(
134 LOCK "decl %0; sete %1"
135 :"=m" (v->counter), "=qm" (c)
136 :"m" (v->counter) : "memory");
141 * atomic_inc_and_test - increment and test
142 * @v: pointer of type atomic_t
144 * Atomically increments @v by 1
145 * and returns true if the result is zero, or false for all
148 static __inline__ int atomic_inc_and_test(atomic_t *v)
152 __asm__ __volatile__(
153 LOCK "incl %0; sete %1"
154 :"=m" (v->counter), "=qm" (c)
155 :"m" (v->counter) : "memory");
160 * atomic_add_negative - add and test if negative
161 * @v: pointer of type atomic_t
162 * @i: integer value to add
164 * Atomically adds @i to @v and returns true
165 * if the result is negative, or false when
166 * result is greater than or equal to zero.
168 static __inline__ int atomic_add_negative(int i, atomic_t *v)
172 __asm__ __volatile__(
173 LOCK "addl %2,%0; sets %1"
174 :"=m" (v->counter), "=qm" (c)
175 :"ir" (i), "m" (v->counter) : "memory");
/* These are x86-specific, used by some header files */

/*
 * atomic_clear_mask - atomically AND the complement of @mask into *@addr.
 * @addr is parenthesized so expression arguments (e.g. p + 1) expand
 * correctly, matching the atomic_set_mask() style.
 */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*(addr)) : "memory")
/* atomic_set_mask - atomically OR @mask into *@addr (x86-specific). */
#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK "orl %0,%1" \
: : "r" (mask),"m" (*(addr)) : "memory")
/* Atomic operations are already serializing on x86 */
/* So these only need to stop the compiler from reordering (barrier()),
 * not emit any fence instruction. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()