/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03 by Ralf Baechle
 */

/*
 * As workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
20 #include <linux/config.h>
21 #include <linux/spinlock.h>
/* Serializes the spinlock-based (non-LL/SC) atomic implementations below. */
extern spinlock_t atomic_lock;

/* 32-bit atomic counter; volatile so every access goes to memory. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i) ((v)->counter = (i))
#ifdef CONFIG_CPU_HAS_LLSC

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
/* NOTE(review): this extract is missing lines inside the function — the
 * opening brace, the temp declaration, the asm statement open and the
 * add/sc/branch retry sequence are not visible.  Only the fragments below
 * survive; do not treat this as the complete implementation. */
static __inline__ void atomic_add(int i, atomic_t * v)
/* LL/SC retry loop: load-linked the counter (store-conditional not shown) */
"1: ll %0, %1 # atomic_add \n"
: "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter));
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
/* NOTE(review): extract is missing lines inside the function — braces, the
 * temp declaration, the asm open and the subu/sc/branch retry sequence are
 * not visible.  Fragments only. */
static __inline__ void atomic_sub(int i, atomic_t * v)
"1: ll %0, %1 # atomic_sub \n"
: "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter));
/*
 * Same as above, but return the result value.
 */
/* NOTE(review): extract is missing lines inside the function — braces, the
 * asm open, the sc/branch retry sequence, any barrier/clobber lines and
 * the final return statement are not visible.  Fragments only. */
static __inline__ int atomic_add_return(int i, atomic_t * v)
unsigned long temp, result;
"1: ll %1, %2 # atomic_add_return \n"
" addu %0, %1, %3 \n"
/* second addu presumably recomputes the result after the (missing) retry
 * branch — confirm against a complete copy of this file */
" addu %0, %1, %3 \n"
: "=&r" (result), "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter)
/* Same as atomic_add_return, but subtract @i and return the result. */
/* NOTE(review): extract is missing lines inside the function — braces, the
 * sc/branch retry sequence, any barrier/clobber lines and the final return
 * statement are not visible.  Fragments only. */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
unsigned long temp, result;
__asm__ __volatile__(
"1: ll %1, %2 # atomic_sub_return \n"
" subu %0, %1, %3 \n"
/* second subu presumably recomputes the result after the (missing) retry
 * branch */
" subu %0, %1, %3 \n"
: "=&r" (result), "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter)
/*
 * The MIPS I implementation is only atomic with respect to
 * interrupts.  R3000 based multiprocessor machines are rare anyway ...
 *
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
/* NOTE(review): extract is missing lines here — the #else pairing this
 * with the CONFIG_CPU_HAS_LLSC branch, the braces, the flags declaration
 * and the counter update (presumably v->counter += i) are not visible. */
static __inline__ void atomic_add(int i, atomic_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
/* NOTE(review): extract is missing lines here — braces, the flags
 * declaration and the counter update (presumably v->counter -= i) are not
 * visible. */
static __inline__ void atomic_sub(int i, atomic_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/* Spinlock-based atomic_add_return: add @i to @v and return the new value
 * (per the "Same as above, but return the result value" contract). */
/* NOTE(review): extract is missing the braces, locals, the counter update
 * and the return statement. */
static __inline__ int atomic_add_return(int i, atomic_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/* Spinlock-based atomic_sub_return: subtract @i from @v and return the new
 * value. */
/* NOTE(review): extract is missing the braces, locals, the counter update
 * and the return statement. */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
#endif /* CONFIG_CPU_HAS_LLSC */

/* Derived operations, built on the add/sub primitives above. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
/* NOTE(review): the #ifdef CONFIG_MIPS64 guard that the closing #endif at
 * the bottom of the file refers to is not visible in this extract. */

/* 64-bit atomic counter; volatile so every access goes to memory. */
typedef struct { volatile __s64 counter; } atomic64_t;

/* Static initializer: atomic64_t a = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i) { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v) ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i) ((v)->counter = (i))
#ifdef CONFIG_CPU_HAS_LLDSCD

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
/* NOTE(review): extract is missing lines inside the function — braces, the
 * temp declaration and the add/scd/branch retry sequence are not visible.
 * Also note @i is declared int for a 64-bit counter; verify against a
 * complete copy of this file. */
static __inline__ void atomic64_add(int i, atomic64_t * v)
__asm__ __volatile__(
"1: lld %0, %1 # atomic64_add \n"
: "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter));
/*
 * atomic64_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
/* NOTE(review): extract is missing lines inside the function — braces, the
 * temp declaration and the subtract/scd/branch retry sequence are not
 * visible.  @i is declared int for a 64-bit counter; verify against a
 * complete copy of this file. */
static __inline__ void atomic64_sub(int i, atomic64_t * v)
__asm__ __volatile__(
"1: lld %0, %1 # atomic64_sub \n"
: "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter));
/*
 * Same as above, but return the result value.
 */
/* NOTE(review): extract is missing the scd/branch retry sequence, barrier/
 * clobber lines and the final return.  More importantly, the visible code
 * pairs the 64-bit load-linked (lld) with the 32-bit "addu" — for a 64-bit
 * counter this should almost certainly be "daddu" (and the int parameters
 * long); addu truncates/sign-extends the result.  Verify against a
 * complete, current copy of this file before relying on it. */
static __inline__ int atomic64_add_return(int i, atomic64_t * v)
unsigned long temp, result;
__asm__ __volatile__(
"1: lld %1, %2 # atomic64_add_return \n"
" addu %0, %1, %3 \n"
" addu %0, %1, %3 \n"
: "=&r" (result), "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter)
/* Same as atomic64_add_return, but subtract @i and return the result. */
/* NOTE(review): extract is missing the scd/branch retry sequence, barrier/
 * clobber lines and the final return.  As with atomic64_add_return, the
 * 64-bit lld is paired with the 32-bit "subu" — this should almost
 * certainly be "dsubu" for a 64-bit counter; verify against a complete,
 * current copy of this file. */
static __inline__ int atomic64_sub_return(int i, atomic64_t * v)
unsigned long temp, result;
__asm__ __volatile__(
"1: lld %1, %2 # atomic64_sub_return \n"
" subu %0, %1, %3 \n"
" subu %0, %1, %3 \n"
: "=&r" (result), "=&r" (temp), "=m" (v->counter)
: "Ir" (i), "m" (v->counter)
/*
 * This implementation is only atomic with respect to interrupts.  It can't
 * [remainder of sentence truncated in this extract]
 *
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
/* NOTE(review): extract is missing the #else pairing this with the
 * CONFIG_CPU_HAS_LLDSCD branch, braces, the flags declaration and the
 * counter update (presumably v->counter += i). */
static __inline__ void atomic64_add(int i, atomic64_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic64_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
/* NOTE(review): extract is missing braces, the flags declaration and the
 * counter update (presumably v->counter -= i). */
static __inline__ void atomic64_sub(int i, atomic64_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/* Spinlock-based atomic64_add_return: add @i to @v and return the new
 * value. */
/* NOTE(review): extract is missing braces, locals, the counter update and
 * the return statement. */
static __inline__ int atomic64_add_return(int i, atomic64_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
/* Spinlock-based atomic64_sub_return: subtract @i from @v and return the
 * new value. */
/* NOTE(review): extract is missing braces, locals, the counter update and
 * the return statement. */
static __inline__ int atomic64_sub_return(int i, atomic64_t * v)
spin_lock_irqsave(&atomic_lock, flags);
spin_unlock_irqrestore(&atomic_lock, flags);
#endif /* CONFIG_CPU_HAS_LLDSCD */

/* Derived 64-bit operations, built on the add/sub primitives above. */
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_MIPS64 */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones; these hooks supply full barriers around the plain inc/dec forms.
 */
#define smp_mb__before_atomic_dec() smp_mb()
#define smp_mb__after_atomic_dec() smp_mb()
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()

#endif /* _ASM_ATOMIC_H */