2 * Atomic operations that C can't guarantee us. Useful for
3 * resource counting etc..
5 * But use these as seldom as possible since they are much slower
6 * than regular operations.
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License. See the file "COPYING" in the main directory of this archive
12 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
16 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
17 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
18 * main big wrapper ...
20 #include <linux/config.h>
21 #include <linux/spinlock.h>
/* Lock used only by the spin-lock fallback implementations further down
 * (CPUs without ll/sc); defined in an arch C file elsewhere. */
26 extern spinlock_t atomic_lock;
/* 32-bit atomic counter; volatile forces the compiler to re-read memory
 * on every access instead of caching the value in a register. */
28 typedef struct { volatile int counter; } atomic_t;
30 #define ATOMIC_INIT(i) { (i) }
33 * atomic_read - read atomic variable
34 * @v: pointer of type atomic_t
36 * Atomically reads the value of @v.
/* Plain load — assumes a single aligned word access is atomic on this
 * architecture; no barrier implied. */
38 #define atomic_read(v) ((v)->counter)
41 * atomic_set - set atomic variable
42 * @v: pointer of type atomic_t
45 * Atomically sets the value of @v to @i.
/* Plain store, same single-access-atomicity assumption as atomic_read. */
47 #define atomic_set(v,i) ((v)->counter = (i))
49 #ifdef CONFIG_CPU_HAS_LLSC
52 * atomic_add - add integer to atomic variable
53 * @i: integer value to add
54 * @v: pointer of type atomic_t
56 * Atomically adds @i to @v.
/* ll/sc retry loop: load-linked the counter, add, store-conditional,
 * branch back on failure.  NOTE(review): this listing is missing the
 * interior asm lines (addu/sc/beqz) and the local `temp` declaration —
 * only the ll line and the constraint lists survive. */
58 static __inline__ void atomic_add(int i, atomic_t * v)
63 "1: ll %0, %1 # atomic_add \n"
67 : "=&r" (temp), "=m" (v->counter)
68 : "Ir" (i), "m" (v->counter));
72 * atomic_sub - subtract the atomic variable
73 * @i: integer value to subtract
74 * @v: pointer of type atomic_t
76 * Atomically subtracts @i from @v.
/* Mirror of atomic_add using subu inside the ll/sc retry loop.
 * NOTE(review): interior asm lines (subu/sc/beqz) are missing from
 * this listing. */
78 static __inline__ void atomic_sub(int i, atomic_t * v)
83 "1: ll %0, %1 # atomic_sub \n"
87 : "=&r" (temp), "=m" (v->counter)
88 : "Ir" (i), "m" (v->counter));
92 * Same as above, but return the result value
/* Returns the post-add value in `result`.  The addu appears twice: once
 * before sc, and once in the beqz branch-delay slot to recompute the
 * result after a successful store (sc clobbers %0 with the success flag).
 * 32-bit addu is correct here.  NOTE(review): sc/beqz/sync lines are
 * missing from this listing. */
94 static __inline__ int atomic_add_return(int i, atomic_t * v)
96 unsigned long temp, result;
99 "1: ll %1, %2 # atomic_add_return \n"
100 " addu %0, %1, %3 \n"
103 " addu %0, %1, %3 \n"
105 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
106 : "Ir" (i), "m" (v->counter)
/* Returns the post-subtract value; same double-subu / delay-slot pattern
 * as atomic_add_return above.  NOTE(review): sc/beqz/sync lines are
 * missing from this listing. */
112 static __inline__ int atomic_sub_return(int i, atomic_t * v)
114 unsigned long temp, result;
116 __asm__ __volatile__(
117 "1: ll %1, %2 # atomic_sub_return \n"
118 " subu %0, %1, %3 \n"
121 " subu %0, %1, %3 \n"
123 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
124 : "Ir" (i), "m" (v->counter)
131 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
132 * @i: integer value to subtract
132 * @v: pointer of type atomic_t
134 * Atomically test @v and decrement if it is greater than 0.
135 * The function returns the old value of @v minus 1.
/* Subtracts @i, but only stores the result when it is non-negative
 * (presumably a bltz skips the sc when %0 goes negative — that line is
 * missing from this listing; verify against the full source). */
137 static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
139 unsigned long temp, result;
141 __asm__ __volatile__(
142 "1: ll %1, %2 # atomic_sub_if_positive\n"
143 " subu %0, %1, %3 \n"
149 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
150 : "Ir" (i), "m" (v->counter)
159 * The MIPS I implementation is only atomic with respect to
160 * interrupts. R3000 based multiprocessor machines are rare anyway ...
162 * atomic_add - add integer to atomic variable
163 * @i: integer value to add
164 * @v: pointer of type atomic_t
166 * Atomically adds @i to @v.
/* Fallback for CPUs without ll/sc: serialize via atomic_lock with
 * interrupts disabled.  NOTE(review): the `flags` declaration and the
 * actual `v->counter += i` line are missing from this listing. */
168 static __inline__ void atomic_add(int i, atomic_t * v)
172 spin_lock_irqsave(&atomic_lock, flags);
174 spin_unlock_irqrestore(&atomic_lock, flags);
178 * atomic_sub - subtract the atomic variable
179 * @i: integer value to subtract
180 * @v: pointer of type atomic_t
182 * Atomically subtracts @i from @v.
/* Spin-lock fallback; NOTE(review): flags declaration and the
 * `v->counter -= i` line are missing from this listing. */
184 static __inline__ void atomic_sub(int i, atomic_t * v)
188 spin_lock_irqsave(&atomic_lock, flags);
190 spin_unlock_irqrestore(&atomic_lock, flags);
/* Spin-lock fallback returning the post-add value.  NOTE(review): the
 * locals and the add/return lines are missing from this listing. */
193 static __inline__ int atomic_add_return(int i, atomic_t * v)
198 spin_lock_irqsave(&atomic_lock, flags);
202 spin_unlock_irqrestore(&atomic_lock, flags);
/* Spin-lock fallback returning the post-subtract value.  NOTE(review):
 * the locals and the subtract/return lines are missing from this listing. */
207 static __inline__ int atomic_sub_return(int i, atomic_t * v)
212 spin_lock_irqsave(&atomic_lock, flags);
216 spin_unlock_irqrestore(&atomic_lock, flags);
222 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
223 * @v: pointer of type atomic_t
225 * Atomically test @v and decrement if it is greater than 0.
226 * The function returns the old value of @v minus 1.
/* Spin-lock fallback: subtract under the lock but only store when the
 * result stays non-negative.  NOTE(review): the conditional-store and
 * return lines are missing from this listing. */
228 static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
233 spin_lock_irqsave(&atomic_lock, flags);
238 spin_unlock_irqrestore(&atomic_lock, flags);
243 #endif /* CONFIG_CPU_HAS_LLSC */
/* Derived 32-bit helpers, all built on the add/sub primitives above, so
 * they inherit whichever implementation (ll/sc or spin-lock) was chosen. */
245 #define atomic_dec_return(v) atomic_sub_return(1,(v))
246 #define atomic_inc_return(v) atomic_add_return(1,(v))
249 * atomic_sub_and_test - subtract value from variable and test result
250 * @i: integer value to subtract
251 * @v: pointer of type atomic_t
253 * Atomically subtracts @i from @v and returns
254 * true if the result is zero, or false for all
257 #define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
260 * atomic_inc_and_test - increment and test
261 * @v: pointer of type atomic_t
263 * Atomically increments @v by 1
264 * and returns true if the result is zero, or false for all
267 #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
270 * atomic_dec_and_test - decrement by 1 and test
271 * @v: pointer of type atomic_t
273 * Atomically decrements @v by 1 and
274 * returns true if the result is 0, or false for all other
277 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
280 * atomic_dec_if_positive - decrement by 1 if old value positive
281 * @v: pointer of type atomic_t
283 #define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
286 * atomic_inc - increment atomic variable
287 * @v: pointer of type atomic_t
289 * Atomically increments @v by 1.
291 #define atomic_inc(v) atomic_add(1,(v))
294 * atomic_dec - decrement atomic variable
295 * @v: pointer of type atomic_t
297 * Atomically decrements @v by 1.
299 #define atomic_dec(v) atomic_sub(1,(v))
302 * atomic_add_negative - add and test if negative
303 * @v: pointer of type atomic_t
304 * @i: integer value to add
306 * Atomically adds @i to @v and returns true
307 * if the result is negative, or false when
308 * result is greater than or equal to zero.
310 #define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
/* 64-bit counterpart of atomic_t (this section is under CONFIG_MIPS64,
 * per the #endif at the bottom of the file). */
314 typedef struct { volatile __s64 counter; } atomic64_t;
316 #define ATOMIC64_INIT(i) { (i) }
319 * atomic64_read - read atomic variable
320 * @v: pointer of type atomic64_t
/* Plain load/store — assumes a single aligned doubleword access is
 * atomic on a 64-bit CPU; no barrier implied. */
323 #define atomic64_read(v) ((v)->counter)
326 * atomic64_set - set atomic variable
327 * @v: pointer of type atomic64_t
330 #define atomic64_set(v,i) ((v)->counter = (i))
332 #ifdef CONFIG_CPU_HAS_LLDSCD
335 * atomic64_add - add integer to atomic variable
336 * @i: integer value to add
337 * @v: pointer of type atomic64_t
339 * Atomically adds @i to @v.
/* lld/scd retry loop (64-bit load-linked / store-conditional).
 * NOTE(review): the interior add/scd/beqz lines are missing from this
 * listing; the add must be the 64-bit daddu, not 32-bit addu — compare
 * atomic64_sub_if_positive below, which correctly uses dsubu.  Verify
 * against the full source. */
341 static __inline__ void atomic64_add(long i, atomic64_t * v)
345 __asm__ __volatile__(
346 "1: lld %0, %1 # atomic64_add \n"
350 : "=&r" (temp), "=m" (v->counter)
351 : "Ir" (i), "m" (v->counter));
355 * atomic64_sub - subtract the atomic variable
356 * @i: integer value to subtract
357 * @v: pointer of type atomic64_t
359 * Atomically subtracts @i from @v.
/* lld/scd retry loop.  NOTE(review): interior subtract/scd/beqz lines
 * are missing from this listing; the subtract should be the 64-bit
 * dsubu (as used in atomic64_sub_if_positive below) — verify against
 * the full source. */
361 static __inline__ void atomic64_sub(long i, atomic64_t * v)
365 __asm__ __volatile__(
366 "1: lld %0, %1 # atomic64_sub \n"
370 : "=&r" (temp), "=m" (v->counter)
371 : "Ir" (i), "m" (v->counter));
375 * Same as above, but return the result value
377 static __inline__ long atomic64_add_return(long i, atomic64_t * v)
379 unsigned long temp, result;
381 __asm__ __volatile__(
382 "1: lld %1, %2 # atomic64_add_return \n"
383 " addu %0, %1, %3 \n"
386 " addu %0, %1, %3 \n"
388 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
389 : "Ir" (i), "m" (v->counter)
395 static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
397 unsigned long temp, result;
399 __asm__ __volatile__(
400 "1: lld %1, %2 # atomic64_sub_return \n"
401 " subu %0, %1, %3 \n"
404 " subu %0, %1, %3 \n"
406 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
407 : "Ir" (i), "m" (v->counter)
414 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
415 * @v: pointer of type atomic64_t
417 * Atomically test @v and decrement if it is greater than 0.
418 * The function returns the old value of @v minus 1.
/* Correctly uses the 64-bit dsubu on the lld value.  Subtracts @i but
 * only stores when the result stays non-negative (presumably a bltz
 * skips the scd — that line is missing from this listing; verify). */
420 static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
422 unsigned long temp, result;
424 __asm__ __volatile__(
425 "1: lld %1, %2 # atomic64_sub_if_positive\n"
426 " dsubu %0, %1, %3 \n"
432 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
433 : "Ir" (i), "m" (v->counter)
442 * This implementation is only atomic with respect to interrupts. It can't
445 * atomic64_add - add integer to atomic variable
446 * @i: integer value to add
447 * @v: pointer of type atomic64_t
449 * Atomically adds @i to @v.
/* Fallback for CPUs without lld/scd: serialize via atomic_lock with
 * interrupts disabled.  NOTE(review): the flags declaration and the
 * `v->counter += i` line are missing from this listing. */
451 static __inline__ void atomic64_add(long i, atomic64_t * v)
455 spin_lock_irqsave(&atomic_lock, flags);
457 spin_unlock_irqrestore(&atomic_lock, flags);
461 * atomic64_sub - subtract the atomic variable
462 * @i: integer value to subtract
463 * @v: pointer of type atomic64_t
465 * Atomically subtracts @i from @v.
/* Spin-lock fallback; NOTE(review): flags declaration and the
 * `v->counter -= i` line are missing from this listing. */
467 static __inline__ void atomic64_sub(long i, atomic64_t * v)
471 spin_lock_irqsave(&atomic_lock, flags);
473 spin_unlock_irqrestore(&atomic_lock, flags);
/* Spin-lock fallback returning the post-add value.  NOTE(review): the
 * locals and the add/return lines are missing from this listing. */
476 static __inline__ long atomic64_add_return(long i, atomic64_t * v)
481 spin_lock_irqsave(&atomic_lock, flags);
485 spin_unlock_irqrestore(&atomic_lock, flags);
/* Spin-lock fallback returning the post-subtract value.  NOTE(review):
 * the locals and the subtract/return lines are missing from this listing. */
490 static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
495 spin_lock_irqsave(&atomic_lock, flags);
499 spin_unlock_irqrestore(&atomic_lock, flags);
505 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
506 * @v: pointer of type atomic64_t
508 * Atomically test @v and decrement if it is greater than 0.
509 * The function returns the old value of @v minus 1.
/* Spin-lock fallback: subtract under the lock but only store when the
 * result stays non-negative.  NOTE(review): the conditional-store and
 * return lines are missing from this listing. */
511 static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
516 spin_lock_irqsave(&atomic_lock, flags);
521 spin_unlock_irqrestore(&atomic_lock, flags);
526 #endif /* CONFIG_CPU_HAS_LLDSCD */
/* Derived 64-bit helpers, all built on the atomic64 add/sub primitives
 * above, mirroring the 32-bit helper set. */
528 #define atomic64_dec_return(v) atomic64_sub_return(1,(v))
529 #define atomic64_inc_return(v) atomic64_add_return(1,(v))
532 * atomic64_sub_and_test - subtract value from variable and test result
533 * @i: integer value to subtract
534 * @v: pointer of type atomic64_t
536 * Atomically subtracts @i from @v and returns
537 * true if the result is zero, or false for all
540 #define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
543 * atomic64_inc_and_test - increment and test
544 * @v: pointer of type atomic64_t
546 * Atomically increments @v by 1
547 * and returns true if the result is zero, or false for all
550 #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
553 * atomic64_dec_and_test - decrement by 1 and test
554 * @v: pointer of type atomic64_t
556 * Atomically decrements @v by 1 and
557 * returns true if the result is 0, or false for all other
560 #define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
563 * atomic64_dec_if_positive - decrement by 1 if old value positive
564 * @v: pointer of type atomic64_t
566 #define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)
569 * atomic64_inc - increment atomic variable
570 * @v: pointer of type atomic64_t
572 * Atomically increments @v by 1.
574 #define atomic64_inc(v) atomic64_add(1,(v))
577 * atomic64_dec - decrement atomic variable
578 * @v: pointer of type atomic64_t
580 * Atomically decrements @v by 1.
582 #define atomic64_dec(v) atomic64_sub(1,(v))
585 * atomic64_add_negative - add and test if negative
586 * @v: pointer of type atomic64_t
587 * @i: integer value to add
589 * Atomically adds @i to @v and returns true
590 * if the result is negative, or false when
591 * result is greater than or equal to zero.
593 #define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
595 #endif /* CONFIG_MIPS64 */
598 * atomic*_return operations are serializing but not the non-*_return
/* Because the non-return variants carry no barrier, callers that need
 * ordering around atomic_inc/atomic_dec must use these explicit
 * full-barrier hooks. */
601 #define smp_mb__before_atomic_dec() smp_mb()
602 #define smp_mb__after_atomic_dec() smp_mb()
603 #define smp_mb__before_atomic_inc() smp_mb()
604 #define smp_mb__after_atomic_inc() smp_mb()
606 #endif /* _ASM_ATOMIC_H */