/*
 *  linux/include/asm-arm26/atomic.h
 *
 *  Copyright (c) 1996 Russell King.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  Changelog:
 *   27-06-1996	RMK	Created
 *   13-04-1997	RMK	Made functions atomic!
 *   07-12-1997	RMK	Upgraded for v2.1.
 *   26-08-1998	PJB	Added #ifdef __KERNEL__
 *
 *  FIXME - its probably worth seeing what these compile into...
 */
18 #ifndef __ASM_ARM_ATOMIC_H
19 #define __ASM_ARM_ATOMIC_H
21 #include <linux/config.h>
24 #error SMP is NOT supported
27 typedef struct { volatile int counter; } atomic_t;
29 #define ATOMIC_INIT(i) { (i) }
32 #include <asm/system.h>
34 #define atomic_read(v) ((v)->counter)
35 #define atomic_set(v,i) (((v)->counter) = (i))
37 static inline void atomic_add(int i, volatile atomic_t *v)
41 local_irq_save(flags);
43 local_irq_restore(flags);
46 static inline void atomic_sub(int i, volatile atomic_t *v)
50 local_irq_save(flags);
52 local_irq_restore(flags);
55 static inline void atomic_inc(volatile atomic_t *v)
59 local_irq_save(flags);
61 local_irq_restore(flags);
64 static inline void atomic_dec(volatile atomic_t *v)
68 local_irq_save(flags);
70 local_irq_restore(flags);
73 static inline int atomic_dec_and_test(volatile atomic_t *v)
78 local_irq_save(flags);
80 v->counter = val -= 1;
81 local_irq_restore(flags);
86 static inline int atomic_add_negative(int i, volatile atomic_t *v)
91 local_irq_save(flags);
93 v->counter = val += i;
94 local_irq_restore(flags);
/*
 * Atomically clear the bits in mask from the word at *addr.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}
108 static inline int atomic_add_return(int i, volatile atomic_t *v)
113 local_irq_save(flags);
114 val = v->counter + i;
116 local_irq_restore(flags);
121 static inline int atomic_sub_return(int i, volatile atomic_t *v)
123 return atomic_add_return(-i, v);
/* Increment/decrement and return the new value, via the *_return helpers. */
#define atomic_inc_return(v)	(atomic_add_return(1,v))
#define atomic_dec_return(v)	(atomic_sub_return(1,v))

/* Atomic operations are already serializing on ARM */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()