/*
 *  linux/include/asm-arm/atomic.h
 *
 *  Copyright (C) 1996 Russell King.
 *  Copyright (C) 2002 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#include <linux/config.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)	((v)->counter)
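
/*
 * Usage sketch (illustrative only; the counter name and message are
 * made up, not part of this interface):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&nr_users);
 *	printk("users: %d\n", atomic_read(&nr_users));
 */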

#if __LINUX_ARM_ARCH__ >= 6

/*
 * ARMv6 UP and SMP safe atomic ops.  We use load exclusive and
 * store exclusive to ensure that these are atomic.  We may loop
 * to ensure that the update happens.  Writing to 'v->counter'
 * without using the following operations WILL break the atomic
 * nature of these ops.
 */
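
/*
 * Roughly, each routine below is a load-linked/store-conditional loop:
 * ldrex loads the value and marks the address for exclusive access;
 * strex stores the new value only if exclusivity still holds, writing
 * 0 to its status register on success and 1 on failure; the teq/bne
 * pair retries until the store succeeds.  In pseudo-C (illustrative
 * only; load_exclusive/store_exclusive are not real helpers):
 *
 *	do {
 *		old = load_exclusive(&v->counter);
 *	} while (store_exclusive(&v->counter, old + i) != 0);
 */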

static inline void atomic_set(atomic_t *v, int i)
{
	unsigned long tmp;

	__asm__ __volatile__("@ atomic_set\n"
"1:	ldrex	%0, [%1]\n"
"	strex	%0, %2, [%1]\n"
"	teq	%0, #0\n"
"	bne	1b"
	: "=&r" (tmp)
	: "r" (&v->counter), "r" (i)
	: "cc");
}

static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long tmp, tmp2;

	__asm__ __volatile__("@ atomic_add\n"
"1:	ldrex	%0, [%2]\n"
"	add	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=&r" (tmp2)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long tmp, tmp2;

	__asm__ __volatile__("@ atomic_sub\n"
"1:	ldrex	%0, [%2]\n"
"	sub	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=&r" (tmp2)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
}

#define atomic_inc(v)	atomic_add(1, v)
#define atomic_dec(v)	atomic_sub(1, v)

static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned long tmp;
	int result;

	__asm__ __volatile__("@ atomic_dec_and_test\n"
"1:	ldrex	%0, [%2]\n"
"	sub	%0, %0, #1\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter)
	: "cc");

	return result == 0;
}

static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned long tmp;
	int result;

	__asm__ __volatile__("@ atomic_add_negative\n"
"1:	ldrex	%0, [%2]\n"
"	add	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter), "Ir" (i)
	: "cc");

	return result < 0;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long tmp, tmp2;

	__asm__ __volatile__("@ atomic_clear_mask\n"
"1:	ldrex	%0, [%2]\n"
"	bic	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=&r" (tmp2)
	: "r" (addr), "Ir" (mask)
	: "cc");
}

#else /* ARM_ARCH_6 */

#include <asm/system.h>

#ifdef CONFIG_SMP
#error SMP not supported on pre-ARMv6 CPUs
#endif
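
/*
 * Pre-ARMv6 cores have no ldrex/strex, so atomicity is obtained by
 * disabling interrupts around each read-modify-write.  That is only
 * safe on uniprocessor systems, hence the CONFIG_SMP guard above.
 */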

#define atomic_set(v,i)	(((v)->counter) = (i))

static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline void atomic_inc(atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic_dec(atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}

static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val -= 1;
	local_irq_restore(flags);

	return val == 0;
}

static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned long flags;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val += i;
	local_irq_restore(flags);

	return val < 0;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}

#endif /* __LINUX_ARM_ARCH__ */

/* Atomic operations are already serializing on ARM */
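
/*
 * Hence the smp_mb__* hooks below reduce to a compiler barrier.
 * Callers pair them with the atomic op whose ordering they need,
 * e.g. (sketch):
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&v);
 */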

#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ASM_ARM_ATOMIC_H */