/*
 * Copyright (c) 2013 Nicira, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* This header implements atomic operation primitives on GCC 4.x. */
#ifndef IN_OVS_ATOMIC_H
#error "This header should only be included indirectly via ovs-atomic.h."
#endif

#define OVS_ATOMIC_GCC4P_IMPL 1

#define DEFINE_LOCKLESS_ATOMIC(TYPE, NAME) typedef struct { TYPE value; } NAME
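
/* For example, DEFINE_LOCKLESS_ATOMIC(int, atomic_int) expands to:
 *
 *     typedef struct { int value; } atomic_int;
 *
 * Wrapping the value in a struct keeps each atomic type distinct from the
 * plain type, so that it can only be accessed through the atomic_*() macros
 * below. */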

#define ATOMIC_BOOL_LOCK_FREE 2
DEFINE_LOCKLESS_ATOMIC(bool, atomic_bool);

#define ATOMIC_CHAR_LOCK_FREE 2
DEFINE_LOCKLESS_ATOMIC(char, atomic_char);
DEFINE_LOCKLESS_ATOMIC(signed char, atomic_schar);
DEFINE_LOCKLESS_ATOMIC(unsigned char, atomic_uchar);

#define ATOMIC_SHORT_LOCK_FREE 2
DEFINE_LOCKLESS_ATOMIC(short, atomic_short);
DEFINE_LOCKLESS_ATOMIC(unsigned short, atomic_ushort);

#define ATOMIC_INT_LOCK_FREE 2
DEFINE_LOCKLESS_ATOMIC(int, atomic_int);
DEFINE_LOCKLESS_ATOMIC(unsigned int, atomic_uint);

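/* The comparisons against UINTPTR_MAX below appear to assume that GCC's
 * __sync builtins are lock-free up to the native pointer width; wider types
 * fall back to the mutex-protected locked_int64/locked_uint64 wrappers
 * declared later in this header. */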
#if ULONG_MAX <= UINTPTR_MAX
    #define ATOMIC_LONG_LOCK_FREE 2
    DEFINE_LOCKLESS_ATOMIC(long, atomic_long);
    DEFINE_LOCKLESS_ATOMIC(unsigned long, atomic_ulong);
#elif ULONG_MAX == UINT64_MAX
    #define ATOMIC_LONG_LOCK_FREE 0
    typedef struct locked_int64  atomic_long;
    typedef struct locked_uint64 atomic_ulong;
#else
    #error "not implemented"
#endif

#if ULLONG_MAX <= UINTPTR_MAX
    #define ATOMIC_LLONG_LOCK_FREE 2
    DEFINE_LOCKLESS_ATOMIC(long long, atomic_llong);
    DEFINE_LOCKLESS_ATOMIC(unsigned long long, atomic_ullong);
#elif ULLONG_MAX == UINT64_MAX
    #define ATOMIC_LLONG_LOCK_FREE 0
    typedef struct locked_int64  atomic_llong;
    typedef struct locked_uint64 atomic_ullong;
#else
    #error "not implemented"
#endif

#if SIZE_MAX <= UINTPTR_MAX
    DEFINE_LOCKLESS_ATOMIC(size_t, atomic_size_t);
    DEFINE_LOCKLESS_ATOMIC(ptrdiff_t, atomic_ptrdiff_t);
#elif SIZE_MAX == UINT64_MAX
    typedef struct locked_uint64 atomic_size_t;
    typedef struct locked_int64  atomic_ptrdiff_t;
#else
    #error "not implemented"
#endif

#if UINTMAX_MAX <= UINTPTR_MAX
    DEFINE_LOCKLESS_ATOMIC(intmax_t, atomic_intmax_t);
    DEFINE_LOCKLESS_ATOMIC(uintmax_t, atomic_uintmax_t);
#elif UINTMAX_MAX == UINT64_MAX
    typedef struct locked_int64  atomic_intmax_t;
    typedef struct locked_uint64 atomic_uintmax_t;
#else
    #error "not implemented"
#endif

#define ATOMIC_POINTER_LOCK_FREE 2
DEFINE_LOCKLESS_ATOMIC(intptr_t, atomic_intptr_t);
DEFINE_LOCKLESS_ATOMIC(uintptr_t, atomic_uintptr_t);

/* Nonstandard atomic types. */
DEFINE_LOCKLESS_ATOMIC(uint8_t,  atomic_uint8_t);
DEFINE_LOCKLESS_ATOMIC(uint16_t, atomic_uint16_t);
DEFINE_LOCKLESS_ATOMIC(uint32_t, atomic_uint32_t);
DEFINE_LOCKLESS_ATOMIC(int8_t,   atomic_int8_t);
DEFINE_LOCKLESS_ATOMIC(int16_t,  atomic_int16_t);
DEFINE_LOCKLESS_ATOMIC(int32_t,  atomic_int32_t);
#if UINT64_MAX <= UINTPTR_MAX
    DEFINE_LOCKLESS_ATOMIC(uint64_t, atomic_uint64_t);
    DEFINE_LOCKLESS_ATOMIC(int64_t,  atomic_int64_t);
#else
    typedef struct locked_uint64 atomic_uint64_t;
    typedef struct locked_int64  atomic_int64_t;
#endif

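/* Memory ordering, mirroring C11 memory_order.  In this implementation, any
 * ordering stronger than memory_order_relaxed is realized as a full memory
 * barrier (see atomic_thread_fence() below), which is correct but may be
 * stronger than strictly required. */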
typedef enum {
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;
\f
/* locked_uint64. */

#define IF_LOCKED_UINT64(OBJECT, THEN, ELSE)                            \
    __builtin_choose_expr(                                              \
        __builtin_types_compatible_p(typeof(OBJECT), struct locked_uint64), \
        (THEN), (ELSE))
#define AS_LOCKED_UINT64(OBJECT) ((struct locked_uint64 *) (void *) (OBJECT))
#define AS_UINT64(OBJECT) ((uint64_t *) (OBJECT))
struct locked_uint64 {
    uint64_t value;
};

uint64_t locked_uint64_load(const struct locked_uint64 *);
void locked_uint64_store(struct locked_uint64 *, uint64_t);
uint64_t locked_uint64_add(struct locked_uint64 *, uint64_t arg);
uint64_t locked_uint64_sub(struct locked_uint64 *, uint64_t arg);
uint64_t locked_uint64_or(struct locked_uint64 *, uint64_t arg);
uint64_t locked_uint64_xor(struct locked_uint64 *, uint64_t arg);
uint64_t locked_uint64_and(struct locked_uint64 *, uint64_t arg);
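
/* The locked_uint64_*() functions are defined out of line, in the
 * implementation file that accompanies this header.  A minimal sketch of one
 * of them, assuming a single global mutex serializes all locked_* objects
 * ("atomic_mutex" is an illustrative name, not part of this header):
 *
 *     uint64_t
 *     locked_uint64_add(struct locked_uint64 *u, uint64_t arg)
 *     {
 *         uint64_t old_value;
 *
 *         pthread_mutex_lock(&atomic_mutex);
 *         old_value = u->value;
 *         u->value += arg;
 *         pthread_mutex_unlock(&atomic_mutex);
 *
 *         return old_value;
 *     }
 *
 * Like __sync_fetch_and_add(), it returns the value held before the
 * operation, which is what atomic_op__() below stores into *ORIG. */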
\f
/* locked_int64. */

#define IF_LOCKED_INT64(OBJECT, THEN, ELSE)                             \
    __builtin_choose_expr(                                              \
        __builtin_types_compatible_p(typeof(OBJECT), struct locked_int64), \
        (THEN), (ELSE))
#define AS_LOCKED_INT64(OBJECT) ((struct locked_int64 *) (void *) (OBJECT))
#define AS_INT64(OBJECT) ((int64_t *) (OBJECT))
struct locked_int64 {
    int64_t value;
};
int64_t locked_int64_load(const struct locked_int64 *);
void locked_int64_store(struct locked_int64 *, int64_t);
int64_t locked_int64_add(struct locked_int64 *, int64_t arg);
int64_t locked_int64_sub(struct locked_int64 *, int64_t arg);
int64_t locked_int64_or(struct locked_int64 *, int64_t arg);
int64_t locked_int64_xor(struct locked_int64 *, int64_t arg);
int64_t locked_int64_and(struct locked_int64 *, int64_t arg);
\f
#define ATOMIC_VAR_INIT(VALUE) { .value = (VALUE) }
#define atomic_init(OBJECT, VALUE) ((OBJECT)->value = (VALUE), (void) 0)
#define atomic_destroy(OBJECT) ((void) (OBJECT))

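/* Example initialization (a minimal sketch; "counter" and "counter2" are
 * hypothetical variables, not part of this header):
 *
 *     static atomic_uint counter = ATOMIC_VAR_INIT(0);  // Static init.
 *
 *     atomic_uint counter2;
 *     atomic_init(&counter2, 0);                        // Run-time init.
 */
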
static inline void
atomic_thread_fence(memory_order order)
{
    if (order != memory_order_relaxed) {
        __sync_synchronize();
    }
}

static inline void
atomic_thread_fence_if_seq_cst(memory_order order)
{
    if (order == memory_order_seq_cst) {
        __sync_synchronize();
    }
}

static inline void
atomic_signal_fence(memory_order order OVS_UNUSED)
{
    if (order != memory_order_relaxed) {
        asm volatile("" : : : "memory");
    }
}
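
/* __sync_synchronize() emits a full hardware memory barrier.  The empty asm
 * with a "memory" clobber in atomic_signal_fence() is a compiler-only
 * barrier: it is enough to order accesses with respect to a signal handler
 * running on the same thread, without the cost of a hardware fence. */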

#define ATOMIC_SWITCH(OBJECT, LOCKLESS_CASE,                    \
                      LOCKED_UINT64_CASE, LOCKED_INT64_CASE)    \
    IF_LOCKED_UINT64(OBJECT, LOCKED_UINT64_CASE,                \
                     IF_LOCKED_INT64(OBJECT, LOCKED_INT64_CASE, \
                                     LOCKLESS_CASE))

#define atomic_is_lock_free(OBJ)                \
    ((void) (OBJ)->value,                       \
     ATOMIC_SWITCH(OBJ, true, false, false))
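
/* ATOMIC_SWITCH() dispatches at compile time: __builtin_choose_expr() picks
 * LOCKED_UINT64_CASE or LOCKED_INT64_CASE when OBJECT has one of the
 * mutex-protected 64-bit wrapper types, and LOCKLESS_CASE for every other
 * (lockless) atomic type.  The "(void) (OBJ)->value" in atomic_is_lock_free()
 * serves as a compile-time check that OBJ points to one of the atomic types
 * defined above, each of which has a "value" member. */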

#define atomic_store(DST, SRC) \
    atomic_store_explicit(DST, SRC, memory_order_seq_cst)
#define atomic_store_explicit(DST, SRC, ORDER)                          \
    (ATOMIC_SWITCH(DST,                                                 \
                   (atomic_thread_fence(ORDER),                         \
                    (DST)->value = (SRC),                               \
                    atomic_thread_fence_if_seq_cst(ORDER)),             \
                   locked_uint64_store(AS_LOCKED_UINT64(DST), SRC),     \
                   locked_int64_store(AS_LOCKED_INT64(DST), SRC)),      \
     (void) 0)

#define atomic_read(SRC, DST) \
    atomic_read_explicit(SRC, DST, memory_order_seq_cst)
#define atomic_read_explicit(SRC, DST, ORDER)                           \
    (ATOMIC_SWITCH(SRC,                                                 \
                   (atomic_thread_fence_if_seq_cst(ORDER),              \
                    *(DST) = (SRC)->value,                              \
                    atomic_thread_fence(ORDER)),                        \
                   *(DST) = locked_uint64_load(AS_LOCKED_UINT64(SRC)),  \
                   *(DST) = locked_int64_load(AS_LOCKED_INT64(SRC))),   \
     (void) 0)
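
/* Example (a minimal usage sketch; "flags" and "f" are hypothetical):
 *
 *     static atomic_uint flags = ATOMIC_VAR_INIT(0);
 *     unsigned int f;
 *
 *     atomic_read(&flags, &f);          // f = current value of flags.
 *     atomic_store(&flags, f | 0x1);    // Note: read + store is not one
 *                                       // atomic RMW; use atomic_or() for
 *                                       // that.
 */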

#define atomic_op__(RMW, OP, ARG, ORIG)                                 \
    (ATOMIC_SWITCH(RMW,                                                 \
                   *(ORIG) = __sync_fetch_and_##OP(&(RMW)->value, ARG), \
                   *(ORIG) = locked_uint64_##OP(AS_LOCKED_UINT64(RMW), ARG), \
                   *(ORIG) = locked_int64_##OP(AS_LOCKED_INT64(RMW), ARG)), \
     (void) 0)

#define atomic_add(RMW, ARG, ORIG) atomic_op__(RMW, add, ARG, ORIG)
#define atomic_sub(RMW, ARG, ORIG) atomic_op__(RMW, sub, ARG, ORIG)
#define atomic_or( RMW, ARG, ORIG) atomic_op__(RMW, or,  ARG, ORIG)
#define atomic_xor(RMW, ARG, ORIG) atomic_op__(RMW, xor, ARG, ORIG)
#define atomic_and(RMW, ARG, ORIG) atomic_op__(RMW, and, ARG, ORIG)
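
/* These have "fetch and op" semantics: *ORIG receives the value that RMW
 * held before the operation.  For example ("counter" is hypothetical):
 *
 *     static atomic_uint counter = ATOMIC_VAR_INIT(0);
 *     unsigned int orig;
 *
 *     atomic_add(&counter, 5, &orig);   // counter += 5; orig = old value.
 */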

#define atomic_add_explicit(RMW, OPERAND, ORIG, ORDER)  \
    ((void) (ORDER), atomic_add(RMW, OPERAND, ORIG))
#define atomic_sub_explicit(RMW, OPERAND, ORIG, ORDER)  \
    ((void) (ORDER), atomic_sub(RMW, OPERAND, ORIG))
#define atomic_or_explicit(RMW, OPERAND, ORIG, ORDER)   \
    ((void) (ORDER), atomic_or(RMW, OPERAND, ORIG))
#define atomic_xor_explicit(RMW, OPERAND, ORIG, ORDER)  \
    ((void) (ORDER), atomic_xor(RMW, OPERAND, ORIG))
#define atomic_and_explicit(RMW, OPERAND, ORIG, ORDER)  \
    ((void) (ORDER), atomic_and(RMW, OPERAND, ORIG))
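
/* The *_explicit() variants evaluate and discard ORDER: the __sync builtins
 * used above always act as full barriers, which satisfies any requested
 * memory ordering. */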
\f
/* atomic_flag */

typedef struct {
    int b;
} atomic_flag;
#define ATOMIC_FLAG_INIT { false }

static inline void
atomic_flag_init(volatile atomic_flag *object OVS_UNUSED)
{
    /* Nothing to do. */
}

static inline void
atomic_flag_destroy(volatile atomic_flag *object OVS_UNUSED)
{
    /* Nothing to do. */
}

static inline bool
atomic_flag_test_and_set(volatile atomic_flag *object)
{
    return __sync_lock_test_and_set(&object->b, 1);
}

static inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *object,
                                  memory_order order OVS_UNUSED)
{
    return atomic_flag_test_and_set(object);
}

static inline void
atomic_flag_clear(volatile atomic_flag *object)
{
    __sync_lock_release(&object->b);
}

static inline void
atomic_flag_clear_explicit(volatile atomic_flag *object,
                           memory_order order OVS_UNUSED)
{
    atomic_flag_clear(object);
}
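
/* Example: atomic_flag as a simple spinlock (a minimal usage sketch; "lock"
 * is a hypothetical variable).  __sync_lock_test_and_set() is an acquire
 * barrier and __sync_lock_release() a release barrier, which is what a lock
 * needs:
 *
 *     static volatile atomic_flag lock = ATOMIC_FLAG_INIT;
 *
 *     while (atomic_flag_test_and_set(&lock)) {
 *         continue;               // Spin until the flag is acquired.
 *     }
 *     // ...critical section...
 *     atomic_flag_clear(&lock);   // Release.
 */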