/*
 * Copyright (c) 2013, 2014 Nicira, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* This header implements atomic operation primitives on GCC 4.x. */
18 #ifndef IN_OVS_ATOMIC_H
19 #error "This header should only be included indirectly via ovs-atomic.h."
22 #include "ovs-atomic-locked.h"
23 #define OVS_ATOMIC_GCC4P_IMPL 1
25 #define ATOMIC(TYPE) TYPE
26 #include "ovs-atomic-types.h"
/* "2" means always lock-free (per C11 ATOMIC_*_LOCK_FREE semantics).
 * long/long long are lock-free only when they fit in a pointer-sized word,
 * which is what IS_LOCKLESS_ATOMIC below tests at the object level. */
#define ATOMIC_BOOL_LOCK_FREE 2
#define ATOMIC_CHAR_LOCK_FREE 2
#define ATOMIC_SHORT_LOCK_FREE 2
#define ATOMIC_INT_LOCK_FREE 2
#define ATOMIC_LONG_LOCK_FREE (ULONG_MAX <= UINTPTR_MAX ? 2 : 0)
#define ATOMIC_LLONG_LOCK_FREE (ULLONG_MAX <= UINTPTR_MAX ? 2 : 0)
#define ATOMIC_POINTER_LOCK_FREE 2

/* Memory-order constants, mirroring C11 <stdatomic.h>.  Restored here
 * because the fence helpers below compare against these values. */
typedef enum {
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;
/* An object is handled locklessly (via __sync builtins) when it is no
 * larger than a pointer; bigger objects fall back to a mutex. */
#define IS_LOCKLESS_ATOMIC(OBJECT) (sizeof(OBJECT) <= sizeof(void *))

#define ATOMIC_VAR_INIT(VALUE) VALUE
/* Plain assignment suffices for initialization: no concurrent access yet. */
#define atomic_init(OBJECT, VALUE) (*(OBJECT) = (VALUE), (void) 0)
#define atomic_destroy(OBJECT) ((void) (OBJECT))
52 atomic_thread_fence(memory_order order)
54 if (order != memory_order_relaxed) {
60 atomic_thread_fence_if_seq_cst(memory_order order)
62 if (order == memory_order_seq_cst) {
68 atomic_signal_fence(memory_order order OVS_UNUSED)
70 if (order != memory_order_relaxed) {
71 asm volatile("" : : : "memory");
/* Evaluates OBJ (for type checking) and reports whether objects of its
 * size are handled without the lock fallback. */
#define atomic_is_lock_free(OBJ)                \
    ((void) *(OBJ),                             \
     IF_LOCKLESS_ATOMIC(OBJ, true, false))
#define atomic_store(DST, SRC) \
    atomic_store_explicit(DST, SRC, memory_order_seq_cst)
/* Store SRC into *DST with the given ordering.  Lockless path: fence
 * before the store (release), plain volatile store, then a trailing
 * fence only for seq_cst.  Oversized objects go through the mutex. */
#define atomic_store_explicit(DST, SRC, ORDER)          \
    ({                                                  \
        typeof(DST) dst__ = (DST);                      \
        typeof(SRC) src__ = (SRC);                      \
        memory_order order__ = (ORDER);                 \
                                                        \
        if (IS_LOCKLESS_ATOMIC(*dst__)) {               \
            atomic_thread_fence(order__);               \
            *(typeof(*(DST)) volatile *)dst__ = src__;  \
            atomic_thread_fence_if_seq_cst(order__);    \
        } else {                                        \
            atomic_store_locked(DST, SRC);              \
        }                                               \
        (void) 0;                                       \
    })
#define atomic_read(SRC, DST) \
    atomic_read_explicit(SRC, DST, memory_order_seq_cst)
/* Read *SRC into *DST with the given ordering.  Lockless path: leading
 * fence only for seq_cst, plain volatile load, then fence after the load
 * (acquire).  Oversized objects go through the mutex. */
#define atomic_read_explicit(SRC, DST, ORDER)           \
    ({                                                  \
        typeof(DST) dst__ = (DST);                      \
        typeof(SRC) src__ = (SRC);                      \
        memory_order order__ = (ORDER);                 \
                                                        \
        if (IS_LOCKLESS_ATOMIC(*src__)) {               \
            atomic_thread_fence_if_seq_cst(order__);    \
            *dst__ = *(typeof(*(SRC)) volatile *)src__; \
            atomic_thread_fence(order__);               \
        } else {                                        \
            atomic_read_locked(SRC, DST);               \
        }                                               \
        (void) 0;                                       \
    })
/* Common read-modify-write skeleton: applies __sync_fetch_and_<OP> to
 * *RMW with ARG and stores the previous value into *ORIG.  The __sync
 * builtins are full barriers, so ordering is always seq_cst here.
 * Oversized objects fall back to the locked implementation. */
#define atomic_op__(RMW, OP, ARG, ORIG)                    \
    ({                                                     \
        typeof(RMW) rmw__ = (RMW);                         \
        typeof(ARG) arg__ = (ARG);                         \
        typeof(ORIG) orig__ = (ORIG);                      \
                                                           \
        if (IS_LOCKLESS_ATOMIC(*rmw__)) {                  \
            *orig__ = __sync_fetch_and_##OP(rmw__, arg__); \
        } else {                                           \
            atomic_op_locked(RMW, OP, ARG, ORIG);          \
        }                                                  \
        (void) 0;                                          \
    })
/* Fetch-and-op wrappers; *ORIG always receives the pre-operation value. */
#define atomic_add(RMW, ARG, ORIG) atomic_op__(RMW, add, ARG, ORIG)
#define atomic_sub(RMW, ARG, ORIG) atomic_op__(RMW, sub, ARG, ORIG)
#define atomic_or( RMW, ARG, ORIG) atomic_op__(RMW, or, ARG, ORIG)
#define atomic_xor(RMW, ARG, ORIG) atomic_op__(RMW, xor, ARG, ORIG)
#define atomic_and(RMW, ARG, ORIG) atomic_op__(RMW, and, ARG, ORIG)
/* The _explicit variants ignore ORDER: the __sync builtins underlying
 * atomic_op__ are always full barriers, which satisfies any ordering. */
#define atomic_add_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_add(RMW, OPERAND, ORIG))
#define atomic_sub_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_sub(RMW, OPERAND, ORIG))
#define atomic_or_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_or(RMW, OPERAND, ORIG))
#define atomic_xor_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_xor(RMW, OPERAND, ORIG))
#define atomic_and_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_and(RMW, OPERAND, ORIG))
/* atomic_flag */

/* NOTE(review): typedef reconstructed from usage below (object->b passed to
 * __sync_lock_test_and_set/__sync_lock_release) -- confirm the member type
 * against the upstream header. */
typedef struct {
    bool b;
} atomic_flag;
#define ATOMIC_FLAG_INIT { false }
151 atomic_flag_init(volatile atomic_flag *object OVS_UNUSED)
157 atomic_flag_destroy(volatile atomic_flag *object OVS_UNUSED)
163 atomic_flag_test_and_set(volatile atomic_flag *object)
165 return __sync_lock_test_and_set(&object->b, 1);
169 atomic_flag_test_and_set_explicit(volatile atomic_flag *object,
170 memory_order order OVS_UNUSED)
172 return atomic_flag_test_and_set(object);
176 atomic_flag_clear(volatile atomic_flag *object)
178 __sync_lock_release(&object->b);
182 atomic_flag_clear_explicit(volatile atomic_flag *object,
183 memory_order order OVS_UNUSED)
185 atomic_flag_clear(object);