#ifndef __XEN_SYNCH_BITOPS_H__
#define __XEN_SYNCH_BITOPS_H__

/*
 * Copyright 1992, Linus Torvalds.
 * Heavily modified to provide guaranteed strong synchronisation
 * when communicating with Xen or other guest OSes running on other CPUs.
 */

#include <linux/config.h>

#define ADDR (*(volatile long *) addr)
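
/*
 * Locked bit operations.  The "lock" prefix makes each read-modify-write
 * below atomic and a full memory barrier on x86, so updates become visible
 * to the hypervisor and to guests running on other (virtual) CPUs.
 */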
static __inline__ void synch_set_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btsl %1,%0"
        : "+m" (ADDR) : "Ir" (nr) : "memory" );
}

static __inline__ void synch_clear_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btrl %1,%0"
        : "+m" (ADDR) : "Ir" (nr) : "memory" );
}

static __inline__ void synch_change_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btcl %1,%0"
        : "+m" (ADDR) : "Ir" (nr) : "memory" );
}

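/*
 * The test-and-* variants return the old value of the bit: bts/btr/btc copy
 * the previous bit into the carry flag, and "sbbl %0,%0" expands CF into
 * 0 (bit was clear) or -1 (bit was set) in the result register.
 */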
static __inline__ int synch_test_and_set_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btsl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}

static __inline__ int synch_test_and_clear_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btrl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}

static __inline__ int synch_test_and_change_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btcl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}

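/*
 * Minimal usage sketch (hypothetical caller, not part of this header):
 * atomically claim a flag in a bitmap shared with the other end.
 *
 *     static unsigned long shared_flags[1];   <- hypothetical bitmap
 *
 *     if ( !synch_test_and_set_bit(0, shared_flags) )
 *         ... the bit was clear and this CPU is the one that set it ...
 */
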
struct __synch_xchg_dummy { unsigned long a[100]; };
#define __synch_xg(x) ((struct __synch_xchg_dummy *)(x))

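/*
 * Casting ptr through __synch_xg() above gives the "m" operands in
 * __synch_cmpxchg() a deliberately large type, telling GCC that the asm may
 * access the whole object behind ptr rather than a single long, so it will
 * not cache any part of it in registers across the cmpxchg.
 */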
#define synch_cmpxchg(ptr, old, new) \
((__typeof__(*(ptr)))__synch_cmpxchg((ptr),\
                                     (unsigned long)(old), \
                                     (unsigned long)(new), \
                                     sizeof(*(ptr))))

static inline unsigned long __synch_cmpxchg(volatile void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
    unsigned long prev;
    switch (size) {
    case 1:
        __asm__ __volatile__("lock; cmpxchgb %b1,%2"
            : "=a"(prev) : "q"(new), "m"(*__synch_xg(ptr)), "0"(old) : "memory");
        return prev;
    case 2:
        __asm__ __volatile__("lock; cmpxchgw %w1,%2"
            : "=a"(prev) : "r"(new), "m"(*__synch_xg(ptr)), "0"(old) : "memory");
        return prev;
#ifdef CONFIG_X86_64
    case 4:
        __asm__ __volatile__("lock; cmpxchgl %k1,%2"
            : "=a"(prev) : "r"(new), "m"(*__synch_xg(ptr)), "0"(old) : "memory");
        return prev;
    case 8:
        __asm__ __volatile__("lock; cmpxchgq %1,%2"
            : "=a"(prev) : "r"(new), "m"(*__synch_xg(ptr)), "0"(old) : "memory");
        return prev;
#else
    case 4:
        __asm__ __volatile__("lock; cmpxchgl %1,%2"
            : "=a"(prev) : "r"(new), "m"(*__synch_xg(ptr)), "0"(old) : "memory");
        return prev;
#endif
    }
    return old;
}

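/*
 * Minimal synch_cmpxchg() usage sketch (hypothetical caller, not part of
 * this header): publish a new value only if nobody else got there first.
 *
 *     unsigned int old = idx, new = idx + 1;   <- hypothetical variables
 *     if ( synch_cmpxchg(&shared_idx, old, new) == old )
 *         ... our value was installed atomically ...
 */

/*
 * Bit tests are plain reads, so no lock prefix is needed.  When the bit
 * number is a compile-time constant the C version below lets the compiler
 * fold the test; otherwise the btl-based version is used (see
 * synch_test_bit()).
 */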
static __always_inline int synch_const_test_bit(int nr,
                                                const volatile void * addr)
{
    return ((1UL << (nr & 31)) &
            (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}

static __inline__ int synch_var_test_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "btl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit) : "m" (ADDR), "Ir" (nr) );
    return oldbit;
}

#define synch_test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 synch_const_test_bit((nr),(addr)) : \
 synch_var_test_bit((nr),(addr)))

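/*
 * x86's cmpxchg handles 1- and 2-byte operands directly, so no sub-word
 * emulation is required: the "subword" variant is just synch_cmpxchg().
 */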
#define synch_cmpxchg_subword synch_cmpxchg

#endif /* __XEN_SYNCH_BITOPS_H__ */