#ifndef __XEN_SYNCH_BITOPS_H__
#define __XEN_SYNCH_BITOPS_H__

/*
 * Copyright 1992, Linus Torvalds.
 * Heavily modified to provide guaranteed strong synchronisation
 * when communicating with Xen or other guest OSes running on other CPUs.
 *
 * Copied from asm-xen/asm-i386
 */

#include <linux/config.h>

#define ADDR (*(volatile long *) addr)
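
/* Atomically set bit 'nr' in the bitmap at 'addr'; the lock prefix makes
 * the update immediately visible to other CPUs, and hence to Xen or to
 * other guests sharing the memory. */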
static __inline__ void synch_set_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btsl %1,%0"
        : "=m" (ADDR) : "Ir" (nr) : "memory" );
}
static __inline__ void synch_clear_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btrl %1,%0"
        : "=m" (ADDR) : "Ir" (nr) : "memory" );
}
static __inline__ void synch_change_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btcl %1,%0"
        : "=m" (ADDR) : "Ir" (nr) : "memory" );
}
static __inline__ int synch_test_and_set_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btsl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "=m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}
static __inline__ int synch_test_and_clear_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btrl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "=m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}
static __inline__ int synch_test_and_change_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btcl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "=m" (ADDR) : "Ir" (nr) : "memory");
    return oldbit;
}
static __inline__ int synch_const_test_bit(int nr, const volatile void * addr)
{
    return ((1UL << (nr & 31)) &
            (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}
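
/* Test bit 'nr' for a non-constant 'nr' using an (unlocked) btl. */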
static __inline__ int synch_var_test_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "btl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit) : "m" (ADDR), "Ir" (nr) );
    return oldbit;
}
#define synch_test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 synch_const_test_bit((nr),(addr)) : \
 synch_var_test_bit((nr),(addr)))
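
/*
 * Usage sketch (illustrative only): a guest consuming a notification bit
 * shared with another domain might do the following, where 'shared_flags'
 * and 'NOTIFY_BIT' are hypothetical names and process_notification() is a
 * hypothetical handler.
 *
 *     if ( synch_test_and_clear_bit(NOTIFY_BIT, shared_flags) )
 *         process_notification();
 */
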
#endif /* __XEN_SYNCH_BITOPS_H__ */