/*
 * Wrapper around the platform's <asm/system.h> that supplies cmpxchg()
 * for MIPS, cribbed from linux-2.6/include/asm-mips/system.h.
 *
 * NOTE(review): this chunk is garbled - the original file's line numbers
 * are fused into the start of every line and many lines are missing
 * (visible as gaps in the embedded numbering). Code is preserved
 * byte-for-byte below; only comments have been added.
 */
1 #ifndef __ASM_MIPS_SYSTEM_H_WRAPPER
2 #define __ASM_MIPS_SYSTEM_H_WRAPPER 1
3 4 #include_next <asm/system.h>
/*
 * Deliberate compile-time tripwire: the author marked this port as
 * untested, so any build that reaches this header stops here until
 * someone reviews it and removes the #error.
 */
4 6 #error "Cribbed from linux-2.6/include/asm-mips/system.h but untested."
/* Advertise that this arch provides its own cmpxchg implementation. */
5 8 #define __HAVE_ARCH_CMPXCHG 1
/*
 * __cmpxchg_u32 - atomic 32-bit compare-and-exchange on *m with full
 * barrier semantics.
 *
 * NOTE(review): the interior of this function is elided in this chunk
 * (gaps in the embedded line numbering) - only the skeleton is visible.
 * Visible structure selects one of three strategies at run time:
 *   1. ll/sc loop with the R10000 LL/SC-errata workaround,
 *   2. plain ll/sc loop on other LL/SC-capable CPUs,
 *   3. fallback that disables interrupts (raw_local_irq_save/restore)
 *      on CPUs without LL/SC.
 * Presumably returns the value previously stored at *m and stores `new`
 * only if that value equalled `old` - the sc/bne loop body and the
 * return statement are not visible here; confirm against the original
 * linux-2.6 source.
 */
10 static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
/* Branch 1: LL/SC with the R10000 workaround enabled. */
15 	if (cpu_has_llsc && R10000_LLSC_WAR) {
20 	"1:	ll	%0, %2	# __cmpxchg_u32 	\n"
/* %0 = retval (loaded value), %1/"=R"(*m) = memory operand written by sc. */
29 	: "=&r" (retval), "=R" (*m)
30 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 2: plain LL/SC path (no R10000 workaround needed). */
32 	} else if (cpu_has_llsc) {
37 	"1:	ll	%0, %2	# __cmpxchg_u32 	\n"
49 	: "=&r" (retval), "=R" (*m)
50 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 3 (elided `else`): no LL/SC - make the compare+store atomic
 * by masking interrupts around it (UP-safe only). */
55 	raw_local_irq_save(flags);
59 	raw_local_irq_restore(flags);	/* implies memory barrier */
/*
 * __cmpxchg_u32_local - same 32-bit compare-and-exchange as
 * __cmpxchg_u32 but with only CPU-local atomicity guarantees:
 * note it uses local_irq_save/restore in the fallback path rather than
 * the raw_ variants, and presumably omits SMP memory barriers in the
 * elided asm bodies - TODO confirm against the original source.
 *
 * NOTE(review): interior lines are elided in this chunk; only the
 * three-way CPU-capability skeleton is visible.
 */
67 static inline unsigned long __cmpxchg_u32_local(volatile int * m,
68 	unsigned long old, unsigned long new)
/* Branch 1: LL/SC with the R10000 workaround. */
72 	if (cpu_has_llsc && R10000_LLSC_WAR) {
77 	"1:	ll	%0, %2	# __cmpxchg_u32 	\n"
86 	: "=&r" (retval), "=R" (*m)
87 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 2: plain LL/SC path. */
89 	} else if (cpu_has_llsc) {
94 	"1:	ll	%0, %2	# __cmpxchg_u32 	\n"
103 	: "=&r" (retval), "=R" (*m)
104 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 3 (elided `else`): no LL/SC - disable local interrupts. */
109 	local_irq_save(flags);
113 	local_irq_restore(flags);	/* implies memory barrier */
/*
 * __cmpxchg_u64 - atomic 64-bit compare-and-exchange on *m using
 * lld/scd (64-bit LL/SC), with the same three-way strategy selection
 * as __cmpxchg_u32.
 *
 * Visible semantics: load *m into retval (%0); if it differs from
 * `old` (%z3) branch to label 2 (fail path, elided); otherwise the
 * elided scd stores `new`. Returns the previously stored value.
 *
 * NOTE(review): the scd/retry loop, label 2, locals and return are
 * elided in this chunk - confirm against the original linux-2.6 source.
 * This 64-bit variant is presumably compiled only on 64-bit kernels
 * (see the link-error stubs further down for 32-bit).
 */
120 static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
/* Branch 1: LL/SC with the R10000 workaround. */
125 	if (cpu_has_llsc && R10000_LLSC_WAR) {
126 	__asm__ __volatile__(
130 	"1:	lld	%0, %2	# __cmpxchg_u64 	\n"
131 	"	bne	%0, %z3, 2f 	\n"
137 	: "=&r" (retval), "=R" (*m)
138 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 2: plain LL/SC path. */
140 	} else if (cpu_has_llsc) {
141 	__asm__ __volatile__(
145 	"1:	lld	%0, %2	# __cmpxchg_u64 	\n"
146 	"	bne	%0, %z3, 2f 	\n"
155 	: "=&r" (retval), "=R" (*m)
156 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 3 (elided `else`): no LL/SC - mask interrupts around the op. */
161 	raw_local_irq_save(flags);
165 	raw_local_irq_restore(flags);	/* implies memory barrier */
/*
 * __cmpxchg_u64_local - 64-bit compare-and-exchange with CPU-local
 * atomicity only: the fallback uses local_irq_save/restore rather than
 * the raw_ variants, and the elided asm bodies presumably omit SMP
 * barriers - TODO confirm against the original source.
 *
 * NOTE(review): interior lines (scd store, retry loop, label 2, locals,
 * return) are elided in this chunk; only the skeleton is visible.
 */
173 static inline unsigned long __cmpxchg_u64_local(volatile int * m,
174 	unsigned long old, unsigned long new)
/* Branch 1: LL/SC with the R10000 workaround. */
178 	if (cpu_has_llsc && R10000_LLSC_WAR) {
179 	__asm__ __volatile__(
183 	"1:	lld	%0, %2	# __cmpxchg_u64 	\n"
184 	"	bne	%0, %z3, 2f 	\n"
190 	: "=&r" (retval), "=R" (*m)
191 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 2: plain LL/SC path. */
193 	} else if (cpu_has_llsc) {
194 	__asm__ __volatile__(
198 	"1:	lld	%0, %2	# __cmpxchg_u64 	\n"
199 	"	bne	%0, %z3, 2f 	\n"
205 	: "=&r" (retval), "=R" (*m)
206 	: "R" (*m), "Jr" (old), "Jr" (new)
/* Branch 3 (elided `else`): no LL/SC - disable local interrupts. */
211 	local_irq_save(flags);
215 	local_irq_restore(flags);	/* implies memory barrier */
/*
 * 32-bit-kernel stubs: these extern functions are intentionally never
 * defined anywhere, so any code path that tries a 64-bit cmpxchg on a
 * 32-bit kernel fails at LINK time rather than silently miscompiling.
 * The #defines route __cmpxchg_u64 / __cmpxchg_u64_local (used by the
 * size dispatchers below) to the undefined symbols.
 *
 * NOTE(review): presumably guarded by a (elided) 32-bit-kernel #if/#else
 * around the real 64-bit implementations above - confirm.
 */
222 extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
223 	volatile int * m, unsigned long old, unsigned long new);
224 #define __cmpxchg_u64 __cmpxchg_u64_unsupported_on_32bit_kernels
225 extern unsigned long __cmpxchg_u64_local_unsupported_on_32bit_kernels(
226 	volatile int * m, unsigned long old, unsigned long new);
227 #define __cmpxchg_u64_local __cmpxchg_u64_local_unsupported_on_32bit_kernels
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
/* Same deliberate link-error idiom as the 32-bit stubs above: referenced
 * only from the unreachable default path of the size dispatchers. */
232 extern void __cmpxchg_called_with_bad_pointer(void);
/*
 * __cmpxchg - dispatch a compare-and-exchange to the width-specific
 * helper based on the operand size (in bytes).
 *
 * NOTE(review): the switch statement and case labels are elided in this
 * chunk; the visible calls imply size 4 -> __cmpxchg_u32 and
 * size 8 -> __cmpxchg_u64, with any other size falling through to the
 * undefined __cmpxchg_called_with_bad_pointer() for a link-time error.
 * Confirm the exact case labels against the original source.
 */
234 static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old,
235 	unsigned long new, int size)
/* 32-bit operand. */
239 	return __cmpxchg_u32(ptr, old, new);
/* 64-bit operand (link error on 32-bit kernels via the stub #define). */
241 	return __cmpxchg_u64(ptr, old, new);
/* Invalid size: reference an undefined symbol -> linker error. */
243 	__cmpxchg_called_with_bad_pointer();
/*
 * __cmpxchg_local - size dispatcher for the CPU-local (no SMP barrier)
 * compare-and-exchange variants; mirrors __cmpxchg above.
 *
 * NOTE(review): the switch statement and case labels are elided in this
 * chunk - confirm against the original source.
 */
247 static inline unsigned long __cmpxchg_local(volatile void * ptr,
248 	unsigned long old, unsigned long new, int size)
/* 32-bit operand. */
252 	return __cmpxchg_u32_local(ptr, old, new);
/* 64-bit operand (link error on 32-bit kernels via the stub #define). */
254 	return __cmpxchg_u64_local(ptr, old, new);
/* Invalid size: reference an undefined symbol -> linker error. */
256 	__cmpxchg_called_with_bad_pointer();
/*
 * cmpxchg(ptr, old, new): public compare-and-exchange entry point.
 * Forwards to __cmpxchg() with sizeof(*(ptr)) for width dispatch and
 * casts the result back to the pointed-to type. Note old/new are
 * funneled through unsigned long, so pointer and integer types up to
 * the native word size are supported.
 */
260 #define cmpxchg(ptr,old,new) \
261 	((__typeof__(*(ptr)))__cmpxchg((ptr), \
262 	(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
/*
 * cmpxchg_local(ptr, old, new): same shape as cmpxchg() but routed to
 * the CPU-local variants (no SMP ordering guarantees).
 */
264 #define cmpxchg_local(ptr,old,new) \
265 	((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
266 	(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
268 #endif /* asm/system.h */