#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/config.h>
#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */
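
/*
 * Illustrative sketch (not part of the original header): typical use of
 * these primitives through the generic wrappers from <linux/spinlock.h>;
 * "example_lock" is a made-up name.
 *
 *	static spinlock_t example_lock = SPIN_LOCK_UNLOCKED;
 *
 *	spin_lock(&example_lock);
 *	... critical section, no sleeping ...
 *	spin_unlock(&example_lock);
 */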

typedef struct {
	volatile unsigned int lock /*__attribute__((aligned(32))) */;
#ifdef CONFIG_DEBUG_SPINLOCK
	int on_cpu;
	int line_no;
	void *previous;
	struct task_struct * task;
	const char *base_file;
#endif
#ifdef CONFIG_PREEMPT
	unsigned int break_lock;
#endif
} spinlock_t;

#ifdef CONFIG_DEBUG_SPINLOCK
#define SPIN_LOCK_UNLOCKED (spinlock_t) {0, -1, 0, NULL, NULL, NULL}
#define spin_lock_init(x) \
	((x)->lock = 0, (x)->on_cpu = -1, (x)->previous = NULL, (x)->task = NULL)
#else
#define SPIN_LOCK_UNLOCKED	(spinlock_t) { 0 }
#define spin_lock_init(x)	((x)->lock = 0)
#endif
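
/*
 * spin_is_locked() and spin_unlock_wait() only look at the lock word;
 * the latter simply spins until the current holder drops the lock.
 */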
#define spin_is_locked(x)	((x)->lock != 0)
#define spin_unlock_wait(x)	({ do { barrier(); } while ((x)->lock); })
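
/*
 * The flags argument is not needed here: the macro simply takes the lock
 * and leaves the interrupt state to the generic spin_lock_irqsave()
 * wrappers.
 */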
#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)

#ifdef CONFIG_DEBUG_SPINLOCK
extern void _raw_spin_unlock(spinlock_t * lock);
extern void debug_spin_lock(spinlock_t * lock, const char *, int);
extern int debug_spin_trylock(spinlock_t * lock, const char *, int);

#define _raw_spin_lock(LOCK)	debug_spin_lock(LOCK, __BASE_FILE__, __LINE__)
#define _raw_spin_trylock(LOCK)	debug_spin_trylock(LOCK, __BASE_FILE__, __LINE__)
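
/*
 * Debug helper: complain if LOCK is not held on the current CPU,
 * printing who called it and which CPU the lock is taken or was freed on.
 */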
#define spin_lock_own(LOCK, LOCATION)					\
do {									\
	if (!((LOCK)->lock && (LOCK)->on_cpu == smp_processor_id()))	\
		printk("%s: called on %d from %p but lock %s on %d\n",	\
		       LOCATION, smp_processor_id(),			\
		       __builtin_return_address(0),			\
		       (LOCK)->lock ? "taken" : "freed", (LOCK)->on_cpu); \
} while (0)

#else

static inline void _raw_spin_unlock(spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}

static inline void _raw_spin_lock(spinlock_t * lock)
{
	long tmp;

	/* Use sub-sections to put the actual loop at the end
	   of this object file's text section so as to perfect
	   branch prediction.  */
	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}
static inline int _raw_spin_trylock(spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

#define spin_lock_own(LOCK, LOCATION)	((void)0)
#endif /* CONFIG_DEBUG_SPINLOCK */

/***********************************************************/
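
/*
 * A read-write lock is a single word: bit 0 is the write lock and the
 * remaining 31 bits form the reader count, so a value of zero means the
 * lock is completely free.
 */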

typedef struct {
	volatile unsigned int write_lock:1, read_counter:31;
#ifdef CONFIG_PREEMPT
	unsigned int break_lock;
#endif
} /*__attribute__((aligned(32)))*/ rwlock_t;

#define RW_LOCK_UNLOCKED	(rwlock_t) { 0, 0 }

#define rwlock_init(x)	do { *(x) = RW_LOCK_UNLOCKED; } while(0)
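
/*
 * With CONFIG_DEBUG_RWLOCK the lock routines live out of line and are
 * only declared here; otherwise the inline versions below are used.
 */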
#ifdef CONFIG_DEBUG_RWLOCK
extern void _raw_write_lock(rwlock_t * lock);
extern void _raw_read_lock(rwlock_t * lock);
#else

static inline void _raw_write_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void _raw_read_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
#endif /* CONFIG_DEBUG_RWLOCK */
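
/*
 * Try to take the write lock: returns nonzero if the lock was acquired,
 * zero if a writer or any readers already hold it; it does not wait for
 * the lock to become free.
 */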
static inline int _raw_write_trylock(rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline void _raw_write_unlock(rwlock_t * lock)
{
	mb();
	*(volatile int *)lock = 0;
}
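
/*
 * Dropping a read lock has to adjust the reader count atomically, so it
 * uses another small load-locked/store-conditional retry loop.
 */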
static inline void _raw_read_unlock(rwlock_t * lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

#endif /* _ALPHA_SPINLOCK_H */