vserver 1.9.5.x5
diff --git a/include/asm-mips/spinlock.h b/include/asm-mips/spinlock.h
index f16118f..114d3eb 100644
--- a/include/asm-mips/spinlock.h
+++ b/include/asm-mips/spinlock.h
@@ -9,12 +9,18 @@
 #ifndef _ASM_SPINLOCK_H
 #define _ASM_SPINLOCK_H
 
+#include <linux/config.h>
+#include <asm/war.h>
+
 /*
  * Your basic SMP spinlocks, allowing only a single CPU anywhere
  */
 
 typedef struct {
        volatile unsigned int lock;
+#ifdef CONFIG_PREEMPT
+       unsigned int break_lock;
+#endif
 } spinlock_t;
 
 #define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
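
The new break_lock field exists only on CONFIG_PREEMPT kernels: it is a contention
flag that the generic lock-breaking code can consult so the holder of a hot lock
can be asked to drop and retake it. A minimal sketch of the idea, assuming GCC's
__atomic builtins; the demo_* names are illustrative, not the kernel's API:

typedef struct {
        volatile unsigned int lock;
        unsigned int break_lock;        /* nonzero while another CPU is waiting */
} demo_spinlock_t;

static void demo_spin_lock(demo_spinlock_t *l)
{
        /* exchange returns the old value: nonzero means the lock was held */
        while (__atomic_exchange_n(&l->lock, 1, __ATOMIC_ACQUIRE)) {
                l->break_lock = 1;      /* advertise contention to the holder */
                while (l->lock)
                        ;               /* spin on plain reads until it looks free */
        }
        l->break_lock = 0;
}
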
@@ -36,27 +42,43 @@ static inline void _raw_spin_lock(spinlock_t *lock)
 {
        unsigned int tmp;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_spin_lock\n"
-       "1:\tll\t%1, %2\n\t"
-       "bnez\t%1, 1b\n\t"
-       " li\t%1, 1\n\t"
-       "sc\t%1, %0\n\t"
-       "beqz\t%1, 1b\n\t"
-       " sync\n\t"
-       ".set\treorder"
-       : "=m" (lock->lock), "=&r" (tmp)
-       : "m" (lock->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_spin_lock        \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bnez    %1, 1b                                  \n"
+               "        li     %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "        nop                                            \n"
+               "       sync                                            \n"
+               "       .set    reorder                                 \n"
+               : "=m" (lock->lock), "=&r" (tmp)
+               : "m" (lock->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_spin_lock        \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bnez    %1, 1b                                  \n"
+               "        li     %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "        sync                                           \n"
+               "       .set    reorder                                 \n"
+               : "=m" (lock->lock), "=&r" (tmp)
+               : "m" (lock->lock)
+               : "memory");
+       }
 }
 
 static inline void _raw_spin_unlock(spinlock_t *lock)
 {
        __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_spin_unlock\n\t"
-       "sync\n\t"
-       "sw\t$0, %0\n\t"
-       ".set\treorder"
+       "       .set    noreorder       # _raw_spin_unlock      \n"
+       "       sync                                            \n"
+       "       sw      $0, %0                                  \n"
+       "       .set\treorder                                   \n"
        : "=m" (lock->lock)
        : "m" (lock->lock)
        : "memory");
@@ -66,17 +88,34 @@ static inline unsigned int _raw_spin_trylock(spinlock_t *lock)
 {
        unsigned int temp, res;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_spin_trylock\n\t"
-       "1:\tll\t%0, %3\n\t"
-       "ori\t%2, %0, 1\n\t"
-       "sc\t%2, %1\n\t"
-       "beqz\t%2, 1b\n\t"
-       " andi\t%2, %0, 1\n\t"
-       ".set\treorder"
-       : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
-       : "m" (lock->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_spin_trylock     \n"
+               "1:     ll      %0, %3                                  \n"
+               "       ori     %2, %0, 1                               \n"
+               "       sc      %2, %1                                  \n"
+               "       beqzl   %2, 1b                                  \n"
+               "        nop                                            \n"
+               "       andi    %2, %0, 1                               \n"
+               "       sync                                            \n"
+               "       .set    reorder"
+               : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
+               : "m" (lock->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_spin_trylock     \n"
+               "1:     ll      %0, %3                                  \n"
+               "       ori     %2, %0, 1                               \n"
+               "       sc      %2, %1                                  \n"
+               "       beqz    %2, 1b                                  \n"
+               "        andi   %2, %0, 1                               \n"
+               "       sync                                            \n"
+               "       .set    reorder"
+               : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
+               : "m" (lock->lock)
+               : "memory");
+       }
 
        return res == 0;
 }
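
_raw_spin_trylock() sets the low bit with ori and returns res == 0, i.e. it
succeeds only when the previously observed word had bit 0 clear and the sc
committed. A hedged usage sketch; real callers would normally go through the
spin_trylock()/spin_unlock() wrappers rather than the _raw_ primitives:

static int demo_try_update(spinlock_t *lock, unsigned int *counter)
{
        if (!_raw_spin_trylock(lock))
                return 0;               /* already held, caller backs off */
        (*counter)++;                   /* short critical section */
        _raw_spin_unlock(lock);
        return 1;
}
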
@@ -92,30 +131,47 @@ static inline unsigned int _raw_spin_trylock(spinlock_t *lock)
 
 typedef struct {
        volatile unsigned int lock;
+#ifdef CONFIG_PREEMPT
+       unsigned int break_lock;
+#endif
 } rwlock_t;
 
 #define RW_LOCK_UNLOCKED (rwlock_t) { 0 }
 
 #define rwlock_init(x)  do { *(x) = RW_LOCK_UNLOCKED; } while(0)
 
-#define rwlock_is_locked(x) ((x)->lock)
-
 static inline void _raw_read_lock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_read_lock\n"
-       "1:\tll\t%1, %2\n\t"
-       "bltz\t%1, 1b\n\t"
-       " addu\t%1, 1\n\t"
-       "sc\t%1, %0\n\t"
-       "beqz\t%1, 1b\n\t"
-       " sync\n\t"
-       ".set\treorder"
-       : "=m" (rw->lock), "=&r" (tmp)
-       : "m" (rw->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_read_lock        \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bltz    %1, 1b                                  \n"
+               "        addu   %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "        nop                                            \n"
+               "       sync                                            \n"
+               "       .set    reorder                                 \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_read_lock        \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bltz    %1, 1b                                  \n"
+               "        addu   %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "        sync                                           \n"
+               "       .set    reorder                                 \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       }
 }
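
The rwlock packs its state into a single word: bit 31 is the writer flag (set
with "lui %1, 0x8000" in the writer path further down) and the low bits count
readers, which is why the reader path spins on bltz and bumps the word with
addu. A C-level sketch of the reader acquire, with GCC builtins and a
hypothetical DEMO_RW_WRITER constant standing in for bit 31:

#define DEMO_RW_WRITER  0x80000000u     /* bit 31: a writer holds the lock */

static void demo_read_lock(volatile unsigned int *lock)
{
        unsigned int old;

        for (;;) {
                old = __atomic_load_n(lock, __ATOMIC_RELAXED);
                if (old & DEMO_RW_WRITER)
                        continue;       /* writer active: keep spinning (bltz) */
                if (__atomic_compare_exchange_n(lock, &old, old + 1, 0,
                                                __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
                        break;          /* sc committed the incremented count */
        }
}
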
 
 /* Note the use of sub, not subu which will make the kernel die with an
@@ -125,69 +181,117 @@ static inline void _raw_read_unlock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_read_unlock\n"
-       "1:\tll\t%1, %2\n\t"
-       "sub\t%1, 1\n\t"
-       "sc\t%1, %0\n\t"
-       "beqz\t%1, 1b\n\t"
-       " sync\n\t"
-       ".set\treorder"
-       : "=m" (rw->lock), "=&r" (tmp)
-       : "m" (rw->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "1:     ll      %1, %2          # _raw_read_unlock      \n"
+               "       sub     %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "       sync                                            \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_read_unlock      \n"
+               "1:     ll      %1, %2                                  \n"
+               "       sub     %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "        sync                                           \n"
+               "       .set    reorder                                 \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       }
 }
 
 static inline void _raw_write_lock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_write_lock\n"
-       "1:\tll\t%1, %2\n\t"
-       "bnez\t%1, 1b\n\t"
-       " lui\t%1, 0x8000\n\t"
-       "sc\t%1, %0\n\t"
-       "beqz\t%1, 1b\n\t"
-       " sync\n\t"
-       ".set\treorder"
-       : "=m" (rw->lock), "=&r" (tmp)
-       : "m" (rw->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_write_lock       \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bnez    %1, 1b                                  \n"
+               "        lui    %1, 0x8000                              \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "        nop                                            \n"
+               "       sync                                            \n"
+               "       .set    reorder                                 \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_write_lock       \n"
+               "1:     ll      %1, %2                                  \n"
+               "       bnez    %1, 1b                                  \n"
+               "        lui    %1, 0x8000                              \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "        nop                                            \n"
+               "       sync                                            \n"
+               "       .set    reorder                                 \n"
+               : "=m" (rw->lock), "=&r" (tmp)
+               : "m" (rw->lock)
+               : "memory");
+       }
 }
 
 static inline void _raw_write_unlock(rwlock_t *rw)
 {
        __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_write_unlock\n\t"
-       "sync\n\t"
-       "sw\t$0, %0\n\t"
-       ".set\treorder"
+       "       sync                    # _raw_write_unlock     \n"
+       "       sw      $0, %0                                  \n"
        : "=m" (rw->lock)
        : "m" (rw->lock)
        : "memory");
 }
 
+#define _raw_read_trylock(lock) generic_raw_read_trylock(lock)
+
 static inline int _raw_write_trylock(rwlock_t *rw)
 {
        unsigned int tmp;
        int ret;
 
-       __asm__ __volatile__(
-       ".set\tnoreorder\t\t\t# _raw_write_trylock\n"
-       "li\t%2, 0\n\t"
-       "1:\tll\t%1, %3\n\t"
-       "bnez\t%1, 2f\n\t"
-       "lui\t%1, 0x8000\n\t"
-       "sc\t%1, %0\n\t"
-       "beqz\t%1, 1b\n\t"
-       "sync\n\t"
-       "li\t%2, 1\n\t"
-       ".set\treorder\n"
-       "2:"
-       : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-       : "m" (rw->lock)
-       : "memory");
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_write_trylock    \n"
+               "       li      %2, 0                                   \n"
+               "1:     ll      %1, %3                                  \n"
+               "       bnez    %1, 2f                                  \n"
+               "        lui    %1, 0x8000                              \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "        nop                                            \n"
+               "       sync                                            \n"
+               "       li      %2, 1                                   \n"
+               "       .set    reorder                                 \n"
+               "2:                                                     \n"
+               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : "m" (rw->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # _raw_write_trylock    \n"
+               "       li      %2, 0                                   \n"
+               "1:     ll      %1, %3                                  \n"
+               "       bnez    %1, 2f                                  \n"
+               "       lui     %1, 0x8000                              \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "        sync                                           \n"
+               "       li      %2, 1                                   \n"
+               "       .set    reorder                                 \n"
+               "2:                                                     \n"
+               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : "m" (rw->lock)
+               : "memory");
+       }
 
        return ret;
 }
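
_raw_write_trylock() succeeds only when the whole word is zero, i.e. no readers
and no writer, and then claims the lock by storing the writer bit; it returns 1
on success and 0 otherwise, while _raw_read_trylock is simply deferred to the
generic generic_raw_read_trylock() fallback. A hedged C-level equivalent of the
write path:

static int demo_write_trylock(volatile unsigned int *lock)
{
        unsigned int expected = 0;      /* must be completely unlocked */

        /* claim bit 31 only if no reader or writer is present */
        return __atomic_compare_exchange_n(lock, &expected, 0x80000000u, 0,
                                           __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}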