This commit was manufactured by cvs2svn to create a tag.
[linux-2.6.git] / include / asm-mips / spinlock.h
index adbca38..f16118f 100644 (file)
@@ -9,8 +9,6 @@
 #ifndef _ASM_SPINLOCK_H
 #define _ASM_SPINLOCK_H
 
-#include <asm/war.h>
-
 /*
  * Your basic SMP spinlocks, allowing only a single CPU anywhere
  */
@@ -38,43 +36,27 @@ static inline void _raw_spin_lock(spinlock_t *lock)
 {
        unsigned int tmp;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_spin_lock        \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bnez    %1, 1b                                  \n"
-               "        li     %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqzl   %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       sync                                            \n"
-               "       .set    reorder                                 \n"
-               : "=m" (lock->lock), "=&r" (tmp)
-               : "m" (lock->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_spin_lock        \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bnez    %1, 1b                                  \n"
-               "        li     %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        sync                                           \n"
-               "       .set    reorder                                 \n"
-               : "=m" (lock->lock), "=&r" (tmp)
-               : "m" (lock->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_spin_lock\n"
+       "1:\tll\t%1, %2\n\t"
+       "bnez\t%1, 1b\n\t"
+       " li\t%1, 1\n\t"
+       "sc\t%1, %0\n\t"
+       "beqz\t%1, 1b\n\t"
+       " sync\n\t"
+       ".set\treorder"
+       : "=m" (lock->lock), "=&r" (tmp)
+       : "m" (lock->lock)
+       : "memory");
 }
 
 static inline void _raw_spin_unlock(spinlock_t *lock)
 {
        __asm__ __volatile__(
-       "       .set    noreorder       # _raw_spin_unlock      \n"
-       "       sync                                            \n"
-       "       sw      $0, %0                                  \n"
-       "       .set\treorder                                   \n"
+       ".set\tnoreorder\t\t\t# _raw_spin_unlock\n\t"
+       "sync\n\t"
+       "sw\t$0, %0\n\t"
+       ".set\treorder"
        : "=m" (lock->lock)
        : "m" (lock->lock)
        : "memory");
@@ -84,34 +66,17 @@ static inline unsigned int _raw_spin_trylock(spinlock_t *lock)
 {
        unsigned int temp, res;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_spin_trylock     \n"
-               "1:     ll      %0, %3                                  \n"
-               "       ori     %2, %0, 1                               \n"
-               "       sc      %2, %1                                  \n"
-               "       beqzl   %2, 1b                                  \n"
-               "        nop                                            \n"
-               "       andi    %2, %0, 1                               \n"
-               "       sync                                            \n"
-               "       .set    reorder"
-               : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
-               : "m" (lock->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_spin_trylock     \n"
-               "1:     ll      %0, %3                                  \n"
-               "       ori     %2, %0, 1                               \n"
-               "       sc      %2, %1                                  \n"
-               "       beqz    %2, 1b                                  \n"
-               "        andi   %2, %0, 1                               \n"
-               "       sync                                            \n"
-               "       .set    reorder"
-               : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
-               : "m" (lock->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_spin_trylock\n\t"
+       "1:\tll\t%0, %3\n\t"
+       "ori\t%2, %0, 1\n\t"
+       "sc\t%2, %1\n\t"
+       "beqz\t%2, 1b\n\t"
+       " andi\t%2, %0, 1\n\t"
+       ".set\treorder"
+       : "=&r" (temp), "=m" (lock->lock), "=&r" (res)
+       : "m" (lock->lock)
+       : "memory");
 
        return res == 0;
 }
@@ -139,34 +104,18 @@ static inline void _raw_read_lock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_read_lock        \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bltz    %1, 1b                                  \n"
-               "        addu   %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqzl   %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       sync                                            \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_read_lock        \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bltz    %1, 1b                                  \n"
-               "        addu   %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        sync                                           \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_read_lock\n"
+       "1:\tll\t%1, %2\n\t"
+       "bltz\t%1, 1b\n\t"
+       " addu\t%1, 1\n\t"
+       "sc\t%1, %0\n\t"
+       "beqz\t%1, 1b\n\t"
+       " sync\n\t"
+       ".set\treorder"
+       : "=m" (rw->lock), "=&r" (tmp)
+       : "m" (rw->lock)
+       : "memory");
 }
 
 /* Note the use of sub, not subu which will make the kernel die with an
@@ -176,71 +125,44 @@ static inline void _raw_read_unlock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "1:     ll      %1, %2          # _raw_read_unlock      \n"
-               "       sub     %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqzl   %1, 1b                                  \n"
-               "       sync                                            \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_read_unlock      \n"
-               "1:     ll      %1, %2                                  \n"
-               "       sub     %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        sync                                           \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_read_unlock\n"
+       "1:\tll\t%1, %2\n\t"
+       "sub\t%1, 1\n\t"
+       "sc\t%1, %0\n\t"
+       "beqz\t%1, 1b\n\t"
+       " sync\n\t"
+       ".set\treorder"
+       : "=m" (rw->lock), "=&r" (tmp)
+       : "m" (rw->lock)
+       : "memory");
 }
 
 static inline void _raw_write_lock(rwlock_t *rw)
 {
        unsigned int tmp;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_write_lock       \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bnez    %1, 1b                                  \n"
-               "        lui    %1, 0x8000                              \n"
-               "       sc      %1, %0                                  \n"
-               "       beqzl   %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       sync                                            \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_write_lock       \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bnez    %1, 1b                                  \n"
-               "        lui    %1, 0x8000                              \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       sync                                            \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_write_lock\n"
+       "1:\tll\t%1, %2\n\t"
+       "bnez\t%1, 1b\n\t"
+       " lui\t%1, 0x8000\n\t"
+       "sc\t%1, %0\n\t"
+       "beqz\t%1, 1b\n\t"
+       " sync\n\t"
+       ".set\treorder"
+       : "=m" (rw->lock), "=&r" (tmp)
+       : "m" (rw->lock)
+       : "memory");
 }
 
 static inline void _raw_write_unlock(rwlock_t *rw)
 {
        __asm__ __volatile__(
-       "       sync                    # _raw_write_unlock     \n"
-       "       sw      $0, %0                                  \n"
+       ".set\tnoreorder\t\t\t# _raw_write_unlock\n\t"
+       "sync\n\t"
+       "sw\t$0, %0\n\t"
+       ".set\treorder"
        : "=m" (rw->lock)
        : "m" (rw->lock)
        : "memory");
@@ -251,40 +173,21 @@ static inline int _raw_write_trylock(rwlock_t *rw)
        unsigned int tmp;
        int ret;
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_write_trylock    \n"
-               "       li      %2, 0                                   \n"
-               "1:     ll      %1, %3                                  \n"
-               "       bnez    %1, 2f                                  \n"
-               "        lui    %1, 0x8000                              \n"
-               "       sc      %1, %0                                  \n"
-               "       beqzl   %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       sync                                            \n"
-               "       li      %2, 1                                   \n"
-               "       .set    reorder                                 \n"
-               "2:                                                     \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
-               : "memory");
-       } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # _raw_write_trylock    \n"
-               "       li      %2, 0                                   \n"
-               "1:     ll      %1, %3                                  \n"
-               "       bnez    %1, 2f                                  \n"
-               "       lui     %1, 0x8000                              \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        sync                                           \n"
-               "       li      %2, 1                                   \n"
-               "       .set    reorder                                 \n"
-               "2:                                                     \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
-               : "memory");
-       }
+       __asm__ __volatile__(
+       ".set\tnoreorder\t\t\t# _raw_write_trylock\n"
+       "li\t%2, 0\n\t"
+       "1:\tll\t%1, %3\n\t"
+       "bnez\t%1, 2f\n\t"
+       "lui\t%1, 0x8000\n\t"
+       "sc\t%1, %0\n\t"
+       "beqz\t%1, 1b\n\t"
+       "sync\n\t"
+       "li\t%2, 1\n\t"
+       ".set\treorder\n"
+       "2:"
+       : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+       : "m" (rw->lock)
+       : "memory");
 
        return ret;
 }