X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=include%2Fasm-mips%2Ffutex.h;h=47e5679c235303f5812eb1d4cd39a2140af89016;hb=a2f44b27303a5353859d77a3e96a1d3f33f56ab7;hp=2454c44a8f54c11b99771784eb2f77974a4a3d9c;hpb=134734d875a0a48d994ef20b9905209b4b8b6f75;p=linux-2.6.git diff --git a/include/asm-mips/futex.h b/include/asm-mips/futex.h index 2454c44a8..47e5679c2 100644 --- a/include/asm-mips/futex.h +++ b/include/asm-mips/futex.h @@ -1,45 +1,77 @@ +/* + * This file is subject to the terms and conditions of the GNU General Public + * License. See the file "COPYING" in the main directory of this archive + * for more details. + * + * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org) + */ #ifndef _ASM_FUTEX_H #define _ASM_FUTEX_H #ifdef __KERNEL__ -#include #include +#include #include #include - -#ifdef CONFIG_SMP -#define __FUTEX_SMP_SYNC " sync \n" -#else -#define __FUTEX_SMP_SYNC -#endif +#include #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \ { \ - __asm__ __volatile__( \ - " .set push \n" \ - " .set noat \n" \ - " .set mips3 \n" \ - "1: ll %1, (%3) # __futex_atomic_op1 \n" \ - " .set mips0 \n" \ - " " insn " \n" \ - " .set mips3 \n" \ - "2: sc $1, (%3) \n" \ - " beqzl $1, 1b \n" \ - __FUTEX_SMP_SYNC \ - "3: \n" \ - " .set pop \n" \ - " .set mips0 \n" \ - " .section .fixup,\"ax\" \n" \ - "4: li %0, %5 \n" \ - " j 2b \n" \ - " .previous \n" \ - " .section __ex_table,\"a\" \n" \ - " "__UA_ADDR "\t1b, 4b \n" \ - " "__UA_ADDR "\t2b, 4b \n" \ - " .previous \n" \ - : "=r" (ret), "=r" (oldval) \ - : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \ + if (cpu_has_llsc && R10000_LLSC_WAR) { \ + __asm__ __volatile__( \ + " .set push \n" \ + " .set noat \n" \ + " .set mips3 \n" \ + "1: ll %1, %4 # __futex_atomic_op \n" \ + " .set mips0 \n" \ + " " insn " \n" \ + " .set mips3 \n" \ + "2: sc $1, %2 \n" \ + " beqzl $1, 1b \n" \ + __WEAK_ORDERING_MB \ + "3: \n" \ + " .set pop \n" \ + " .set mips0 \n" \ + " .section .fixup,\"ax\" \n" \ + "4: li %0, %6 \n" \ + " j 2b \n" \ + " .previous \n" \ + " .section __ex_table,\"a\" \n" \ + " "__UA_ADDR "\t1b, 4b \n" \ + " "__UA_ADDR "\t2b, 4b \n" \ + " .previous \n" \ + : "=r" (ret), "=&r" (oldval), "=R" (*uaddr) \ + : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT) \ + : "memory"); \ + } else if (cpu_has_llsc) { \ + __asm__ __volatile__( \ + " .set push \n" \ + " .set noat \n" \ + " .set mips3 \n" \ + "1: ll %1, %4 # __futex_atomic_op \n" \ + " .set mips0 \n" \ + " " insn " \n" \ + " .set mips3 \n" \ + "2: sc $1, %2 \n" \ + " beqz $1, 1b \n" \ + __WEAK_ORDERING_MB \ + "3: \n" \ + " .set pop \n" \ + " .set mips0 \n" \ + " .section .fixup,\"ax\" \n" \ + "4: li %0, %6 \n" \ + " j 2b \n" \ + " .previous \n" \ + " .section __ex_table,\"a\" \n" \ + " "__UA_ADDR "\t1b, 4b \n" \ + " "__UA_ADDR "\t2b, 4b \n" \ + " .previous \n" \ + : "=r" (ret), "=&r" (oldval), "=R" (*uaddr) \ + : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT) \ + : "memory"); \ + } else \ + ret = -ENOSYS; \ } static inline int @@ -56,34 +88,34 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr) if (! 
access_ok (VERIFY_WRITE, uaddr, sizeof(int))) return -EFAULT; - inc_preempt_count(); + pagefault_disable(); switch (op) { case FUTEX_OP_SET: - __futex_atomic_op("move $1, %z4", ret, oldval, uaddr, oparg); + __futex_atomic_op("move $1, %z5", ret, oldval, uaddr, oparg); break; case FUTEX_OP_ADD: - __futex_atomic_op("addu $1, %1, %z4", + __futex_atomic_op("addu $1, %1, %z5", ret, oldval, uaddr, oparg); break; case FUTEX_OP_OR: - __futex_atomic_op("or $1, %1, %z4", + __futex_atomic_op("or $1, %1, %z5", ret, oldval, uaddr, oparg); break; case FUTEX_OP_ANDN: - __futex_atomic_op("and $1, %1, %z4", + __futex_atomic_op("and $1, %1, %z5", ret, oldval, uaddr, ~oparg); break; case FUTEX_OP_XOR: - __futex_atomic_op("xor $1, %1, %z4", + __futex_atomic_op("xor $1, %1, %z5", ret, oldval, uaddr, oparg); break; default: ret = -ENOSYS; } - dec_preempt_count(); + pagefault_enable(); if (!ret) { switch (cmp) { @@ -99,5 +131,73 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr) return ret; } +static inline int +futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval) +{ + int retval; + + if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int))) + return -EFAULT; + + if (cpu_has_llsc && R10000_LLSC_WAR) { + __asm__ __volatile__( + "# futex_atomic_cmpxchg_inatomic \n" + " .set push \n" + " .set noat \n" + " .set mips3 \n" + "1: ll %0, %2 \n" + " bne %0, %z3, 3f \n" + " .set mips0 \n" + " move $1, %z4 \n" + " .set mips3 \n" + "2: sc $1, %1 \n" + " beqzl $1, 1b \n" + __WEAK_ORDERING_MB + "3: \n" + " .set pop \n" + " .section .fixup,\"ax\" \n" + "4: li %0, %5 \n" + " j 3b \n" + " .previous \n" + " .section __ex_table,\"a\" \n" + " "__UA_ADDR "\t1b, 4b \n" + " "__UA_ADDR "\t2b, 4b \n" + " .previous \n" + : "=&r" (retval), "=R" (*uaddr) + : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT) + : "memory"); + } else if (cpu_has_llsc) { + __asm__ __volatile__( + "# futex_atomic_cmpxchg_inatomic \n" + " .set push \n" + " .set noat \n" + " .set mips3 \n" + "1: ll %0, %2 \n" + " bne %0, %z3, 3f \n" + " .set mips0 \n" + " move $1, %z4 \n" + " .set mips3 \n" + "2: sc $1, %1 \n" + " beqz $1, 1b \n" + __WEAK_ORDERING_MB + "3: \n" + " .set pop \n" + " .section .fixup,\"ax\" \n" + "4: li %0, %5 \n" + " j 3b \n" + " .previous \n" + " .section __ex_table,\"a\" \n" + " "__UA_ADDR "\t1b, 4b \n" + " "__UA_ADDR "\t2b, 4b \n" + " .previous \n" + : "=&r" (retval), "=R" (*uaddr) + : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT) + : "memory"); + } else + return -ENOSYS; + + return retval; +} + #endif #endif
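
Note (editor's sketch, not part of the patch): both new code paths are LL/SC retry loops. "ll" loads the futex word and opens a linked access, the requested operation is computed into $1, and "sc" stores $1 back only if nothing else wrote the word in between; otherwise the beqz/beqzl branch retries from label 1:. The R10000_LLSC_WAR variant differs only in using the branch-likely form beqzl as a workaround for an ll/sc erratum in early R10000 cores, and __WEAK_ORDERING_MB supplies the memory barrier the old __FUTEX_SMP_SYNC used to provide. A rough, userspace-only model of that control flow, using C11 atomics as a stand-in for ll/sc (the function names below are illustrative and nothing here mirrors the kernel's uaccess or .fixup handling), looks like this:

#include <stdatomic.h>

/*
 * Sketch of __futex_atomic_op for the FUTEX_OP_ADD case: keep retrying
 * until the compare-exchange (standing in for "sc") succeeds, i.e. until
 * no concurrent writer touched *uaddr between the load and the store.
 */
static int futex_op_add_sketch(_Atomic int *uaddr, int oparg, int *oldval)
{
	int old = atomic_load(uaddr);		/* plays the role of "ll" */

	/* on failure 'old' is refreshed, mirroring the branch back to 1: */
	while (!atomic_compare_exchange_weak(uaddr, &old, old + oparg))
		;

	*oldval = old;				/* caller then applies cmp/cmparg */
	return 0;
}

/*
 * Sketch of futex_atomic_cmpxchg_inatomic: store newval only if the word
 * still holds oldval, and return whatever value was observed there.
 */
static int futex_cmpxchg_sketch(_Atomic int *uaddr, int oldval, int newval)
{
	int seen = oldval;

	atomic_compare_exchange_strong(uaddr, &seen, newval);
	return seen;				/* old contents, matched or not */
}

The real code additionally records labels 1: and 2: in __ex_table, so a faulting user access lands in the fixup at 4: and the function returns -EFAULT instead of oopsing on the load or store.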