X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=arch%2Fsparc64%2Flib%2Fbitops.S;h=fd20171ecfd102006f13219615d9942b78ddaff7;hb=6a77f38946aaee1cd85eeec6cf4229b204c15071;hp=c1e370068673ca217f1bb4bef0dd8f53ea64f062;hpb=87fc8d1bb10cd459024a742c6a10961fefcef18f;p=linux-2.6.git

diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index c1e370068..fd20171ec 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -4,69 +4,142 @@
  * Copyright (C) 2000 David S. Miller (davem@redhat.com)
  */
 
+#include <linux/config.h>
 #include <asm/asi.h>
 
+	/* On SMP we need to use memory barriers to ensure
+	 * correct memory operation ordering, nop these out
+	 * for uniprocessor.
+	 */
+#ifdef CONFIG_SMP
+#define BITOP_PRE_BARRIER	membar #StoreLoad | #LoadLoad
+#define BITOP_POST_BARRIER	membar #StoreLoad | #StoreStore
+#else
+#define BITOP_PRE_BARRIER	nop
+#define BITOP_POST_BARRIER	nop
+#endif
+
 	.text
-	.align	64
-	.globl	___test_and_set_bit
-	.type	___test_and_set_bit,#function
-___test_and_set_bit:	/* %o0=nr, %o1=addr */
+
+	.globl	test_and_set_bit
+	.type	test_and_set_bit,#function
+test_and_set_bit:	/* %o0=nr, %o1=addr */
+	BITOP_PRE_BARRIER
+	srlx	%o0, 6, %g1
+	mov	1, %g5
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%g5, %g2, %g5
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	or	%g7, %g5, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, 1b
+	 and	%g7, %g5, %g2
+	BITOP_POST_BARRIER
+	clr	%o0
+	retl
+	 movrne	%g2, 1, %o0
+	.size	test_and_set_bit, .-test_and_set_bit
+
+	.globl	test_and_clear_bit
+	.type	test_and_clear_bit,#function
+test_and_clear_bit:	/* %o0=nr, %o1=addr */
+	BITOP_PRE_BARRIER
+	srlx	%o0, 6, %g1
+	mov	1, %g5
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%g5, %g2, %g5
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	andn	%g7, %g5, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, 1b
+	 and	%g7, %g5, %g2
+	BITOP_POST_BARRIER
+	clr	%o0
+	retl
+	 movrne	%g2, 1, %o0
+	.size	test_and_clear_bit, .-test_and_clear_bit
+
+	.globl	test_and_change_bit
+	.type	test_and_change_bit,#function
+test_and_change_bit:	/* %o0=nr, %o1=addr */
+	BITOP_PRE_BARRIER
+	srlx	%o0, 6, %g1
+	mov	1, %g5
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%g5, %g2, %g5
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	xor	%g7, %g5, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, 1b
+	 and	%g7, %g5, %g2
+	BITOP_POST_BARRIER
+	clr	%o0
+	retl
+	 movrne	%g2, 1, %o0
+	.size	test_and_change_bit, .-test_and_change_bit
+
+	.globl	set_bit
+	.type	set_bit,#function
+set_bit:	/* %o0=nr, %o1=addr */
 	srlx	%o0, 6, %g1
 	mov	1, %g5
 	sllx	%g1, 3, %g3
 	and	%o0, 63, %g2
 	sllx	%g5, %g2, %g5
 	add	%o1, %g3, %o1
-	ldx	[%o1], %g7
-1:	andcc	%g7, %g5, %o0
-	bne,pn	%xcc, 2f
-	 xor	%g7, %g5, %g1
+1:	ldx	[%o1], %g7
+	or	%g7, %g5, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,a,pn %xcc, 1b
-	 ldx	[%o1], %g7
-2:	retl
-	 membar	#StoreLoad | #StoreStore
-	.size	___test_and_set_bit, .-___test_and_set_bit
+	bne,pn	%xcc, 1b
+	 nop
+	retl
+	 nop
+	.size	set_bit, .-set_bit
 
-	.globl	___test_and_clear_bit
-	.type	___test_and_clear_bit,#function
-___test_and_clear_bit:	/* %o0=nr, %o1=addr */
+	.globl	clear_bit
+	.type	clear_bit,#function
+clear_bit:	/* %o0=nr, %o1=addr */
 	srlx	%o0, 6, %g1
 	mov	1, %g5
 	sllx	%g1, 3, %g3
 	and	%o0, 63, %g2
 	sllx	%g5, %g2, %g5
 	add	%o1, %g3, %o1
-	ldx	[%o1], %g7
-1:	andcc	%g7, %g5, %o0
-	be,pn	%xcc, 2f
-	 xor	%g7, %g5, %g1
+1:	ldx	[%o1], %g7
+	andn	%g7, %g5, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,a,pn %xcc, 1b
-	 ldx	[%o1], %g7
-2:	retl
-	 membar	#StoreLoad | #StoreStore
-	.size	___test_and_clear_bit, .-___test_and_clear_bit
+	bne,pn	%xcc, 1b
+	 nop
+	retl
+	 nop
+	.size	clear_bit, .-clear_bit
 
-	.globl	___test_and_change_bit
-	.type	___test_and_change_bit,#function
-___test_and_change_bit:	/* %o0=nr, %o1=addr */
+	.globl	change_bit
+	.type	change_bit,#function
+change_bit:	/* %o0=nr, %o1=addr */
 	srlx	%o0, 6, %g1
 	mov	1, %g5
 	sllx	%g1, 3, %g3
 	and	%o0, 63, %g2
 	sllx	%g5, %g2, %g5
 	add	%o1, %g3, %o1
-	ldx	[%o1], %g7
-1:	and	%g7, %g5, %o0
+1:	ldx	[%o1], %g7
 	xor	%g7, %g5, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,a,pn %xcc, 1b
-	 ldx	[%o1], %g7
-2:	retl
-	 membar	#StoreLoad | #StoreStore
-	nop
-	.size	___test_and_change_bit, .-___test_and_change_bit
+	bne,pn	%xcc, 1b
+	 nop
+	retl
+	 nop
+	.size	change_bit, .-change_bit
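
Note (not part of the patch): below is a minimal C-level sketch of the semantics the new test_and_set_bit routine implements, assuming 64-bit words and using the GCC __sync_bool_compare_and_swap builtin in place of the casx instruction. The function name test_and_set_bit_sketch and the use of a compiler builtin are illustration-only choices, not the kernel's implementation; the barrier comments mark where BITOP_PRE_BARRIER/BITOP_POST_BARRIER expand to membar instructions on SMP.

	/* Sketch only: shows the retry-on-cas-failure loop and the
	 * "return the old bit value" contract of the assembly above. */
	static inline int test_and_set_bit_sketch(unsigned long nr,
						  volatile unsigned long *addr)
	{
		volatile unsigned long *word = addr + (nr >> 6); /* srlx %o0, 6; add %o1, %g3 */
		unsigned long mask = 1UL << (nr & 63);           /* and %o0, 63; sllx %g5, %g2 */
		unsigned long old, newval;

		/* BITOP_PRE_BARRIER would sit here on SMP. */
		do {
			old = *word;                             /* ldx  [%o1], %g7      */
			newval = old | mask;                     /* or   %g7, %g5, %g1   */
		} while (!__sync_bool_compare_and_swap(word, old, newval)); /* casx + retry */
		/* BITOP_POST_BARRIER would sit here on SMP. */

		return (old & mask) != 0;                        /* movrne %g2, 1, %o0   */
	}

The clear and change variants differ only in the middle step (andn or xor with the mask instead of or), and the plain set_bit/clear_bit/change_bit routines drop the barriers and the return value, which is why they end in bare retl/nop sequences.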