fedora core 6 1.2949 + vserver 2.2.0
diff --git a/include/asm-s390/bitops.h b/include/asm-s390/bitops.h
index 3628899..f79c9b7 100644
--- a/include/asm-s390/bitops.h
+++ b/include/asm-s390/bitops.h
@@ -12,7 +12,9 @@
  *    Copyright (C) 1992, Linus Torvalds
  *
  */
-#include <linux/config.h>
+
+#ifdef __KERNEL__
+
 #include <linux/compiler.h>
 
 /*
  * with operation of the form "set_bit(bitnr, flags)".
  */
 
-/* set ALIGN_CS to 1 if the SMP safe bit operations should
- * align the address to 4 byte boundary. It seems to work
- * without the alignment. 
- */
-#ifdef __KERNEL__
-#define ALIGN_CS 0
-#else
-#define ALIGN_CS 1
-#ifndef CONFIG_SMP
-#error "bitops won't work without CONFIG_SMP"
-#endif
-#endif
-
 /* bitmap tables from arch/S390/kernel/bitmap.S */
 extern const char _oi_bitmap[];
 extern const char _ni_bitmap[];
@@ -78,16 +67,35 @@ extern const char _sb_findmap[];
 #define __BITOPS_AND           "nr"
 #define __BITOPS_XOR           "xr"
 
-#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)                \
-       __asm__ __volatile__("   l   %0,0(%4)\n"                        \
-                            "0: lr  %1,%0\n"                           \
-                            __op_string "  %1,%3\n"                    \
-                            "   cs  %0,%1,0(%4)\n"                     \
-                            "   jl  0b"                                \
-                            : "=&d" (__old), "=&d" (__new),            \
-                              "=m" (*(unsigned long *) __addr)         \
-                            : "d" (__val), "a" (__addr),               \
-                              "m" (*(unsigned long *) __addr) : "cc" );
+#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
+
+#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)        \
+       asm volatile(                                           \
+               "       l       %0,%2\n"                        \
+               "0:     lr      %1,%0\n"                        \
+               __op_string "   %1,%3\n"                        \
+               "       cs      %0,%1,%2\n"                     \
+               "       jl      0b"                             \
+               : "=&d" (__old), "=&d" (__new),                 \
+                 "=Q" (*(unsigned long *) __addr)              \
+               : "d" (__val), "Q" (*(unsigned long *) __addr)  \
+               : "cc");
+
+#else /* __GNUC__ */
+
+#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)        \
+       asm volatile(                                           \
+               "       l       %0,0(%4)\n"                     \
+               "0:     lr      %1,%0\n"                        \
+               __op_string "   %1,%3\n"                        \
+               "       cs      %0,%1,0(%4)\n"                  \
+               "       jl      0b"                             \
+               : "=&d" (__old), "=&d" (__new),                 \
+                 "=m" (*(unsigned long *) __addr)              \
+               : "d" (__val), "a" (__addr),                    \
+                 "m" (*(unsigned long *) __addr) : "cc");
+
+#endif /* __GNUC__ */
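
Both the "Q"-constraint form for gcc later than 3.2 and the older "m"/"a" fallback above expand to the same compare-and-swap retry loop: load the word, apply the operation into a scratch register, and repeat cs (csg in the 64-bit variants below) until no other CPU has modified the word in between. A minimal C sketch of that pattern, assuming GCC's __sync_val_compare_and_swap() builtin as a stand-in for cs/csg; the function name is illustrative, not part of the header:

#include <stdio.h>

/*
 * Hypothetical rendering of the __BITOPS_LOOP pattern for the OR case;
 * __sync_val_compare_and_swap() plays the role of the cs/csg instruction.
 */
static unsigned long bitops_or_loop(unsigned long *addr, unsigned long val)
{
	unsigned long old, new;

	do {
		old = *addr;		/* l/lg: load the current word */
		new = old | val;	/* __op_string: here or/ogr */
	} while (__sync_val_compare_and_swap(addr, old, new) != old);
	return old;			/* callers test bits of the old value */
}

int main(void)
{
	unsigned long word = 0;

	bitops_or_loop(&word, 1UL << 5);
	printf("%#lx\n", word);		/* prints 0x20 */
	return 0;
}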
 
 #else /* __s390x__ */
 
@@ -97,21 +105,41 @@ extern const char _sb_findmap[];
 #define __BITOPS_AND           "ngr"
 #define __BITOPS_XOR           "xgr"
 
-#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)                \
-       __asm__ __volatile__("   lg  %0,0(%4)\n"                        \
-                            "0: lgr %1,%0\n"                           \
-                            __op_string "  %1,%3\n"                    \
-                            "   csg %0,%1,0(%4)\n"                     \
-                            "   jl  0b"                                \
-                            : "=&d" (__old), "=&d" (__new),            \
-                              "=m" (*(unsigned long *) __addr)         \
-                            : "d" (__val), "a" (__addr),               \
-                              "m" (*(unsigned long *) __addr) : "cc" );
+#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
+
+#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)        \
+       asm volatile(                                           \
+               "       lg      %0,%2\n"                        \
+               "0:     lgr     %1,%0\n"                        \
+               __op_string "   %1,%3\n"                        \
+               "       csg     %0,%1,%2\n"                     \
+               "       jl      0b"                             \
+               : "=&d" (__old), "=&d" (__new),                 \
+                 "=Q" (*(unsigned long *) __addr)              \
+               : "d" (__val), "Q" (*(unsigned long *) __addr)  \
+               : "cc");
+
+#else /* __GNUC__ */
+
+#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)        \
+       asm volatile(                                           \
+               "       lg      %0,0(%4)\n"                     \
+               "0:     lgr     %1,%0\n"                        \
+               __op_string "   %1,%3\n"                        \
+               "       csg     %0,%1,0(%4)\n"                  \
+               "       jl      0b"                             \
+               : "=&d" (__old), "=&d" (__new),                 \
+                 "=m" (*(unsigned long *) __addr)              \
+               : "d" (__val), "a" (__addr),                    \
+                 "m" (*(unsigned long *) __addr) : "cc");
+
+
+#endif /* __GNUC__ */
 
 #endif /* __s390x__ */
 
 #define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)
-#define __BITOPS_BARRIER() __asm__ __volatile__ ( "" : : : "memory" )
+#define __BITOPS_BARRIER() asm volatile("" : : : "memory")
 
 #ifdef CONFIG_SMP
 /*
@@ -122,10 +150,6 @@ static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make OR mask */
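
In set_bit_cs() and the sibling *_cs helpers below, "nr ^ (nr & (__BITOPS_WORDSIZE - 1))" rounds the bit number down to a word boundary and the ">> 3" turns bits into bytes, so addr ends up pointing at the word that cs/csg will operate on; the mask is then built from the bit index within that word. A small sketch of the same arithmetic, with hypothetical helper names:

#include <assert.h>

#define WORDSIZE 64	/* __BITOPS_WORDSIZE on 64-bit s390, 32 on 31-bit */

/* Byte offset, from the start of the bitmap, of the word holding bit nr. */
static unsigned long word_byte_offset(unsigned long nr)
{
	/* clear the low bits (round down to a word boundary), bits -> bytes */
	return (nr ^ (nr & (WORDSIZE - 1))) >> 3;
}

/* Mask for bit nr within that word (clear_bit_cs uses its complement). */
static unsigned long bit_mask(unsigned long nr)
{
	return 1UL << (nr & (WORDSIZE - 1));
}

int main(void)
{
	/* bit 70 lives in the second 64-bit word, at bit 6 within it */
	assert(word_byte_offset(70) == 8);
	assert(bit_mask(70) == 1UL << 6);
	return 0;
}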
@@ -142,10 +166,6 @@ static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make AND mask */
@@ -162,10 +182,6 @@ static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make XOR mask */
@@ -183,10 +199,6 @@ test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make OR/test mask */
@@ -206,10 +218,6 @@ test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make AND/test mask */
@@ -229,10 +237,6 @@ test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
         unsigned long addr, old, new, mask;
 
        addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-       nr += (addr & __BITOPS_ALIGN) << 3;  /* add alignment to bit number */
-       addr ^= addr & __BITOPS_ALIGN;       /* align address to 8 */
-#endif
        /* calculate address for CS */
        addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
        /* make XOR/test mask */
@@ -252,10 +256,10 @@ static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-        asm volatile("oc 0(1,%1),0(%2)"
-                    : "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc" );
+       asm volatile(
+               "       oc      0(1,%1),0(%2)"
+               : "=m" (*(char *) addr) : "a" (addr),
+                 "a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
 }
 
 static inline void 
@@ -264,40 +268,7 @@ __constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-       switch (nr&7) {
-       case 0:
-               asm volatile ("oi 0(%1),0x01" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 1:
-               asm volatile ("oi 0(%1),0x02" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 2:
-               asm volatile ("oi 0(%1),0x04" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 3:
-               asm volatile ("oi 0(%1),0x08" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 4:
-               asm volatile ("oi 0(%1),0x10" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 5:
-               asm volatile ("oi 0(%1),0x20" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 6:
-               asm volatile ("oi 0(%1),0x40" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 7:
-               asm volatile ("oi 0(%1),0x80" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       }
+       *(unsigned char *) addr |= 1 << (nr & 7);
 }
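
For the non-atomic byte-wise helpers, "(nr ^ (__BITOPS_WORDSIZE - 8)) >> 3" picks the byte that holds bit nr, given big-endian storage and the kernel's LSB-first bit numbering; __set_bit() then ORs a one-byte mask from _oi_bitmap (presumably 1 << (nr & 7)) into it with an OC, while the constant-nr variant above now writes the same operation in plain C and lets the compiler emit an OI immediate instead of the old eight-way switch. An illustrative model of the byte selection, not the kernel code:

#include <assert.h>
#include <stddef.h>

#define WORDSIZE 64	/* __BITOPS_WORDSIZE; 32 for the 31-bit kernel */

/*
 * Which byte, counted from the start of the bitmap, holds bit nr on a
 * big-endian machine when bit 0 is the LSB of the first word: the XOR
 * flips only the byte-within-word part of the index.
 */
static size_t byte_of_bit(unsigned long nr)
{
	return (nr ^ (WORDSIZE - 8)) >> 3;
}

static void set_bit_bytewise(unsigned long nr, void *bitmap)
{
	unsigned char *byte = (unsigned char *) bitmap + byte_of_bit(nr);

	*byte |= 1 << (nr & 7);		/* what the OC/OI forms perform */
}

int main(void)
{
	unsigned long map[2] = { 0, 0 };
	unsigned char *bytes = (unsigned char *) map;

	set_bit_bytewise(70, map);
	/* byte 15 carries the mask; on big-endian s390 that is the least
	 * significant byte of map[1], i.e. bit 70 of the bitmap */
	assert(bytes[15] == 0x40);
	return 0;
}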
 
 #define set_bit_simple(nr,addr) \
@@ -314,10 +285,10 @@ __clear_bit(unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-        asm volatile("nc 0(1,%1),0(%2)"
-                    : "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc" );
+       asm volatile(
+               "       nc      0(1,%1),0(%2)"
+               : "=m" (*(char *) addr) : "a" (addr),
+                 "a" (_ni_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc");
 }
 
 static inline void 
@@ -326,40 +297,7 @@ __constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-       switch (nr&7) {
-       case 0:
-               asm volatile ("ni 0(%1),0xFE" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 1:
-               asm volatile ("ni 0(%1),0xFD": "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 2:
-               asm volatile ("ni 0(%1),0xFB" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 3:
-               asm volatile ("ni 0(%1),0xF7" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 4:
-               asm volatile ("ni 0(%1),0xEF" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 5:
-               asm volatile ("ni 0(%1),0xDF" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 6:
-               asm volatile ("ni 0(%1),0xBF" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 7:
-               asm volatile ("ni 0(%1),0x7F" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       }
+       *(unsigned char *) addr &= ~(1 << (nr & 7));
 }
 
 #define clear_bit_simple(nr,addr) \
@@ -375,10 +313,10 @@ static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-        asm volatile("xc 0(1,%1),0(%2)"
-                    :  "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc" );
+       asm volatile(
+               "       xc      0(1,%1),0(%2)"
+               :  "=m" (*(char *) addr) : "a" (addr),
+                  "a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
 }
 
 static inline void 
@@ -387,40 +325,7 @@ __constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
        unsigned long addr;
 
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
-       switch (nr&7) {
-       case 0:
-               asm volatile ("xi 0(%1),0x01" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 1:
-               asm volatile ("xi 0(%1),0x02" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 2:
-               asm volatile ("xi 0(%1),0x04" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 3:
-               asm volatile ("xi 0(%1),0x08" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 4:
-               asm volatile ("xi 0(%1),0x10" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 5:
-               asm volatile ("xi 0(%1),0x20" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 6:
-               asm volatile ("xi 0(%1),0x40" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       case 7:
-               asm volatile ("xi 0(%1),0x80" : "=m" (*(char *) addr)
-                             : "a" (addr), "m" (*(char *) addr) : "cc" );
-               break;
-       }
+       *(unsigned char *) addr ^= 1 << (nr & 7);
 }
 
 #define change_bit_simple(nr,addr) \
@@ -439,10 +344,11 @@ test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
-        asm volatile("oc 0(1,%1),0(%2)"
-                    : "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc", "memory" );
+       asm volatile(
+               "       oc      0(1,%1),0(%2)"
+               : "=m" (*(char *) addr)
+               : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
+                 "m" (*(char *) addr) : "cc", "memory");
        return (ch >> (nr & 7)) & 1;
 }
 #define __test_and_set_bit(X,Y)                test_and_set_bit_simple(X,Y)
@@ -458,10 +364,11 @@ test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
-        asm volatile("nc 0(1,%1),0(%2)"
-                    : "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc", "memory" );
+       asm volatile(
+               "       nc      0(1,%1),0(%2)"
+               : "=m" (*(char *) addr)
+               : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
+                 "m" (*(char *) addr) : "cc", "memory");
        return (ch >> (nr & 7)) & 1;
 }
 #define __test_and_clear_bit(X,Y)      test_and_clear_bit_simple(X,Y)
@@ -477,10 +384,11 @@ test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
 
        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
-        asm volatile("xc 0(1,%1),0(%2)"
-                    : "=m" (*(char *) addr)
-                    : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
-                      "m" (*(char *) addr) : "cc", "memory" );
+       asm volatile(
+               "       xc      0(1,%1),0(%2)"
+               : "=m" (*(char *) addr)
+               : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
+                 "m" (*(char *) addr) : "cc", "memory");
        return (ch >> (nr & 7)) & 1;
 }
 #define __test_and_change_bit(X,Y)     test_and_change_bit_simple(X,Y)
@@ -592,35 +500,36 @@ find_first_zero_bit(const unsigned long * addr, unsigned long size)
 
         if (!size)
                 return 0;
-        __asm__("   lhi  %1,-1\n"
-                "   lr   %2,%3\n"
-                "   slr  %0,%0\n"
-                "   ahi  %2,31\n"
-                "   srl  %2,5\n"
-                "0: c    %1,0(%0,%4)\n"
-                "   jne  1f\n"
-                "   la   %0,4(%0)\n"
-                "   brct %2,0b\n"
-                "   lr   %0,%3\n"
-                "   j    4f\n"
-                "1: l    %2,0(%0,%4)\n"
-                "   sll  %0,3\n"
-                "   lhi  %1,0xff\n"
-                "   tml  %2,0xffff\n"
-                "   jno  2f\n"
-                "   ahi  %0,16\n"
-                "   srl  %2,16\n"
-                "2: tml  %2,0x00ff\n"
-                "   jno  3f\n"
-                "   ahi  %0,8\n"
-                "   srl  %2,8\n"
-                "3: nr   %2,%1\n"
-                "   ic   %2,0(%2,%5)\n"
-                "   alr  %0,%2\n"
-                "4:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
-                : "a" (size), "a" (addr), "a" (&_zb_findmap),
-                 "m" (*(addrtype *) addr) : "cc" );
+       asm volatile(
+               "       lhi     %1,-1\n"
+               "       lr      %2,%3\n"
+               "       slr     %0,%0\n"
+               "       ahi     %2,31\n"
+               "       srl     %2,5\n"
+               "0:     c       %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       la      %0,4(%0)\n"
+               "       brct    %2,0b\n"
+               "       lr      %0,%3\n"
+               "       j       4f\n"
+               "1:     l       %2,0(%0,%4)\n"
+               "       sll     %0,3\n"
+               "       lhi     %1,0xff\n"
+               "       tml     %2,0xffff\n"
+               "       jno     2f\n"
+               "       ahi     %0,16\n"
+               "       srl     %2,16\n"
+               "2:     tml     %2,0x00ff\n"
+               "       jno     3f\n"
+               "       ahi     %0,8\n"
+               "       srl     %2,8\n"
+               "3:     nr      %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       alr     %0,%2\n"
+               "4:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
+               : "a" (size), "a" (addr), "a" (&_zb_findmap),
+                 "m" (*(addrtype *) addr) : "cc");
         return (res < size) ? res : size;
 }
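
find_first_zero_bit() scans the bitmap one word at a time for a word that is not all ones, converts that word's byte offset into a bit offset ("sll %0,3"), narrows down with the tml tests, and finally indexes the _zb_findmap byte table to locate the first zero bit in the remaining byte. Roughly the following C, with __builtin_ctzl() standing in for the tml/_zb_findmap narrowing; the function name is illustrative. find_first_bit() below is the same loop with an all-zero comparison and _sb_findmap.

#include <assert.h>
#include <limits.h>

static unsigned long find_first_zero_bit_sketch(const unsigned long *addr,
						unsigned long size)
{
	const unsigned long bits = sizeof(unsigned long) * CHAR_BIT;
	unsigned long words = (size + bits - 1) / bits;
	unsigned long i, res;

	for (i = 0; i < words; i++) {
		if (addr[i] == ~0UL)
			continue;	/* the "c %1,0(%0,%4)" / brct loop */
		res = i * bits + __builtin_ctzl(~addr[i]);
		return res < size ? res : size;
	}
	return size;
}

int main(void)
{
	unsigned long map[2] = { ~0UL, ~0UL ^ (1UL << 3) };

	assert(find_first_zero_bit_sketch(map, 2 * sizeof(unsigned long) * CHAR_BIT)
	       == sizeof(unsigned long) * CHAR_BIT + 3);
	return 0;
}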
 
@@ -633,35 +542,36 @@ find_first_bit(const unsigned long * addr, unsigned long size)
 
         if (!size)
                 return 0;
-        __asm__("   slr  %1,%1\n"
-                "   lr   %2,%3\n"
-                "   slr  %0,%0\n"
-                "   ahi  %2,31\n"
-                "   srl  %2,5\n"
-                "0: c    %1,0(%0,%4)\n"
-                "   jne  1f\n"
-                "   la   %0,4(%0)\n"
-                "   brct %2,0b\n"
-                "   lr   %0,%3\n"
-                "   j    4f\n"
-                "1: l    %2,0(%0,%4)\n"
-                "   sll  %0,3\n"
-                "   lhi  %1,0xff\n"
-                "   tml  %2,0xffff\n"
-                "   jnz  2f\n"
-                "   ahi  %0,16\n"
-                "   srl  %2,16\n"
-                "2: tml  %2,0x00ff\n"
-                "   jnz  3f\n"
-                "   ahi  %0,8\n"
-                "   srl  %2,8\n"
-                "3: nr   %2,%1\n"
-                "   ic   %2,0(%2,%5)\n"
-                "   alr  %0,%2\n"
-                "4:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
-                : "a" (size), "a" (addr), "a" (&_sb_findmap),
-                 "m" (*(addrtype *) addr) : "cc" );
+       asm volatile(
+               "       slr     %1,%1\n"
+               "       lr      %2,%3\n"
+               "       slr     %0,%0\n"
+               "       ahi     %2,31\n"
+               "       srl     %2,5\n"
+               "0:     c       %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       la      %0,4(%0)\n"
+               "       brct    %2,0b\n"
+               "       lr      %0,%3\n"
+               "       j       4f\n"
+               "1:     l       %2,0(%0,%4)\n"
+               "       sll     %0,3\n"
+               "       lhi     %1,0xff\n"
+               "       tml     %2,0xffff\n"
+               "       jnz     2f\n"
+               "       ahi     %0,16\n"
+               "       srl     %2,16\n"
+               "2:     tml     %2,0x00ff\n"
+               "       jnz     3f\n"
+               "       ahi     %0,8\n"
+               "       srl     %2,8\n"
+               "3:     nr      %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       alr     %0,%2\n"
+               "4:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
+               : "a" (size), "a" (addr), "a" (&_sb_findmap),
+                 "m" (*(addrtype *) addr) : "cc");
         return (res < size) ? res : size;
 }
 
@@ -675,39 +585,40 @@ find_first_zero_bit(const unsigned long * addr, unsigned long size)
 
         if (!size)
                 return 0;
-        __asm__("   lghi  %1,-1\n"
-                "   lgr   %2,%3\n"
-                "   slgr  %0,%0\n"
-                "   aghi  %2,63\n"
-                "   srlg  %2,%2,6\n"
-                "0: cg    %1,0(%0,%4)\n"
-                "   jne   1f\n"
-                "   la    %0,8(%0)\n"
-                "   brct  %2,0b\n"
-                "   lgr   %0,%3\n"
-                "   j     5f\n"
-                "1: lg    %2,0(%0,%4)\n"
-                "   sllg  %0,%0,3\n"
-                "   clr   %2,%1\n"
-               "   jne   2f\n"
-               "   aghi  %0,32\n"
-                "   srlg  %2,%2,32\n"
-               "2: lghi  %1,0xff\n"
-                "   tmll  %2,0xffff\n"
-                "   jno   3f\n"
-                "   aghi  %0,16\n"
-                "   srl   %2,16\n"
-                "3: tmll  %2,0x00ff\n"
-                "   jno   4f\n"
-                "   aghi  %0,8\n"
-                "   srl   %2,8\n"
-                "4: ngr   %2,%1\n"
-                "   ic    %2,0(%2,%5)\n"
-                "   algr  %0,%2\n"
-                "5:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
+       asm volatile(
+               "       lghi    %1,-1\n"
+               "       lgr     %2,%3\n"
+               "       slgr    %0,%0\n"
+               "       aghi    %2,63\n"
+               "       srlg    %2,%2,6\n"
+               "0:     cg      %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       la      %0,8(%0)\n"
+               "       brct    %2,0b\n"
+               "       lgr     %0,%3\n"
+               "       j       5f\n"
+               "1:     lg      %2,0(%0,%4)\n"
+               "       sllg    %0,%0,3\n"
+               "       clr     %2,%1\n"
+               "       jne     2f\n"
+               "       aghi    %0,32\n"
+               "       srlg    %2,%2,32\n"
+               "2:     lghi    %1,0xff\n"
+               "       tmll    %2,0xffff\n"
+               "       jno     3f\n"
+               "       aghi    %0,16\n"
+               "       srl     %2,16\n"
+               "3:     tmll    %2,0x00ff\n"
+               "       jno     4f\n"
+               "       aghi    %0,8\n"
+               "       srl     %2,8\n"
+               "4:     ngr     %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       algr    %0,%2\n"
+               "5:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_zb_findmap),
-                 "m" (*(addrtype *) addr) : "cc" );
+                 "m" (*(addrtype *) addr) : "cc");
         return (res < size) ? res : size;
 }
 
@@ -719,39 +630,40 @@ find_first_bit(const unsigned long * addr, unsigned long size)
 
         if (!size)
                 return 0;
-        __asm__("   slgr  %1,%1\n"
-                "   lgr   %2,%3\n"
-                "   slgr  %0,%0\n"
-                "   aghi  %2,63\n"
-                "   srlg  %2,%2,6\n"
-                "0: cg    %1,0(%0,%4)\n"
-                "   jne   1f\n"
-                "   aghi  %0,8\n"
-                "   brct  %2,0b\n"
-                "   lgr   %0,%3\n"
-                "   j     5f\n"
-                "1: lg    %2,0(%0,%4)\n"
-                "   sllg  %0,%0,3\n"
-                "   clr   %2,%1\n"
-               "   jne   2f\n"
-               "   aghi  %0,32\n"
-                "   srlg  %2,%2,32\n"
-               "2: lghi  %1,0xff\n"
-                "   tmll  %2,0xffff\n"
-                "   jnz   3f\n"
-                "   aghi  %0,16\n"
-                "   srl   %2,16\n"
-                "3: tmll  %2,0x00ff\n"
-                "   jnz   4f\n"
-                "   aghi  %0,8\n"
-                "   srl   %2,8\n"
-                "4: ngr   %2,%1\n"
-                "   ic    %2,0(%2,%5)\n"
-                "   algr  %0,%2\n"
-                "5:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
+       asm volatile(
+               "       slgr    %1,%1\n"
+               "       lgr     %2,%3\n"
+               "       slgr    %0,%0\n"
+               "       aghi    %2,63\n"
+               "       srlg    %2,%2,6\n"
+               "0:     cg      %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       aghi    %0,8\n"
+               "       brct    %2,0b\n"
+               "       lgr     %0,%3\n"
+               "       j       5f\n"
+               "1:     lg      %2,0(%0,%4)\n"
+               "       sllg    %0,%0,3\n"
+               "       clr     %2,%1\n"
+               "       jne     2f\n"
+               "       aghi    %0,32\n"
+               "       srlg    %2,%2,32\n"
+               "2:     lghi    %1,0xff\n"
+               "       tmll    %2,0xffff\n"
+               "       jnz     3f\n"
+               "       aghi    %0,16\n"
+               "       srl     %2,16\n"
+               "3:     tmll    %2,0x00ff\n"
+               "       jnz     4f\n"
+               "       aghi    %0,8\n"
+               "       srl     %2,8\n"
+               "4:     ngr     %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       algr    %0,%2\n"
+               "5:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_sb_findmap),
-                 "m" (*(addrtype *) addr) : "cc" );
+                 "m" (*(addrtype *) addr) : "cc");
         return (res < size) ? res : size;
 }
 
@@ -828,37 +740,12 @@ static inline int sched_find_first_bit(unsigned long *b)
        return find_first_bit(b, 140);
 }
 
-/*
- * ffs: find first bit set. This is defined the same way as
- * the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
- */
-#define ffs(x) generic_ffs(x)
-
-/*
- * fls: find last bit set.
- */
-#define fls(x) generic_fls(x)
-#define fls64(x)   generic_fls64(x)
-
-/*
- * hweightN: returns the hamming weight (i.e. the number
- * of bits set) of a N-bit word
- */
-#define hweight64(x)                                           \
-({                                                             \
-       unsigned long __x = (x);                                \
-       unsigned int __w;                                       \
-       __w = generic_hweight32((unsigned int) __x);            \
-       __w += generic_hweight32((unsigned int) (__x>>32));     \
-       __w;                                                    \
-})
-#define hweight32(x) generic_hweight32(x)
-#define hweight16(x) generic_hweight16(x)
-#define hweight8(x) generic_hweight8(x)
+#include <asm-generic/bitops/ffs.h>
 
+#include <asm-generic/bitops/fls.h>
+#include <asm-generic/bitops/fls64.h>
 
-#ifdef __KERNEL__
+#include <asm-generic/bitops/hweight.h>
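
The open-coded ffs/fls/hweight macros give way to the asm-generic implementations; hweightN() is simply a population count, which the removed hweight64() computed as the sum of two 32-bit counts. A quick illustrative check of that equivalence, assuming GCC's popcount builtins:

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t x = 0xf0f0f0f0f0f0f0f1ULL;

	/* hamming weight of a 64-bit word equals the sum of the weights
	 * of its two 32-bit halves, which is what the old macro did */
	assert(__builtin_popcountll(x) ==
	       __builtin_popcount((uint32_t) x) +
	       __builtin_popcount((uint32_t) (x >> 32)));
	assert(__builtin_popcountll(x) == 33);
	return 0;
}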
 
 /*
  * ATTENTION: intel byte ordering convention for ext2 and minix !!
@@ -871,11 +758,11 @@ static inline int sched_find_first_bit(unsigned long *b)
  */
 
 #define ext2_set_bit(nr, addr)       \
-       test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
+       __test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
 #define ext2_set_bit_atomic(lock, nr, addr)       \
        test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
 #define ext2_clear_bit(nr, addr)     \
-       test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
+       __test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
 #define ext2_clear_bit_atomic(lock, nr, addr)     \
        test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
 #define ext2_test_bit(nr, addr)      \
@@ -892,36 +779,37 @@ ext2_find_first_zero_bit(void *vaddr, unsigned int size)
 
         if (!size)
                 return 0;
-        __asm__("   lhi  %1,-1\n"
-                "   lr   %2,%3\n"
-                "   ahi  %2,31\n"
-                "   srl  %2,5\n"
-                "   slr  %0,%0\n"
-                "0: cl   %1,0(%0,%4)\n"
-                "   jne  1f\n"
-                "   ahi  %0,4\n"
-                "   brct %2,0b\n"
-                "   lr   %0,%3\n"
-                "   j    4f\n"
-                "1: l    %2,0(%0,%4)\n"
-                "   sll  %0,3\n"
-                "   ahi  %0,24\n"
-                "   lhi  %1,0xff\n"
-                "   tmh  %2,0xffff\n"
-                "   jo   2f\n"
-                "   ahi  %0,-16\n"
-                "   srl  %2,16\n"
-                "2: tml  %2,0xff00\n"
-                "   jo   3f\n"
-                "   ahi  %0,-8\n"
-                "   srl  %2,8\n"
-                "3: nr   %2,%1\n"
-                "   ic   %2,0(%2,%5)\n"
-                "   alr  %0,%2\n"
-                "4:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
-                : "a" (size), "a" (vaddr), "a" (&_zb_findmap),
-                 "m" (*(addrtype *) vaddr) : "cc" );
+       asm volatile(
+               "       lhi     %1,-1\n"
+               "       lr      %2,%3\n"
+               "       ahi     %2,31\n"
+               "       srl     %2,5\n"
+               "       slr     %0,%0\n"
+               "0:     cl      %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       ahi     %0,4\n"
+               "       brct    %2,0b\n"
+               "       lr      %0,%3\n"
+               "       j       4f\n"
+               "1:     l       %2,0(%0,%4)\n"
+               "       sll     %0,3\n"
+               "       ahi     %0,24\n"
+               "       lhi     %1,0xff\n"
+               "       tmh     %2,0xffff\n"
+               "       jo      2f\n"
+               "       ahi     %0,-16\n"
+               "       srl     %2,16\n"
+               "2:     tml     %2,0xff00\n"
+               "       jo      3f\n"
+               "       ahi     %0,-8\n"
+               "       srl     %2,8\n"
+               "3:     nr      %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       alr     %0,%2\n"
+               "4:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
+               : "a" (size), "a" (vaddr), "a" (&_zb_findmap),
+                 "m" (*(addrtype *) vaddr) : "cc");
         return (res < size) ? res : size;
 }
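
The ext2_* wrappers further up adjust the bit number with "(nr) ^ (__BITOPS_WORDSIZE - 8)" because ext2 and minix bitmaps are little-endian: bit nr of the on-disk bitmap is bit nr & 7 of byte nr >> 3, whereas the native long-word operations number bits LSB-first within big-endian words. XORing the byte-within-word part of the index maps one numbering onto the other, so both land on the same byte and bit in memory. A small, endianness-independent model of that mapping, with hypothetical names:

#include <assert.h>

#define WORDSIZE 64	/* __BITOPS_WORDSIZE on 64-bit s390 */

/* ext2 bit number -> native s390 bit number hitting the same memory bit */
static unsigned long ext2_to_native(unsigned long nr)
{
	return nr ^ (WORDSIZE - 8);
}

int main(void)
{
	unsigned long nr;

	for (nr = 0; nr < 256; nr++) {
		unsigned long m = ext2_to_native(nr);

		/* native bit m sits in byte (m ^ (WORDSIZE - 8)) >> 3,
		 * at bit m & 7 (see __set_bit above) ... */
		assert(((m ^ (WORDSIZE - 8)) >> 3) == (nr >> 3));
		/* ... which is exactly where ext2 expects bit nr */
		assert((m & 7) == (nr & 7));
	}
	return 0;
}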
 
@@ -935,39 +823,40 @@ ext2_find_first_zero_bit(void *vaddr, unsigned long size)
 
         if (!size)
                 return 0;
-        __asm__("   lghi  %1,-1\n"
-                "   lgr   %2,%3\n"
-                "   aghi  %2,63\n"
-                "   srlg  %2,%2,6\n"
-                "   slgr  %0,%0\n"
-                "0: clg   %1,0(%0,%4)\n"
-                "   jne   1f\n"
-                "   aghi  %0,8\n"
-                "   brct  %2,0b\n"
-                "   lgr   %0,%3\n"
-                "   j     5f\n"
-                "1: cl    %1,0(%0,%4)\n"
-               "   jne   2f\n"
-               "   aghi  %0,4\n"
-               "2: l     %2,0(%0,%4)\n"
-                "   sllg  %0,%0,3\n"
-                "   aghi  %0,24\n"
-                "   lghi  %1,0xff\n"
-                "   tmlh  %2,0xffff\n"
-                "   jo    3f\n"
-                "   aghi  %0,-16\n"
-                "   srl   %2,16\n"
-                "3: tmll  %2,0xff00\n"
-                "   jo    4f\n"
-                "   aghi  %0,-8\n"
-                "   srl   %2,8\n"
-                "4: ngr   %2,%1\n"
-                "   ic    %2,0(%2,%5)\n"
-                "   algr  %0,%2\n"
-                "5:"
-                : "=&a" (res), "=&d" (cmp), "=&a" (count)
+       asm volatile(
+               "       lghi    %1,-1\n"
+               "       lgr     %2,%3\n"
+               "       aghi    %2,63\n"
+               "       srlg    %2,%2,6\n"
+               "       slgr    %0,%0\n"
+               "0:     clg     %1,0(%0,%4)\n"
+               "       jne     1f\n"
+               "       aghi    %0,8\n"
+               "       brct    %2,0b\n"
+               "       lgr     %0,%3\n"
+               "       j       5f\n"
+               "1:     cl      %1,0(%0,%4)\n"
+               "       jne     2f\n"
+               "       aghi    %0,4\n"
+               "2:     l       %2,0(%0,%4)\n"
+               "       sllg    %0,%0,3\n"
+               "       aghi    %0,24\n"
+               "       lghi    %1,0xff\n"
+               "       tmlh    %2,0xffff\n"
+               "       jo      3f\n"
+               "       aghi    %0,-16\n"
+               "       srl     %2,16\n"
+               "3:     tmll    %2,0xff00\n"
+               "       jo      4f\n"
+               "       aghi    %0,-8\n"
+               "       srl     %2,8\n"
+               "4:     ngr     %2,%1\n"
+               "       ic      %2,0(%2,%5)\n"
+               "       algr    %0,%2\n"
+               "5:"
+               : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (vaddr), "a" (&_zb_findmap),
-                 "m" (*(addrtype *) vaddr) : "cc" );
+                 "m" (*(addrtype *) vaddr) : "cc");
         return (res < size) ? res : size;
 }
 
@@ -987,13 +876,16 @@ ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
        p = addr + offset / __BITOPS_WORDSIZE;
         if (bit) {
 #ifndef __s390x__
-                asm("   ic   %0,0(%1)\n"
-                   "   icm  %0,2,1(%1)\n"
-                   "   icm  %0,4,2(%1)\n"
-                   "   icm  %0,8,3(%1)"
-                   : "=&a" (word) : "a" (p), "m" (*p) : "cc" );
+               asm volatile(
+                       "       ic      %0,0(%1)\n"
+                       "       icm     %0,2,1(%1)\n"
+                       "       icm     %0,4,2(%1)\n"
+                       "       icm     %0,8,3(%1)"
+                       : "=&a" (word) : "a" (p), "m" (*p) : "cc");
 #else
-                asm("   lrvg %0,%1" : "=a" (word) : "m" (*p) );
+               asm volatile(
+                       "       lrvg    %0,%1"
+                       : "=a" (word) : "m" (*p) );
 #endif
                /*
                 * s390 version of ffz returns __BITOPS_WORDSIZE
@@ -1011,18 +903,7 @@ ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
        return offset + ext2_find_first_zero_bit(p, size);
 }
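
When the search starts in the middle of a word, ext2_find_next_zero_bit() first loads that word byte-reversed, with lrvg on 64-bit or the ic/icm sequence on 31-bit, so the little-endian on-disk bit order lines up with the native LSB-first order that ffz() expects, and then falls back to ext2_find_first_zero_bit() for the remaining full words. A hedged sketch of the byte-reversal idea; le64_from_bytes() is an illustrative helper, not a kernel interface:

#include <assert.h>
#include <stdint.h>

/*
 * Model of the register contents after the byte-reversing load: byte i
 * of the bitmap contributes bits 8*i .. 8*i+7, so LSB-first bit k of the
 * result is exactly ext2 bit k and a plain ffz()/ctz can scan it.
 */
static uint64_t le64_from_bytes(const unsigned char *p)
{
	uint64_t w = 0;
	int i;

	for (i = 0; i < 8; i++)
		w |= (uint64_t) p[i] << (8 * i);
	return w;
}

int main(void)
{
	unsigned char bitmap[8] = { 0 };

	bitmap[2] |= 1 << 5;			/* ext2 bit 2*8 + 5 = 21 */
	assert((le64_from_bytes(bitmap) >> 21) & 1);
	return 0;
}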
 
-/* Bitmap functions for the minix filesystem.  */
-/* FIXME !!! */
-#define minix_test_and_set_bit(nr,addr) \
-       test_and_set_bit(nr,(unsigned long *)addr)
-#define minix_set_bit(nr,addr) \
-       set_bit(nr,(unsigned long *)addr)
-#define minix_test_and_clear_bit(nr,addr) \
-       test_and_clear_bit(nr,(unsigned long *)addr)
-#define minix_test_bit(nr,addr) \
-       test_bit(nr,(unsigned long *)addr)
-#define minix_find_first_zero_bit(addr,size) \
-       find_first_zero_bit(addr,size)
+#include <asm-generic/bitops/minix.h>
 
 #endif /* __KERNEL__ */