Fedora kernel-2.6.17-1.2142_FC4 patched with stable patch-2.6.17.4-vs2.0.2-rc26.diff
[linux-2.6.git] include/asm-sparc64/uaccess.h
index 5690142..afe236b 100644
@@ -59,12 +59,6 @@ static inline int access_ok(int type, const void __user * addr, unsigned long si
        return 1;
 }
 
-/* this function will go away soon - use access_ok() instead */
-static inline int __deprecated verify_area(int type, const void __user * addr, unsigned long size)
-{
-       return 0;
-}
-
 /*
  * The exception table consists of pairs of addresses: the first is the
  * address of an instruction that is allowed to fault, and the second is
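The hunk above also removes verify_area(), a long-deprecated wrapper that on sparc64 could only report success (access_ok() above unconditionally returns 1; bad addresses are caught at fault time through the exception table described next). Callers convert as in this minimal sketch, where check_dest(), buf and len are hypothetical, not from this patch:

    /* Hypothetical conversion sketch, not part of this patch. */
    static int check_dest(void __user *buf, unsigned long len)
    {
            /* Old style, removed here:
             *
             *      if (verify_area(VERIFY_WRITE, buf, len))
             *              return -EFAULT;
             *
             * New style -- note the inverted sense: access_ok() is
             * true when the access is allowed.
             */
            if (!access_ok(VERIFY_WRITE, buf, len))
                    return -EFAULT;
            return 0;
    }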
@@ -76,26 +70,14 @@ static inline int __deprecated verify_area(int type, const void __user * addr, u
  * with the main instruction path.  This means when everything is well,
  * we don't even have to jump over them.  Further, they do not intrude
  * on our cache or tlb entries.
- *
- * There is a special way how to put a range of potentially faulting
- * insns (like twenty ldd/std's with now intervening other instructions)
- * You specify address of first in insn and 0 in fixup and in the next
- * exception_table_entry you specify last potentially faulting insn + 1
- * and in fixup the routine which should handle the fault.
- * That fixup code will get
- * (faulting_insn_address - first_insn_in_the_range_address)/4
- * in %g2 (ie. index of the faulting instruction in the range).
  */
 
-struct exception_table_entry
-{
-        unsigned insn, fixup;
+struct exception_table_entry {
+        unsigned int insn, fixup;
 };
 
-/* Special exable search, which handles ranges.  Returns fixup */
-unsigned long search_extables_range(unsigned long addr, unsigned long *g2);
-
 extern void __ret_efault(void);
+extern void __retl_efault(void);
 
 /* Uh, these should become the main single-value transfer routines..
  * They automatically use the right size if we just have the right
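The hunk above simplifies the exception-table machinery: the special two-entry encoding for instruction ranges is gone, and with it search_extables_range() and the faulting-instruction index passed in %g2; every entry is now a plain (insn, fixup) pair. __retl_efault() joins __ret_efault() as a shared stub for fixups that just return -EFAULT (the retl flavor presumably serving leaf routines, which return without a register-window restore). Conceptually the trap handler resolves a fixup as in the sketch below; lookup_fixup() is illustrative only, the kernel's real search being a binary search over a sorted, linker-built table:

    /* Illustrative linear version of exception-table lookup. */
    static unsigned int
    lookup_fixup(const struct exception_table_entry *start,
                 const struct exception_table_entry *end,
                 unsigned long pc)
    {
            const struct exception_table_entry *e;

            /* Entries are 32-bit: the macros below emit them with
             * ".word", which is enough for sparc64 kernel text.
             */
            for (e = start; e < end; e++)
                    if (e->insn == (unsigned int)pc)
                            return e->fixup;        /* resume address */
            return 0;                               /* genuine fault */
    }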
@@ -132,16 +114,6 @@ case 8: __put_user_asm(data,x,addr,__pu_ret); break; \
 default: __pu_ret = __put_user_bad(); break; \
 } __pu_ret; })
 
-#define __put_user_nocheck_ret(data,addr,size,retval) ({ \
-register int __foo __asm__ ("l1"); \
-switch (size) { \
-case 1: __put_user_asm_ret(data,b,addr,retval,__foo); break; \
-case 2: __put_user_asm_ret(data,h,addr,retval,__foo); break; \
-case 4: __put_user_asm_ret(data,w,addr,retval,__foo); break; \
-case 8: __put_user_asm_ret(data,x,addr,retval,__foo); break; \
-default: if (__put_user_bad()) return retval; break; \
-} })
-
 #define __put_user_asm(x,size,addr,ret)                                        \
 __asm__ __volatile__(                                                  \
        "/* Put user asm, inline. */\n"                                 \
@@ -154,40 +126,13 @@ __asm__ __volatile__(                                                     \
        "b      2b\n\t"                                                 \
        " mov   %3, %0\n\n\t"                                           \
        ".previous\n\t"                                                 \
-       ".section __ex_table,#alloc\n\t"                                \
+       ".section __ex_table,\"a\"\n\t"                                 \
        ".align 4\n\t"                                                  \
        ".word  1b, 3b\n\t"                                             \
        ".previous\n\n\t"                                               \
        : "=r" (ret) : "r" (x), "r" (__m(addr)),                                \
         "i" (-EFAULT))
 
-#define __put_user_asm_ret(x,size,addr,ret,foo)                                \
-if (__builtin_constant_p(ret) && ret == -EFAULT)                       \
-__asm__ __volatile__(                                                  \
-       "/* Put user asm ret, inline. */\n"                             \
-"1:\t" "st"#size "a %1, [%2] %%asi\n\n\t"                              \
-       ".section __ex_table,#alloc\n\t"                                \
-       ".align 4\n\t"                                                  \
-       ".word  1b, __ret_efault\n\n\t"                                 \
-       ".previous\n\n\t"                                               \
-       : "=r" (foo) : "r" (x), "r" (__m(addr)));                       \
-else                                                                   \
-__asm__ __volatile__(                                                  \
-       "/* Put user asm ret, inline. */\n"                             \
-"1:\t" "st"#size "a %1, [%2] %%asi\n\n\t"                              \
-       ".section .fixup,#alloc,#execinstr\n\t"                         \
-       ".align 4\n"                                                    \
-"3:\n\t"                                                               \
-       "ret\n\t"                                                       \
-       " restore %%g0, %3, %%o0\n\n\t"                                 \
-       ".previous\n\t"                                                 \
-       ".section __ex_table,#alloc\n\t"                                \
-       ".align 4\n\t"                                                  \
-       ".word  1b, 3b\n\n\t"                                           \
-       ".previous\n\n\t"                                               \
-       : "=r" (foo) : "r" (x), "r" (__m(addr)),                                \
-         "i" (ret))
-
 extern int __put_user_bad(void);
 
 #define __get_user_nocheck(data,addr,size,type) ({ \
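Besides dropping __put_user_asm_ret() (orphaned once __put_user_nocheck_ret() went away), the hunk above rewrites the __ex_table section directive from the Solaris-style flag #alloc to GNU as's quoted ELF spelling "a"; both mean "allocatable", and the same substitution repeats in the three get-side hunks below. Note that .fixup keeps the old #alloc,#execinstr spelling. Written out as a self-contained sketch (probe_user_int() is hypothetical), the surviving load-with-fixup pattern looks like this:

    /* One 4-byte user load with inline fixup, mirroring the macros above. */
    static inline int probe_user_int(int *dst, const int __user *src)
    {
            int ret, val;

            __asm__ __volatile__(
            "1:\t"  "lduwa  [%2] %%asi, %1\n\t"
                    "clr    %0\n"
            "2:\n\t"
                    ".section .fixup,#alloc,#execinstr\n\t"
                    ".align 4\n"
            "3:\n\t"
                    "b      2b\n\t"
                    " mov   %3, %0\n\t"
                    ".previous\n\t"
                    ".section __ex_table,\"a\"\n\t"
                    ".align 4\n\t"
                    ".word  1b, 3b\n\t"
                    ".previous\n"
                    : "=r" (ret), "=r" (val)
                    : "r" (src), "i" (-EFAULT));

            if (ret)
                    return ret;     /* -EFAULT; val is unspecified */
            *dst = val;
            return 0;
    }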
@@ -224,7 +169,7 @@ __asm__ __volatile__(                                                       \
        "b      2b\n\t"                                                 \
        " mov   %3, %0\n\n\t"                                           \
        ".previous\n\t"                                                 \
-       ".section __ex_table,#alloc\n\t"                                \
+       ".section __ex_table,\"a\"\n\t"                                 \
        ".align 4\n\t"                                                  \
        ".word  1b, 3b\n\n\t"                                           \
        ".previous\n\t"                                                 \
@@ -236,7 +181,7 @@ if (__builtin_constant_p(retval) && retval == -EFAULT)                      \
 __asm__ __volatile__(                                                  \
        "/* Get user asm ret, inline. */\n"                             \
 "1:\t" "ld"#size "a [%1] %%asi, %0\n\n\t"                              \
-       ".section __ex_table,#alloc\n\t"                                \
+       ".section __ex_table,\"a\"\n\t"                                 \
        ".align 4\n\t"                                                  \
        ".word  1b,__ret_efault\n\n\t"                                  \
        ".previous\n\t"                                                 \
@@ -251,7 +196,7 @@ __asm__ __volatile__(                                                       \
        "ret\n\t"                                                       \
        " restore %%g0, %2, %%o0\n\n\t"                                 \
        ".previous\n\t"                                                 \
-       ".section __ex_table,#alloc\n\t"                                \
+       ".section __ex_table,\"a\"\n\t"                                 \
        ".align 4\n\t"                                                  \
        ".word  1b, 3b\n\n\t"                                           \
        ".previous\n\t"                                                 \
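The three hunks above apply the same #alloc to "a" conversion on the load side. __get_user_asm_ret() itself survives: when the wanted error value is the compile-time constant -EFAULT it still points its table entry straight at the shared __ret_efault stub, avoiding a per-call-site fixup fragment, and only the general case emits local fixup code. Caller-side, the checking read path looks like this hedged sketch (read_u32() is hypothetical; get_user() is the standard wrapper):

    static long read_u32(unsigned int *dst, const unsigned int __user *src)
    {
            unsigned int v;

            if (get_user(v, src))   /* 0 on success, -EFAULT on fault */
                    return -EFAULT;
            *dst = v;
            return 0;
    }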
@@ -269,7 +214,7 @@ copy_from_user(void *to, const void __user *from, unsigned long size)
 {
        unsigned long ret = ___copy_from_user(to, from, size);
 
-       if (ret)
+       if (unlikely(ret))
                ret = copy_from_user_fixup(to, from, size);
        return ret;
 }
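The unlikely() annotation wraps GCC's __builtin_expect(!!(x), 0), telling the compiler that the fast path is ret == 0 (every byte copied) so the call to the fixup routine is laid out off the hot, straight-line path; the same hint is added to copy_to_user() and copy_in_user() below. In caller terms the pattern looks like this hypothetical sketch (parse_header() and struct hdr are illustrative):

    struct hdr {
            unsigned int magic, len;
    };

    static int parse_header(const char __user *ubuf, struct hdr *h)
    {
            /* copy_from_user() returns the number of bytes NOT copied. */
            if (unlikely(copy_from_user(h, ubuf, sizeof(*h))))
                    return -EFAULT; /* cold: partial or failed copy */
            return 0;               /* hot: straight-line success */
    }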
@@ -285,7 +230,7 @@ copy_to_user(void __user *to, const void *from, unsigned long size)
 {
        unsigned long ret = ___copy_to_user(to, from, size);
 
-       if (ret)
+       if (unlikely(ret))
                ret = copy_to_user_fixup(to, from, size);
        return ret;
 }
@@ -301,20 +246,13 @@ copy_in_user(void __user *to, void __user *from, unsigned long size)
 {
        unsigned long ret = ___copy_in_user(to, from, size);
 
-       if (ret)
+       if (unlikely(ret))
                ret = copy_in_user_fixup(to, from, size);
        return ret;
 }
 #define __copy_in_user copy_in_user
 
-extern unsigned long __must_check __bzero_noasi(void __user *, unsigned long);
-
-static inline unsigned long __must_check
-__clear_user(void __user *addr, unsigned long size)
-{
-       
-       return __bzero_noasi(addr, size);
-}
+extern unsigned long __must_check __clear_user(void __user *, unsigned long);
 
 #define clear_user __clear_user
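The final hunk retires the trivial C wrapper around __bzero_noasi(): the underlying assembly routine now presumably carries the __clear_user name itself (the rename would live in the accompanying sparc64 lib changes, not shown here), so the header only declares it. Semantics are unchanged: like the copy_*_user() routines, the return value is the number of bytes that could NOT be cleared, with 0 meaning complete success. A hedged usage sketch (wipe_user() is hypothetical):

    static int wipe_user(void __user *buf, unsigned long len)
    {
            if (clear_user(buf, len))
                    return -EFAULT; /* some bytes left untouched */
            return 0;
    }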