linux-2.6.git: arch/i386/lib/usercopy.c (merge to Fedora kernel-2.6.7-1.492)
/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/config.h>
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1,a2,n) \
        __movsl_is_ok((unsigned long)(a1),(unsigned long)(a2),(n))

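/*
 * Illustrative sketch, not part of the original file: roughly how the
 * heuristic above plays out.  The function name and the mask value of 7
 * are assumptions for the example; the real mask is whatever the CPU
 * setup code stores in movsl_mask.mask (it may be 0, in which case
 * movsl_is_ok() always returns 1).
 */
#if 0
static int example_movsl_choice(const void *kbuf, const void __user *ubuf,
                                unsigned long len)
{
        /*
         * With movsl_mask.mask == 7, a copy of 64 bytes or more between
         * buffers whose addresses differ in their low three bits makes
         * movsl_is_ok() return 0, steering callers such as
         * __copy_to_user_ll() below to the unrolled Intel copy loop
         * instead of rep;movsl.
         */
        return movsl_is_ok(kbuf, ubuf, len);
}
#endif
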
/*
 * Copy a null terminated string from userspace.
 */

#define __do_strncpy_from_user(dst,src,count,res)                          \
do {                                                                       \
        int __d0, __d1, __d2;                                              \
        might_sleep();                                                     \
        __asm__ __volatile__(                                              \
                "       testl %1,%1\n"                                     \
                "       jz 2f\n"                                           \
                "0:     lodsb\n"                                           \
                "       stosb\n"                                           \
                "       testb %%al,%%al\n"                                 \
                "       jz 1f\n"                                           \
                "       decl %1\n"                                         \
                "       jnz 0b\n"                                          \
                "1:     subl %1,%0\n"                                      \
                "2:\n"                                                     \
                ".section .fixup,\"ax\"\n"                                 \
                "3:     movl %5,%0\n"                                      \
                "       jmp 2b\n"                                          \
                ".previous\n"                                              \
                ".section __ex_table,\"a\"\n"                              \
                "       .align 4\n"                                        \
                "       .long 0b,3b\n"                                     \
                ".previous"                                                \
                : "=d"(res), "=c"(count), "=&a" (__d0), "=&S" (__d1),      \
                  "=&D" (__d2)                                             \
                : "i"(-EFAULT), "0"(count), "1"(count), "3"(src), "4"(dst) \
                : "memory");                                               \
} while (0)

/**
 * __direct_strncpy_from_user: - Copy a NUL terminated string from userspace, with less checking.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 * Caller must check the specified block with access_ok() before calling
 * this function.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
__direct_strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res;
        __do_strncpy_from_user(dst, src, count, res);
        return res;
}

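/*
 * A plain-C model of what __do_strncpy_from_user() computes, added here
 * only as an illustration; the function name is made up and the model
 * ignores the fault handling that the assembly gets from the exception
 * table (a fault simply yields -EFAULT in the real code).
 */
#if 0
static long strncpy_from_user_model(char *dst, const char *src, long count)
{
        long left = count;

        while (left) {
                char c = *src++;        /* byte-by-byte, like lodsb/stosb */

                *dst++ = c;
                if (c == '\0')
                        break;          /* NUL copied, stop early */
                left--;
        }
        /*
         * Length copied, not counting the trailing NUL; equals count when
         * the string was truncated, matching the "subl %1,%0" result.
         */
        return count - left;
}
#endif
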
/**
 * direct_strncpy_from_user: - Copy a NUL terminated string from userspace.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
direct_strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res = -EFAULT;
        if (access_ok(VERIFY_READ, src, 1))
                __do_strncpy_from_user(dst, src, count, res);
        return res;
}

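/*
 * Illustrative caller sketch, not from the original file: the usual
 * pattern for pulling a short name in from userspace.  The function and
 * the 32-byte buffer size are assumptions; only the strncpy_from_user()
 * return-value convention comes from the documentation above.
 */
#if 0
static long example_fetch_name(char *kname, const char __user *uname)
{
        long len = strncpy_from_user(kname, uname, 32);

        if (len < 0)
                return len;             /* -EFAULT: bad user pointer */
        if (len == 32)
                return -ENAMETOOLONG;   /* no NUL within 32 bytes */
        return len;                     /* length excluding the NUL */
}
#endif
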
/*
 * Zero Userspace
 */

#define __do_clear_user(addr,size)                                      \
do {                                                                    \
        int __d0;                                                       \
        might_sleep();                                                  \
        __asm__ __volatile__(                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)

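/*
 * A plain-C model of __do_clear_user(), for illustration only; the name
 * is invented.  The real macro zeroes the bulk with rep;stosl and the
 * remaining 0-3 bytes with rep;stosb; on a fault, its fixup code turns
 * whatever is left in the registers back into "bytes not cleared".
 */
#if 0
static unsigned long clear_user_model(unsigned char *addr, unsigned long size)
{
        unsigned long words = size / 4;         /* the rep;stosl count */
        unsigned long tail  = size & 3;         /* the rep;stosb count */

        while (words--) {
                *(unsigned int *)addr = 0;      /* one stosl */
                addr += 4;
        }
        while (tail--)
                *addr++ = 0;                    /* one stosb */

        /*
         * On success nothing is left; on a fault, the fixup at label 3
         * converts the remaining dword count back into bytes (plus the
         * uncleared tail), which is what the callers return.
         */
        return 0;
}
#endif
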
/**
 * direct_clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
direct_clear_user(void __user *to, unsigned long n)
{
        might_sleep();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}

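/*
 * Illustrative caller sketch (not from the original file): zero-padding
 * the unused part of a user buffer.  The function and its arguments are
 * assumptions; the "bytes not cleared" return convention is the one
 * documented above.
 */
#if 0
static int example_pad_user_buffer(void __user *buf, unsigned long used,
                                   unsigned long total)
{
        if (used < total && clear_user(buf + used, total - used))
                return -EFAULT;         /* some bytes could not be cleared */
        return 0;
}
#endif
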
/**
 * __direct_clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__direct_clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}

/**
 * direct_strnlen_user: - Get the size of a string in user space.
 * @s: The string to measure.
 * @n: The maximum valid length
 *
 * Get the size of a NUL-terminated string in user space.
 *
 * Returns the size of the string INCLUDING the terminating NUL.
 * On exception, returns 0.
 * If the string is too long, returns a value greater than @n.
 */
long direct_strnlen_user(const char __user *s, long n)
{
        unsigned long mask = -__addr_ok(s);
        unsigned long res, tmp;

        might_sleep();

        __asm__ __volatile__(
                "       testl %0, %0\n"
                "       jz 3f\n"
                "       andl %0,%%ecx\n"
                "0:     repne; scasb\n"
                "       setne %%al\n"
                "       subl %%ecx,%0\n"
                "       addl %0,%%eax\n"
                "1:\n"
                ".section .fixup,\"ax\"\n"
                "2:     xorl %%eax,%%eax\n"
                "       jmp 1b\n"
                "3:     movb $1,%%al\n"
                "       jmp 1b\n"
                ".previous\n"
                ".section __ex_table,\"a\"\n"
                "       .align 4\n"
                "       .long 0b,2b\n"
                ".previous"
                :"=r" (n), "=D" (s), "=a" (res), "=c" (tmp)
                :"0" (n), "1" (s), "2" (0), "3" (mask)
                :"cc");
        return res & mask;
}

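/*
 * Illustrative caller sketch (not from the original file): sizing a user
 * string before copying it.  The names and the 256-byte limit are
 * assumptions; the return convention (0 on fault, length including the
 * NUL, greater than the limit if unterminated) is the one documented
 * above.
 */
#if 0
static long example_user_string_len(const char __user *ustr)
{
        long len = strnlen_user(ustr, 256);

        if (len == 0)
                return -EFAULT;         /* faulted while scanning */
        if (len > 256)
                return -EINVAL;         /* no NUL within the limit */
        return len - 1;                 /* length without the NUL */
}
#endif
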
#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "       .align 4\n"
                       "       .long 1b,100b\n"
                       "       .long 2b,100b\n"
                       "       .long 3b,100b\n"
                       "       .long 4b,100b\n"
                       "       .long 5b,100b\n"
                       "       .long 6b,100b\n"
                       "       .long 7b,100b\n"
                       "       .long 8b,100b\n"
                       "       .long 9b,100b\n"
                       "       .long 10b,100b\n"
                       "       .long 11b,100b\n"
                       "       .long 12b,100b\n"
                       "       .long 13b,100b\n"
                       "       .long 14b,100b\n"
                       "       .long 15b,100b\n"
                       "       .long 16b,100b\n"
                       "       .long 17b,100b\n"
                       "       .long 18b,100b\n"
                       "       .long 19b,100b\n"
                       "       .long 20b,100b\n"
                       "       .long 21b,100b\n"
                       "       .long 22b,100b\n"
                       "       .long 23b,100b\n"
                       "       .long 24b,100b\n"
                       "       .long 25b,100b\n"
                       "       .long 26b,100b\n"
                       "       .long 27b,100b\n"
                       "       .long 28b,100b\n"
                       "       .long 29b,100b\n"
                       "       .long 30b,100b\n"
                       "       .long 31b,100b\n"
                       "       .long 32b,100b\n"
                       "       .long 33b,100b\n"
                       "       .long 34b,100b\n"
                       "       .long 35b,100b\n"
                       "       .long 36b,100b\n"
                       "       .long 37b,100b\n"
                       "       .long 99b,101b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

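/*
 * Plain-C sketch of the structure of __copy_user_intel() above, added for
 * illustration only (the name is invented and the fault handling is
 * omitted): a read of the next cache lines primes the cache, each
 * iteration then moves one 64-byte block, and rep;movsl / rep;movsb
 * finish the tail exactly as in the generic __copy_user() further down.
 */
#if 0
static unsigned long copy_user_intel_model(unsigned char *to,
                                           const unsigned char *from,
                                           unsigned long size)
{
        while (size > 63) {
                /* touch ahead to warm the cache, like "movl 32(%4)" */
                (void)*(const volatile unsigned int *)(from + 32);
                if (size > 67)
                        (void)*(const volatile unsigned int *)(from + 64);

                memcpy(to, from, 64);           /* the 16 movl pairs */
                to += 64;
                from += 64;
                size -= 64;
        }
        memcpy(to, from, size);                 /* rep;movsl + rep;movsb */
        return 0;                               /* bytes not copied */
}
#endif
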
static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "        .align 2,0x90\n"
                       "0:      movl 32(%4), %%eax\n"
                       "        cmpl $67, %0\n"
                       "        jbe 2f\n"
                       "1:      movl 64(%4), %%eax\n"
                       "        .align 2,0x90\n"
                       "2:      movl 0(%4), %%eax\n"
                       "21:     movl 4(%4), %%edx\n"
                       "        movl %%eax, 0(%3)\n"
                       "        movl %%edx, 4(%3)\n"
                       "3:      movl 8(%4), %%eax\n"
                       "31:     movl 12(%4),%%edx\n"
                       "        movl %%eax, 8(%3)\n"
                       "        movl %%edx, 12(%3)\n"
                       "4:      movl 16(%4), %%eax\n"
                       "41:     movl 20(%4), %%edx\n"
                       "        movl %%eax, 16(%3)\n"
                       "        movl %%edx, 20(%3)\n"
                       "10:     movl 24(%4), %%eax\n"
                       "51:     movl 28(%4), %%edx\n"
                       "        movl %%eax, 24(%3)\n"
                       "        movl %%edx, 28(%3)\n"
                       "11:     movl 32(%4), %%eax\n"
                       "61:     movl 36(%4), %%edx\n"
                       "        movl %%eax, 32(%3)\n"
                       "        movl %%edx, 36(%3)\n"
                       "12:     movl 40(%4), %%eax\n"
                       "71:     movl 44(%4), %%edx\n"
                       "        movl %%eax, 40(%3)\n"
                       "        movl %%edx, 44(%3)\n"
                       "13:     movl 48(%4), %%eax\n"
                       "81:     movl 52(%4), %%edx\n"
                       "        movl %%eax, 48(%3)\n"
                       "        movl %%edx, 52(%3)\n"
                       "14:     movl 56(%4), %%eax\n"
                       "91:     movl 60(%4), %%edx\n"
                       "        movl %%eax, 56(%3)\n"
                       "        movl %%edx, 60(%3)\n"
                       "        addl $-64, %0\n"
                       "        addl $64, %4\n"
                       "        addl $64, %3\n"
                       "        cmpl $63, %0\n"
                       "        ja  0b\n"
                       "5:      movl  %0, %%eax\n"
                       "        shrl  $2, %0\n"
                       "        andl $3, %%eax\n"
                       "        cld\n"
                       "6:      rep; movsl\n"
                       "        movl %%eax,%0\n"
                       "7:      rep; movsb\n"
                       "8:\n"
                       ".section .fixup,\"ax\"\n"
                       "9:      lea 0(%%eax,%0,4),%0\n"
                       "16:     pushl %0\n"
                       "        pushl %%eax\n"
                       "        xorl %%eax,%%eax\n"
                       "        rep; stosb\n"
                       "        popl %%eax\n"
                       "        popl %0\n"
                       "        jmp 8b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "        .align 4\n"
                       "        .long 0b,16b\n"
                       "        .long 1b,16b\n"
                       "        .long 2b,16b\n"
                       "        .long 21b,16b\n"
                       "        .long 3b,16b\n"
                       "        .long 31b,16b\n"
                       "        .long 4b,16b\n"
                       "        .long 41b,16b\n"
                       "        .long 10b,16b\n"
                       "        .long 51b,16b\n"
                       "        .long 11b,16b\n"
                       "        .long 61b,16b\n"
                       "        .long 12b,16b\n"
                       "        .long 71b,16b\n"
                       "        .long 13b,16b\n"
                       "        .long 81b,16b\n"
                       "        .long 14b,16b\n"
                       "        .long 91b,16b\n"
                       "        .long 6b,9b\n"
                       "        .long 7b,16b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}
#else
/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size);
unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary-sized copy.  */
#define __copy_user(to,from,size)                                       \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

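/*
 * Plain-C sketch of the strategy in the generic __copy_user() above,
 * illustration only (name invented, faults ignored): copy a few leading
 * bytes so the destination becomes 8-byte aligned, move the bulk as
 * 32-bit words, then finish the remaining 0-3 bytes.
 */
#if 0
static void copy_user_model(unsigned char *to, const unsigned char *from,
                            unsigned long size)
{
        if (size > 7) {
                unsigned long head = -(unsigned long)to & 7;

                memcpy(to, from, head);         /* "4: rep; movsb" */
                to += head;
                from += head;
                size -= head;

                memcpy(to, from, size & ~3UL);  /* "0: rep; movsl" */
                to += size & ~3UL;
                from += size & ~3UL;
                size &= 3;
        }
        memcpy(to, from, size);                 /* "1: rep; movsb" */
}
#endif
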
#define __copy_user_zeroing(to,from,size)                               \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 6f\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "6:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,6b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

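/*
 * Plain-C sketch of what the __copy_user_zeroing() fixup code adds on top
 * of __copy_user(), for illustration only (name and parameters invented):
 * when the read from userspace faults, the not-yet-copied tail of the
 * kernel buffer is zero-filled so callers never see stale kernel memory,
 * and the number of uncopied bytes is returned.
 */
#if 0
static unsigned long copy_user_zeroing_fixup_model(unsigned char *to,
                                                   unsigned long size,
                                                   unsigned long copied_before_fault)
{
        unsigned long left = size - copied_before_fault;

        memset(to + copied_before_fault, 0, left);      /* "rep; stosb" */
        return left;                                    /* bytes not copied */
}
#endif
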
unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned long n)
{
#ifndef CONFIG_X86_WP_WORKS_OK
        if (unlikely(boot_cpu_data.wp_works_ok == 0) &&
                        ((unsigned long)to) < TASK_SIZE) {
                /*
                 * The CPU does not honor the WP bit when writing from
                 * supervisor mode, and due to preemption or SMP, the page
                 * tables can change at any time.
                 * Do it manually.  Manfred <manfred@colorfullife.com>
                 */
                while (n) {
                        unsigned long offset = ((unsigned long)to) % PAGE_SIZE;
                        unsigned long len = PAGE_SIZE - offset;
                        int retval;
                        struct page *pg;
                        void *maddr;

                        if (len > n)
                                len = n;

survive:
                        down_read(&current->mm->mmap_sem);
                        retval = get_user_pages(current, current->mm,
                                        (unsigned long)to, 1, 1, 0, &pg, NULL);

                        if (retval == -ENOMEM && current->pid == 1) {
                                up_read(&current->mm->mmap_sem);
                                blk_congestion_wait(WRITE, HZ/50);
                                goto survive;
                        }

                        if (retval != 1) {
                                up_read(&current->mm->mmap_sem);
                                break;
                        }

                        maddr = kmap_atomic(pg, KM_USER0);
                        memcpy(maddr + offset, from, len);
                        kunmap_atomic(maddr, KM_USER0);
                        set_page_dirty_lock(pg);
                        put_page(pg);
                        up_read(&current->mm->mmap_sem);

                        from += len;
                        to += len;
                        n -= len;
                }
                return n;
        }
#endif
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        return n;
}

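/*
 * Illustrative caller sketch (not from the original file): the classic
 * read()-style pattern built on copy_to_user(), which on this kernel ends
 * up in __copy_to_user_ll() above.  The function and its arguments are
 * assumptions; only the "returns bytes not copied" convention comes from
 * the code above.
 */
#if 0
static ssize_t example_read(char __user *buf, const char *kbuf, size_t count)
{
        if (copy_to_user(buf, kbuf, count))
                return -EFAULT;         /* nonzero: some bytes not copied */
        return count;
}
#endif
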
unsigned long __copy_from_user_ll(void *to, const void __user *from, unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user_zeroing(to, from, n);
        else
                n = __copy_user_zeroing_intel(to, from, n);
        return n;
}

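/*
 * Illustrative caller sketch (not from the original file): the matching
 * write()-style pattern built on copy_from_user(), which reaches
 * __copy_from_user_ll() above.  Names are assumptions; note that thanks
 * to the zeroing variants, the destination buffer is fully defined even
 * when the copy faults partway through.
 */
#if 0
static ssize_t example_write(char *kbuf, const char __user *buf, size_t count)
{
        if (copy_from_user(kbuf, buf, count))
                return -EFAULT;         /* nonzero: some bytes not copied */
        return count;
}
#endif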