/*
 * arch/i386/lib/usercopy.c
 *
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/config.h>
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <linux/module.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1,a2,n) \
        __movsl_is_ok((unsigned long)(a1),(unsigned long)(a2),(n))

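/*
 * Worked example (editorial note, not part of the original source): with
 * movsl_mask.mask == 7, a 256-byte copy from a source address ending in
 * ...003 to a destination ending in ...004 gives (0x...003 ^ 0x...004) & 7
 * == 7, so movsl_is_ok() returns 0 and, when CONFIG_X86_INTEL_USERCOPY is
 * set, the unrolled Intel copy routines below are used instead of
 * "rep; movsl".  Copies shorter than 64 bytes skip the alignment check.
 */
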
/*
 * Copy a null terminated string from userspace.
 */

#define __do_strncpy_from_user(dst,src,count,res)                          \
do {                                                                       \
        int __d0, __d1, __d2;                                              \
        might_sleep();                                                     \
        __asm__ __volatile__(                                              \
                "       testl %1,%1\n"                                     \
                "       jz 2f\n"                                           \
                "0:     lodsb\n"                                           \
                "       stosb\n"                                           \
                "       testb %%al,%%al\n"                                 \
                "       jz 1f\n"                                           \
                "       decl %1\n"                                         \
                "       jnz 0b\n"                                          \
                "1:     subl %1,%0\n"                                      \
                "2:\n"                                                     \
                ".section .fixup,\"ax\"\n"                                 \
                "3:     movl %5,%0\n"                                      \
                "       jmp 2b\n"                                          \
                ".previous\n"                                              \
                ".section __ex_table,\"a\"\n"                              \
                "       .align 4\n"                                        \
                "       .long 0b,3b\n"                                     \
                ".previous"                                                \
                : "=d"(res), "=c"(count), "=&a" (__d0), "=&S" (__d1),      \
                  "=&D" (__d2)                                             \
                : "i"(-EFAULT), "0"(count), "1"(count), "3"(src), "4"(dst) \
                : "memory");                                               \
} while (0)

/**
 * __strncpy_from_user: - Copy a NUL terminated string from userspace, with less checking.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 * Caller must check the specified block with access_ok() before calling
 * this function.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
__strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res;
        __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(__strncpy_from_user);

/**
 * strncpy_from_user: - Copy a NUL terminated string from userspace.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res = -EFAULT;
        if (access_ok(VERIFY_READ, src, 1))
                __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(strncpy_from_user);

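/*
 * Editorial usage sketch, not part of the original file: how a caller in
 * process context might pull a short string out of userspace.  The function
 * name and parameters are hypothetical; @kname must be at least @ksize
 * bytes long.
 */
static inline int example_get_name(char *kname, unsigned long ksize,
                                   const char __user *uname)
{
        long len = strncpy_from_user(kname, uname, ksize);

        if (len < 0)
                return len;             /* -EFAULT: user pointer faulted */
        if (len == ksize)
                return -EINVAL;         /* no NUL within @ksize bytes */
        return 0;                       /* kname holds a NUL-terminated copy */
}
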
/*
 * Zero Userspace
 */

#define __do_clear_user(addr,size)                                      \
do {                                                                    \
        int __d0;                                                       \
        might_sleep();                                                  \
        __asm__ __volatile__(                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
        might_sleep();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(clear_user);

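/*
 * Editorial usage sketch, not part of the original file: zero-filling the
 * tail of a user buffer after a partial read, as a driver's read() method
 * might do.  All names here are hypothetical.
 */
static inline long example_pad_read(char __user *buf, unsigned long filled,
                                    unsigned long requested)
{
        /* Zero everything past the bytes actually produced. */
        if (clear_user(buf + filled, requested - filled))
                return -EFAULT;         /* part of the range was not writable */
        return requested;
}
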
/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(__clear_user);

/**
 * strnlen_user: - Get the size of a string in user space.
 * @s: The string to measure.
 * @n: The maximum valid length
 *
 * Get the size of a NUL-terminated string in user space.
 *
 * Returns the size of the string INCLUDING the terminating NUL.
 * On exception, returns 0.
 * If the string is too long, returns a value greater than @n.
 */
long strnlen_user(const char __user *s, long n)
{
        unsigned long mask = -__addr_ok(s);
        unsigned long res, tmp;

        might_sleep();

        __asm__ __volatile__(
                "       testl %0, %0\n"
                "       jz 3f\n"
                "       andl %0,%%ecx\n"
                "0:     repne; scasb\n"
                "       setne %%al\n"
                "       subl %%ecx,%0\n"
                "       addl %0,%%eax\n"
                "1:\n"
                ".section .fixup,\"ax\"\n"
                "2:     xorl %%eax,%%eax\n"
                "       jmp 1b\n"
                "3:     movb $1,%%al\n"
                "       jmp 1b\n"
                ".previous\n"
                ".section __ex_table,\"a\"\n"
                "       .align 4\n"
                "       .long 0b,2b\n"
                ".previous"
                :"=r" (n), "=D" (s), "=a" (res), "=c" (tmp)
                :"0" (n), "1" (s), "2" (0), "3" (mask)
                :"cc");
        return res & mask;
}
EXPORT_SYMBOL(strnlen_user);

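/*
 * Editorial usage sketch, not part of the original file: sizing a user
 * string before copying it, e.g. in a hypothetical ioctl handler.  Note the
 * return conventions documented above: 0 means a fault, a value greater
 * than the limit means no NUL was found within it.
 */
static inline long example_user_string_len(const char __user *ustr)
{
        long len = strnlen_user(ustr, PAGE_SIZE);

        if (len == 0)
                return -EFAULT;         /* faulted before finding a NUL */
        if (len > PAGE_SIZE)
                return -EINVAL;         /* no NUL within PAGE_SIZE bytes */
        return len;                     /* length including the trailing NUL */
}
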
#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "       .align 4\n"
                       "       .long 1b,100b\n"
                       "       .long 2b,100b\n"
                       "       .long 3b,100b\n"
                       "       .long 4b,100b\n"
                       "       .long 5b,100b\n"
                       "       .long 6b,100b\n"
                       "       .long 7b,100b\n"
                       "       .long 8b,100b\n"
                       "       .long 9b,100b\n"
                       "       .long 10b,100b\n"
                       "       .long 11b,100b\n"
                       "       .long 12b,100b\n"
                       "       .long 13b,100b\n"
                       "       .long 14b,100b\n"
                       "       .long 15b,100b\n"
                       "       .long 16b,100b\n"
                       "       .long 17b,100b\n"
                       "       .long 18b,100b\n"
                       "       .long 19b,100b\n"
                       "       .long 20b,100b\n"
                       "       .long 21b,100b\n"
                       "       .long 22b,100b\n"
                       "       .long 23b,100b\n"
                       "       .long 24b,100b\n"
                       "       .long 25b,100b\n"
                       "       .long 26b,100b\n"
                       "       .long 27b,100b\n"
                       "       .long 28b,100b\n"
                       "       .long 29b,100b\n"
                       "       .long 30b,100b\n"
                       "       .long 31b,100b\n"
                       "       .long 32b,100b\n"
                       "       .long 33b,100b\n"
                       "       .long 34b,100b\n"
                       "       .long 35b,100b\n"
                       "       .long 36b,100b\n"
                       "       .long 37b,100b\n"
                       "       .long 99b,101b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "        .align 2,0x90\n"
                       "0:      movl 32(%4), %%eax\n"
                       "        cmpl $67, %0\n"
                       "        jbe 2f\n"
                       "1:      movl 64(%4), %%eax\n"
                       "        .align 2,0x90\n"
                       "2:      movl 0(%4), %%eax\n"
                       "21:     movl 4(%4), %%edx\n"
                       "        movl %%eax, 0(%3)\n"
                       "        movl %%edx, 4(%3)\n"
                       "3:      movl 8(%4), %%eax\n"
                       "31:     movl 12(%4),%%edx\n"
                       "        movl %%eax, 8(%3)\n"
                       "        movl %%edx, 12(%3)\n"
                       "4:      movl 16(%4), %%eax\n"
                       "41:     movl 20(%4), %%edx\n"
                       "        movl %%eax, 16(%3)\n"
                       "        movl %%edx, 20(%3)\n"
                       "10:     movl 24(%4), %%eax\n"
                       "51:     movl 28(%4), %%edx\n"
                       "        movl %%eax, 24(%3)\n"
                       "        movl %%edx, 28(%3)\n"
                       "11:     movl 32(%4), %%eax\n"
                       "61:     movl 36(%4), %%edx\n"
                       "        movl %%eax, 32(%3)\n"
                       "        movl %%edx, 36(%3)\n"
                       "12:     movl 40(%4), %%eax\n"
                       "71:     movl 44(%4), %%edx\n"
                       "        movl %%eax, 40(%3)\n"
                       "        movl %%edx, 44(%3)\n"
                       "13:     movl 48(%4), %%eax\n"
                       "81:     movl 52(%4), %%edx\n"
                       "        movl %%eax, 48(%3)\n"
                       "        movl %%edx, 52(%3)\n"
                       "14:     movl 56(%4), %%eax\n"
                       "91:     movl 60(%4), %%edx\n"
                       "        movl %%eax, 56(%3)\n"
                       "        movl %%edx, 60(%3)\n"
                       "        addl $-64, %0\n"
                       "        addl $64, %4\n"
                       "        addl $64, %3\n"
                       "        cmpl $63, %0\n"
                       "        ja  0b\n"
                       "5:      movl  %0, %%eax\n"
                       "        shrl  $2, %0\n"
                       "        andl $3, %%eax\n"
                       "        cld\n"
                       "6:      rep; movsl\n"
                       "        movl %%eax,%0\n"
                       "7:      rep; movsb\n"
                       "8:\n"
                       ".section .fixup,\"ax\"\n"
                       "9:      lea 0(%%eax,%0,4),%0\n"
                       "16:     pushl %0\n"
                       "        pushl %%eax\n"
                       "        xorl %%eax,%%eax\n"
                       "        rep; stosb\n"
                       "        popl %%eax\n"
                       "        popl %0\n"
                       "        jmp 8b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "        .align 4\n"
                       "        .long 0b,16b\n"
                       "        .long 1b,16b\n"
                       "        .long 2b,16b\n"
                       "        .long 21b,16b\n"
                       "        .long 3b,16b\n"
                       "        .long 31b,16b\n"
                       "        .long 4b,16b\n"
                       "        .long 41b,16b\n"
                       "        .long 10b,16b\n"
                       "        .long 51b,16b\n"
                       "        .long 11b,16b\n"
                       "        .long 61b,16b\n"
                       "        .long 12b,16b\n"
                       "        .long 71b,16b\n"
                       "        .long 13b,16b\n"
                       "        .long 81b,16b\n"
                       "        .long 14b,16b\n"
                       "        .long 91b,16b\n"
                       "        .long 6b,9b\n"
                       "        .long 7b,16b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

/*
 * Non-temporal-hint version of __copy_user_zeroing_intel: it uses movnti so
 * the copied data bypasses (does not pollute) the CPU cache.
 * hyoshiok@miraclelinux.com
 */

static unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence\n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     pushl %0\n"
               "        pushl %%eax\n"
               "        xorl %%eax,%%eax\n"
               "        rep; stosb\n"
               "        popl %%eax\n"
               "        popl %0\n"
               "        jmp 8b\n"
               ".previous\n"
               ".section __ex_table,\"a\"\n"
               "        .align 4\n"
               "        .long 0b,16b\n"
               "        .long 1b,16b\n"
               "        .long 2b,16b\n"
               "        .long 21b,16b\n"
               "        .long 3b,16b\n"
               "        .long 31b,16b\n"
               "        .long 4b,16b\n"
               "        .long 41b,16b\n"
               "        .long 10b,16b\n"
               "        .long 51b,16b\n"
               "        .long 11b,16b\n"
               "        .long 61b,16b\n"
               "        .long 12b,16b\n"
               "        .long 71b,16b\n"
               "        .long 13b,16b\n"
               "        .long 81b,16b\n"
               "        .long 14b,16b\n"
               "        .long 91b,16b\n"
               "        .long 6b,9b\n"
               "        .long 7b,16b\n"
               ".previous"
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

#else

/*
 * Leave these declared but undefined.  There should be no references to
 * them.
 */
unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
                                        unsigned long size);
unsigned long __copy_user_intel(void __user *to, const void *from,
                                        unsigned long size);
unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to,from,size)                                       \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

#define __copy_user_zeroing(to,from,size)                               \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 6f\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "6:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,6b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

unsigned long __copy_to_user_ll(void __user *to, const void *from,
                                unsigned long n)
{
        BUG_ON((long) n < 0);
#ifndef CONFIG_X86_WP_WORKS_OK
        if (unlikely(boot_cpu_data.wp_works_ok == 0) &&
                        ((unsigned long )to) < TASK_SIZE) {
                /*
                 * CPU does not honor the WP bit when writing
                 * from supervisory mode, and due to preemption or SMP,
                 * the page tables can change at any time.
                 * Do it manually.  Manfred <manfred@colorfullife.com>
                 */
                while (n) {
                        unsigned long offset = ((unsigned long)to)%PAGE_SIZE;
                        unsigned long len = PAGE_SIZE - offset;
                        int retval;
                        struct page *pg;
                        void *maddr;

                        if (len > n)
                                len = n;

survive:
                        down_read(&current->mm->mmap_sem);
                        retval = get_user_pages(current, current->mm,
                                        (unsigned long )to, 1, 1, 0, &pg, NULL);

                        if (retval == -ENOMEM && current->pid == 1) {
                                up_read(&current->mm->mmap_sem);
                                blk_congestion_wait(WRITE, HZ/50);
                                goto survive;
                        }

                        if (retval != 1) {
                                up_read(&current->mm->mmap_sem);
                                break;
                        }

                        maddr = kmap_atomic(pg, KM_USER0);
                        memcpy(maddr + offset, from, len);
                        kunmap_atomic(maddr, KM_USER0);
                        set_page_dirty_lock(pg);
                        put_page(pg);
                        up_read(&current->mm->mmap_sem);

                        from += len;
                        to += len;
                        n -= len;
                }
                return n;
        }
#endif
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_to_user_ll);

unsigned long __copy_from_user_ll(void *to, const void __user *from,
                                        unsigned long n)
{
        BUG_ON((long)n < 0);
        if (movsl_is_ok(to, from, n))
                __copy_user_zeroing(to, from, n);
        else
                n = __copy_user_zeroing_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll);

unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
                                        unsigned long n)
{
        BUG_ON((long)n < 0);
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && cpu_has_xmm2)
                n = __copy_user_zeroing_intel_nocache(to, from, n);
        else
                __copy_user_zeroing(to, from, n);
#else
        __copy_user_zeroing(to, from, n);
#endif
        return n;
}

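/*
 * Editorial usage sketch, not part of the original file: the nocache
 * variant above pays off for bulk copies whose destination will not be
 * read again soon, since movnti avoids displacing useful cache lines.
 * "kpage" and "ubuf" are hypothetical; the caller is assumed to have done
 * access_ok() already.
 */
static inline unsigned long example_pull_page_nocache(void *kpage,
                                const void __user *ubuf)
{
        /* Returns the number of bytes left uncopied; those are zero-filled. */
        return __copy_from_user_ll_nocache(kpage, ubuf, PAGE_SIZE);
}
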
/**
 * copy_to_user: - Copy a block of data into user space.
 * @to:   Destination address, in user space.
 * @from: Source address, in kernel space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from kernel space to user space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 */
unsigned long
copy_to_user(void __user *to, const void *from, unsigned long n)
{
        might_sleep();
        BUG_ON((long) n < 0);
        if (access_ok(VERIFY_WRITE, to, n))
                n = __copy_to_user(to, from, n);
        return n;
}
EXPORT_SYMBOL(copy_to_user);

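/*
 * Editorial usage sketch, not part of the original file: returning a small
 * kernel structure to userspace, as a hypothetical ioctl handler might.
 * "struct example_info" is made up for illustration.
 */
struct example_info {
        unsigned long version;
        unsigned long flags;
};

static inline int example_get_info(void __user *arg)
{
        struct example_info info = { .version = 1, .flags = 0 };

        /* A nonzero return means part of the structure was not written. */
        if (copy_to_user(arg, &info, sizeof(info)))
                return -EFAULT;
        return 0;
}
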
/**
 * copy_from_user: - Copy a block of data from user space.
 * @to:   Destination address, in kernel space.
 * @from: Source address, in user space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from user space to kernel space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 *
 * If some data could not be copied, this function will pad the copied
 * data to the requested size using zero bytes.
 */
unsigned long
copy_from_user(void *to, const void __user *from, unsigned long n)
{
        might_sleep();
        BUG_ON((long) n < 0);
        if (access_ok(VERIFY_READ, from, n))
                n = __copy_from_user(to, from, n);
        else
                memset(to, 0, n);
        return n;
}
EXPORT_SYMBOL(copy_from_user);
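
/*
 * Editorial usage sketch, not part of the original file: pulling a small
 * request structure in from userspace.  "struct example_req" is made up for
 * illustration; note that on failure the destination is still fully written
 * (partly copied, remainder zero-padded), as documented above.
 */
struct example_req {
        unsigned long cmd;
        unsigned long arg;
};

static inline int example_set_req(struct example_req *req, const void __user *uarg)
{
        if (copy_from_user(req, uarg, sizeof(*req)))
                return -EFAULT;         /* some bytes could not be copied */
        return 0;
}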