/* arch/x86/lib/usercopy_32.c, from linux-2.6.24 (or1k_soc_on_altera_embedded_dev_kit, trunk, rev 3, OpenCores) */
/* https://opencores.org/ocsvn/or1k_soc_on_altera_embedded_dev_kit/or1k_soc_on_altera_embedded_dev_kit/trunk */

/*
 * User address space access functions.
 * The non inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <linux/module.h>
#include <linux/backing-dev.h>
#include <linux/interrupt.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1,a2,n) \
        __movsl_is_ok((unsigned long)(a1),(unsigned long)(a2),(n))
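
/*
 * Worked example (illustrative, not from the original source; assumes
 * movsl_mask.mask == 7, its usual value when CONFIG_X86_INTEL_USERCOPY
 * is set): for copies of 64 bytes or more the "rep; movsl" path is only
 * taken when source and destination share the same alignment modulo 8.
 * a1 = 0x1004, a2 = 0x2004 gives (a1 ^ a2) & 7 == 0, so the string moves
 * are used; a1 = 0x1004, a2 = 0x2006 gives 2, so the caller falls back to
 * the unrolled __copy_user_intel() variants below.
 */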

/*
 * Copy a null terminated string from userspace.
 */

#define __do_strncpy_from_user(dst,src,count,res)                          \
do {                                                                       \
        int __d0, __d1, __d2;                                              \
        might_sleep();                                                     \
        __asm__ __volatile__(                                              \
                "       testl %1,%1\n"                                     \
                "       jz 2f\n"                                           \
                "0:     lodsb\n"                                           \
                "       stosb\n"                                           \
                "       testb %%al,%%al\n"                                 \
                "       jz 1f\n"                                           \
                "       decl %1\n"                                         \
                "       jnz 0b\n"                                          \
                "1:     subl %1,%0\n"                                      \
                "2:\n"                                                     \
                ".section .fixup,\"ax\"\n"                                 \
                "3:     movl %5,%0\n"                                      \
                "       jmp 2b\n"                                          \
                ".previous\n"                                              \
                ".section __ex_table,\"a\"\n"                              \
                "       .align 4\n"                                        \
                "       .long 0b,3b\n"                                     \
                ".previous"                                                \
                : "=d"(res), "=c"(count), "=&a" (__d0), "=&S" (__d1),      \
                  "=&D" (__d2)                                             \
                : "i"(-EFAULT), "0"(count), "1"(count), "3"(src), "4"(dst) \
                : "memory");                                               \
} while (0)

/**
 * __strncpy_from_user: - Copy a NUL terminated string from userspace, with less checking.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 * Caller must check the specified block with access_ok() before calling
 * this function.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
__strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res;
        __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(__strncpy_from_user);

/**
 * strncpy_from_user: - Copy a NUL terminated string from userspace.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res = -EFAULT;
        if (access_ok(VERIFY_READ, src, 1))
                __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(strncpy_from_user);
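
/*
 * Usage sketch (illustrative only, not part of the original file): a typical
 * caller passes a user pointer and a bounded kernel buffer, then checks for
 * both a fault and truncation.  The names below are hypothetical.
 */
#if 0
static long example_get_name(char *kbuf, const char __user *uname, long buflen)
{
        long len = strncpy_from_user(kbuf, uname, buflen);

        if (len < 0)
                return len;             /* -EFAULT: userspace access failed */
        if (len == buflen)
                return -EINVAL;         /* no NUL found within buflen bytes */
        return len;                     /* length excluding the trailing NUL */
}
#endif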

/*
 * Zero Userspace
 */

#define __do_clear_user(addr,size)                                      \
do {                                                                    \
        int __d0;                                                       \
        might_sleep();                                                  \
        __asm__ __volatile__(                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
        might_sleep();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(clear_user);
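
/*
 * Usage sketch (illustrative only, not part of the original file): zeroing
 * the tail of a user buffer, for example after a short read.  "ubuf",
 * "copied" and "total" are hypothetical.
 */
#if 0
static int example_zero_tail(void __user *ubuf, unsigned long copied,
                             unsigned long total)
{
        /* clear_user() returns the number of bytes that could NOT be cleared */
        if (clear_user((char __user *)ubuf + copied, total - copied))
                return -EFAULT;
        return 0;
}
#endif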

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(__clear_user);

/**
 * strnlen_user: - Get the size of a string in user space.
 * @s: The string to measure.
 * @n: The maximum valid length
 *
 * Get the size of a NUL-terminated string in user space.
 *
 * Returns the size of the string INCLUDING the terminating NUL.
 * On exception, returns 0.
 * If the string is too long, returns a value greater than @n.
 */
long strnlen_user(const char __user *s, long n)
{
        unsigned long mask = -__addr_ok(s);
        unsigned long res, tmp;

        might_sleep();

        __asm__ __volatile__(
                "       testl %0, %0\n"
                "       jz 3f\n"
                "       andl %0,%%ecx\n"
                "0:     repne; scasb\n"
                "       setne %%al\n"
                "       subl %%ecx,%0\n"
                "       addl %0,%%eax\n"
                "1:\n"
                ".section .fixup,\"ax\"\n"
                "2:     xorl %%eax,%%eax\n"
                "       jmp 1b\n"
                "3:     movb $1,%%al\n"
                "       jmp 1b\n"
                ".previous\n"
                ".section __ex_table,\"a\"\n"
                "       .align 4\n"
                "       .long 0b,2b\n"
                ".previous"
                :"=r" (n), "=D" (s), "=a" (res), "=c" (tmp)
                :"0" (n), "1" (s), "2" (0), "3" (mask)
                :"cc");
        return res & mask;
}
EXPORT_SYMBOL(strnlen_user);
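
/*
 * Usage sketch (illustrative only, not part of the original file): sizing a
 * user string before copying it, as execve()-style code does.  Per the
 * kernel-doc above, the return value counts the terminating NUL, 0 means a
 * fault, and a value greater than the limit means no NUL was found in time.
 */
#if 0
static long example_user_strlen(const char __user *ustr, long limit)
{
        long len = strnlen_user(ustr, limit);

        if (len == 0)
                return -EFAULT;         /* faulted while scanning */
        if (len > limit)
                return -ENAMETOOLONG;   /* no NUL within the limit */
        return len - 1;                 /* length excluding the NUL */
}
#endif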

#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "       .align 4\n"
                       "       .long 1b,100b\n"
                       "       .long 2b,100b\n"
                       "       .long 3b,100b\n"
                       "       .long 4b,100b\n"
                       "       .long 5b,100b\n"
                       "       .long 6b,100b\n"
                       "       .long 7b,100b\n"
                       "       .long 8b,100b\n"
                       "       .long 9b,100b\n"
                       "       .long 10b,100b\n"
                       "       .long 11b,100b\n"
                       "       .long 12b,100b\n"
                       "       .long 13b,100b\n"
                       "       .long 14b,100b\n"
                       "       .long 15b,100b\n"
                       "       .long 16b,100b\n"
                       "       .long 17b,100b\n"
                       "       .long 18b,100b\n"
                       "       .long 19b,100b\n"
                       "       .long 20b,100b\n"
                       "       .long 21b,100b\n"
                       "       .long 22b,100b\n"
                       "       .long 23b,100b\n"
                       "       .long 24b,100b\n"
                       "       .long 25b,100b\n"
                       "       .long 26b,100b\n"
                       "       .long 27b,100b\n"
                       "       .long 28b,100b\n"
                       "       .long 29b,100b\n"
                       "       .long 30b,100b\n"
                       "       .long 31b,100b\n"
                       "       .long 32b,100b\n"
                       "       .long 33b,100b\n"
                       "       .long 34b,100b\n"
                       "       .long 35b,100b\n"
                       "       .long 36b,100b\n"
                       "       .long 37b,100b\n"
                       "       .long 99b,101b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "        .align 2,0x90\n"
                       "0:      movl 32(%4), %%eax\n"
                       "        cmpl $67, %0\n"
                       "        jbe 2f\n"
                       "1:      movl 64(%4), %%eax\n"
                       "        .align 2,0x90\n"
                       "2:      movl 0(%4), %%eax\n"
                       "21:     movl 4(%4), %%edx\n"
                       "        movl %%eax, 0(%3)\n"
                       "        movl %%edx, 4(%3)\n"
                       "3:      movl 8(%4), %%eax\n"
                       "31:     movl 12(%4),%%edx\n"
                       "        movl %%eax, 8(%3)\n"
                       "        movl %%edx, 12(%3)\n"
                       "4:      movl 16(%4), %%eax\n"
                       "41:     movl 20(%4), %%edx\n"
                       "        movl %%eax, 16(%3)\n"
                       "        movl %%edx, 20(%3)\n"
                       "10:     movl 24(%4), %%eax\n"
                       "51:     movl 28(%4), %%edx\n"
                       "        movl %%eax, 24(%3)\n"
                       "        movl %%edx, 28(%3)\n"
                       "11:     movl 32(%4), %%eax\n"
                       "61:     movl 36(%4), %%edx\n"
                       "        movl %%eax, 32(%3)\n"
                       "        movl %%edx, 36(%3)\n"
                       "12:     movl 40(%4), %%eax\n"
                       "71:     movl 44(%4), %%edx\n"
                       "        movl %%eax, 40(%3)\n"
                       "        movl %%edx, 44(%3)\n"
                       "13:     movl 48(%4), %%eax\n"
                       "81:     movl 52(%4), %%edx\n"
                       "        movl %%eax, 48(%3)\n"
                       "        movl %%edx, 52(%3)\n"
                       "14:     movl 56(%4), %%eax\n"
                       "91:     movl 60(%4), %%edx\n"
                       "        movl %%eax, 56(%3)\n"
                       "        movl %%edx, 60(%3)\n"
                       "        addl $-64, %0\n"
                       "        addl $64, %4\n"
                       "        addl $64, %3\n"
                       "        cmpl $63, %0\n"
                       "        ja  0b\n"
                       "5:      movl  %0, %%eax\n"
                       "        shrl  $2, %0\n"
                       "        andl $3, %%eax\n"
                       "        cld\n"
                       "6:      rep; movsl\n"
                       "        movl %%eax,%0\n"
                       "7:      rep; movsb\n"
                       "8:\n"
                       ".section .fixup,\"ax\"\n"
                       "9:      lea 0(%%eax,%0,4),%0\n"
                       "16:     pushl %0\n"
                       "        pushl %%eax\n"
                       "        xorl %%eax,%%eax\n"
                       "        rep; stosb\n"
                       "        popl %%eax\n"
                       "        popl %0\n"
                       "        jmp 8b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "        .align 4\n"
                       "        .long 0b,16b\n"
                       "        .long 1b,16b\n"
                       "        .long 2b,16b\n"
                       "        .long 21b,16b\n"
                       "        .long 3b,16b\n"
                       "        .long 31b,16b\n"
                       "        .long 4b,16b\n"
                       "        .long 41b,16b\n"
                       "        .long 10b,16b\n"
                       "        .long 51b,16b\n"
                       "        .long 11b,16b\n"
                       "        .long 61b,16b\n"
                       "        .long 12b,16b\n"
                       "        .long 71b,16b\n"
                       "        .long 13b,16b\n"
                       "        .long 81b,16b\n"
                       "        .long 14b,16b\n"
                       "        .long 91b,16b\n"
                       "        .long 6b,9b\n"
                       "        .long 7b,16b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

/*
 * Non Temporal Hint version of __copy_user_zeroing_intel.  It is cache aware.
 * hyoshiok@miraclelinux.com
 */

static unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     pushl %0\n"
               "        pushl %%eax\n"
               "        xorl %%eax,%%eax\n"
               "        rep; stosb\n"
               "        popl %%eax\n"
               "        popl %0\n"
               "        jmp 8b\n"
               ".previous\n"
               ".section __ex_table,\"a\"\n"
               "        .align 4\n"
               "        .long 0b,16b\n"
               "        .long 1b,16b\n"
               "        .long 2b,16b\n"
               "        .long 21b,16b\n"
               "        .long 3b,16b\n"
               "        .long 31b,16b\n"
               "        .long 4b,16b\n"
               "        .long 41b,16b\n"
               "        .long 10b,16b\n"
               "        .long 51b,16b\n"
               "        .long 11b,16b\n"
               "        .long 61b,16b\n"
               "        .long 12b,16b\n"
               "        .long 71b,16b\n"
               "        .long 13b,16b\n"
               "        .long 81b,16b\n"
               "        .long 14b,16b\n"
               "        .long 91b,16b\n"
               "        .long 6b,9b\n"
               "        .long 7b,16b\n"
               ".previous"
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

static unsigned long __copy_user_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     jmp 8b\n"
               ".previous\n"
               ".section __ex_table,\"a\"\n"
               "        .align 4\n"
               "        .long 0b,16b\n"
               "        .long 1b,16b\n"
               "        .long 2b,16b\n"
               "        .long 21b,16b\n"
               "        .long 3b,16b\n"
               "        .long 31b,16b\n"
               "        .long 4b,16b\n"
               "        .long 41b,16b\n"
               "        .long 10b,16b\n"
               "        .long 51b,16b\n"
               "        .long 11b,16b\n"
               "        .long 61b,16b\n"
               "        .long 12b,16b\n"
               "        .long 71b,16b\n"
               "        .long 13b,16b\n"
               "        .long 81b,16b\n"
               "        .long 14b,16b\n"
               "        .long 91b,16b\n"
               "        .long 6b,9b\n"
               "        .long 7b,16b\n"
               ".previous"
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any references to
 * them.
 */
unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
                                        unsigned long size);
unsigned long __copy_user_intel(void __user *to, const void *from,
                                        unsigned long size);
unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to,from,size)                                       \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

#define __copy_user_zeroing(to,from,size)                               \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 6f\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "6:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,6b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

unsigned long __copy_to_user_ll(void __user *to, const void *from,
                                unsigned long n)
{
#ifndef CONFIG_X86_WP_WORKS_OK
        if (unlikely(boot_cpu_data.wp_works_ok == 0) &&
                        ((unsigned long )to) < TASK_SIZE) {
                /*
                 * When we are in an atomic section (see
                 * mm/filemap.c:file_read_actor), return the full
                 * length to take the slow path.
                 */
                if (in_atomic())
                        return n;

                /*
                 * CPU does not honor the WP bit when writing
                 * from supervisory mode, and due to preemption or SMP,
                 * the page tables can change at any time.
                 * Do it manually.      Manfred <manfred@colorfullife.com>
                 */
                while (n) {
                        unsigned long offset = ((unsigned long)to)%PAGE_SIZE;
                        unsigned long len = PAGE_SIZE - offset;
                        int retval;
                        struct page *pg;
                        void *maddr;

                        if (len > n)
                                len = n;

survive:
                        down_read(&current->mm->mmap_sem);
                        retval = get_user_pages(current, current->mm,
                                        (unsigned long )to, 1, 1, 0, &pg, NULL);

                        if (retval == -ENOMEM && is_global_init(current)) {
                                up_read(&current->mm->mmap_sem);
                                congestion_wait(WRITE, HZ/50);
                                goto survive;
                        }

                        if (retval != 1) {
                                up_read(&current->mm->mmap_sem);
                                break;
                        }

                        maddr = kmap_atomic(pg, KM_USER0);
                        memcpy(maddr + offset, from, len);
                        kunmap_atomic(maddr, KM_USER0);
                        set_page_dirty_lock(pg);
                        put_page(pg);
                        up_read(&current->mm->mmap_sem);

                        from += len;
                        to += len;
                        n -= len;
                }
                return n;
        }
#endif
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_to_user_ll);

unsigned long __copy_from_user_ll(void *to, const void __user *from,
                                        unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user_zeroing(to, from, n);
        else
                n = __copy_user_zeroing_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll);

unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
                                         unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel((void __user *)to,
                                      (const void *)from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nozero);

unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if ( n > 64 && cpu_has_xmm2)
                n = __copy_user_zeroing_intel_nocache(to, from, n);
        else
                __copy_user_zeroing(to, from, n);
#else
        __copy_user_zeroing(to, from, n);
#endif
        return n;
}

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if ( n > 64 && cpu_has_xmm2)
                n = __copy_user_intel_nocache(to, from, n);
        else
                __copy_user(to, from, n);
#else
        __copy_user(to, from, n);
#endif
        return n;
}

/**
 * copy_to_user: - Copy a block of data into user space.
 * @to:   Destination address, in user space.
 * @from: Source address, in kernel space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from kernel space to user space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 */
unsigned long
copy_to_user(void __user *to, const void *from, unsigned long n)
{
        if (access_ok(VERIFY_WRITE, to, n))
                n = __copy_to_user(to, from, n);
        return n;
}
EXPORT_SYMBOL(copy_to_user);
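
/*
 * Usage sketch (illustrative only, not part of the original file): returning
 * a fixed-size result structure to userspace; any non-zero return from
 * copy_to_user() is treated as -EFAULT.  "example_result" is hypothetical.
 */
#if 0
struct example_result {
        int status;
        unsigned int bytes;
};

static int example_put_result(void __user *uptr, const struct example_result *res)
{
        if (copy_to_user(uptr, res, sizeof(*res)))
                return -EFAULT;
        return 0;
}
#endif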

/**
 * copy_from_user: - Copy a block of data from user space.
 * @to:   Destination address, in kernel space.
 * @from: Source address, in user space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from user space to kernel space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 *
 * If some data could not be copied, this function will pad the copied
 * data to the requested size using zero bytes.
 */
unsigned long
copy_from_user(void *to, const void __user *from, unsigned long n)
{
        if (access_ok(VERIFY_READ, from, n))
                n = __copy_from_user(to, from, n);
        else
                memset(to, 0, n);
        return n;
}
EXPORT_SYMBOL(copy_from_user);
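
/*
 * Usage sketch (illustrative only, not part of the original file): pulling a
 * fixed-size request from userspace.  On a partial copy the destination is
 * already zero-padded, as documented above, but callers normally still fail
 * with -EFAULT.  "example_request" is hypothetical.
 */
#if 0
struct example_request {
        unsigned int cmd;
        unsigned int arg;
};

static int example_get_request(struct example_request *req, const void __user *uptr)
{
        if (copy_from_user(req, uptr, sizeof(*req)))
                return -EFAULT;
        return 0;
}
#endif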
