11 #include <linux/module.h>
14 #include <asm/uaccess.h>
18 #ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * NOTE(review): fragmentary extract — original kernel line numbers are fused
 * into the text and many lines (braces, returns) are missing. Comments below
 * are hedged accordingly.
 *
 * __movsl_is_ok - decide whether a `rep movsl` block copy is profitable for
 * this src/dst pair. On Intel (CONFIG_X86_INTEL_USERCOPY) large copies
 * (n >= 64) whose addresses differ in the bits selected by movsl_mask.mask
 * presumably take the slow path instead — the non-taken branch / return
 * statements are among the missing lines, so confirm against the full file.
 */
25 static inline int __movsl_is_ok(
unsigned long a1,
unsigned long a2,
unsigned long n)
27 #ifdef CONFIG_X86_INTEL_USERCOPY
/* relative misalignment check: only the XOR of the two addresses matters */
28 if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
/* Convenience wrapper so callers can pass pointers of any type. */
33 #define movsl_is_ok(a1, a2, n) \
34 __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
/*
 * __do_clear_user(addr, size) - zero `size` bytes of user memory at `addr`
 * (fragment: the main asm body, labels 0/1/2 and the exception-table entries
 * are among the missing lines).
 *
 * Visible contract: ECX is loaded with size/4 (dword count), EDI with the
 * destination, EAX with 0; the byte remainder (size & 3) is passed in a
 * spare register. The ".fixup" entry at label 3 recomputes the number of
 * bytes NOT cleared after a fault: lea 0(%2,%0,4) = remainder + 4*dwords_left,
 * which is what the macro leaves in `size` — callers rely on that residue.
 */
40 #define __do_clear_user(addr,size) \
44 __asm__ __volatile__( \
50 ".section .fixup,\"ax\"\n" \
51 "3: lea 0(%2,%0,4),%0\n" \
56 : "=&c"(size), "=&D" (__d0) \
57 : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
99 #ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * __copy_user_intel main loop (fragment: the asm statement opener, the
 * surrounding C function, labels 37+, tail handling and the _ASM_EXTABLE
 * entries are among the missing lines).
 *
 * Pattern visible below: a 64-byte-per-iteration unrolled copy, reading
 * through %4 (source, ESI) and writing through %3 (destination, EDI) using
 * EAX/EDX as a pair of bounce registers. The two leading loads at offsets
 * 32 and 64 touch ahead of the copy — presumably software prefetch of the
 * next cache lines; their results are overwritten immediately. Every
 * numbered label is a potential fault site referenced from the (missing)
 * exception table.
 */
/* prefetch touches: load-ahead at +32 and +64, values discarded */
106 "1: movl 32(%4), %%eax\n"
109 "2: movl 64(%4), %%eax\n"
/* 64-byte unrolled move, 8 bytes per load/store pair */
111 "3: movl 0(%4), %%eax\n"
112 "4: movl 4(%4), %%edx\n"
113 "5: movl %%eax, 0(%3)\n"
114 "6: movl %%edx, 4(%3)\n"
115 "7: movl 8(%4), %%eax\n"
116 "8: movl 12(%4),%%edx\n"
117 "9: movl %%eax, 8(%3)\n"
118 "10: movl %%edx, 12(%3)\n"
119 "11: movl 16(%4), %%eax\n"
120 "12: movl 20(%4), %%edx\n"
121 "13: movl %%eax, 16(%3)\n"
122 "14: movl %%edx, 20(%3)\n"
123 "15: movl 24(%4), %%eax\n"
124 "16: movl 28(%4), %%edx\n"
125 "17: movl %%eax, 24(%3)\n"
126 "18: movl %%edx, 28(%3)\n"
127 "19: movl 32(%4), %%eax\n"
128 "20: movl 36(%4), %%edx\n"
129 "21: movl %%eax, 32(%3)\n"
130 "22: movl %%edx, 36(%3)\n"
131 "23: movl 40(%4), %%eax\n"
132 "24: movl 44(%4), %%edx\n"
133 "25: movl %%eax, 40(%3)\n"
134 "26: movl %%edx, 44(%3)\n"
135 "27: movl 48(%4), %%eax\n"
136 "28: movl 52(%4), %%edx\n"
137 "29: movl %%eax, 48(%3)\n"
138 "30: movl %%edx, 52(%3)\n"
139 "31: movl 56(%4), %%eax\n"
140 "32: movl 60(%4), %%edx\n"
141 "33: movl %%eax, 56(%3)\n"
142 "34: movl %%edx, 60(%3)\n"
/* tail: remaining byte count bounced through EAX (intervening lines cut) */
148 "35: movl %0, %%eax\n"
153 "36: movl %%eax, %0\n"
/* fault recovery: convert dwords-left in %0 back to a byte residue */
156 ".section .fixup,\"ax\"\n"
157 "101: lea 0(%%eax,%0,4),%0\n"
/* outputs: residual count in ECX, dst/src pointers advanced */
198 :
"=&c"(size),
"=&D" (d0),
"=&S" (d1)
/* inputs tied to the same registers; size doubles as loop counter */
199 :
"1"(to),
"2"(from),
"0"(size)
/* EAX/EDX are scratch; "memory" orders the asm against C accesses */
200 :
"eax",
"edx",
"memory");
/*
 * __copy_user_zeroing_intel main loop (fragment: function header, loop
 * control, tail copy and exception-table entries are among the missing
 * lines).
 *
 * Same 64-byte unrolled EAX/EDX bounce copy as __copy_user_intel, but this
 * is the "zeroing" variant used on the read-from-user path: only the LOADS
 * carry fault labels (the stores to %3 are kernel memory and cannot fault),
 * and the fixup below clears EAX before — presumably — zero-filling the
 * unwritten remainder of the destination (the zero-fill loop itself is cut).
 */
/* prefetch touches at +32 / +64 */
210 "0: movl 32(%4), %%eax\n"
213 "1: movl 64(%4), %%eax\n"
211 "2: movl 0(%4), %%eax\n"
216 "21: movl 4(%4), %%edx\n"
217 " movl %%eax, 0(%3)\n"
218 " movl %%edx, 4(%3)\n"
219 "3: movl 8(%4), %%eax\n"
220 "31: movl 12(%4),%%edx\n"
221 " movl %%eax, 8(%3)\n"
222 " movl %%edx, 12(%3)\n"
223 "4: movl 16(%4), %%eax\n"
224 "41: movl 20(%4), %%edx\n"
225 " movl %%eax, 16(%3)\n"
226 " movl %%edx, 20(%3)\n"
227 "10: movl 24(%4), %%eax\n"
228 "51: movl 28(%4), %%edx\n"
229 " movl %%eax, 24(%3)\n"
230 " movl %%edx, 28(%3)\n"
231 "11: movl 32(%4), %%eax\n"
232 "61: movl 36(%4), %%edx\n"
233 " movl %%eax, 32(%3)\n"
234 " movl %%edx, 36(%3)\n"
235 "12: movl 40(%4), %%eax\n"
236 "71: movl 44(%4), %%edx\n"
237 " movl %%eax, 40(%3)\n"
238 " movl %%edx, 44(%3)\n"
239 "13: movl 48(%4), %%eax\n"
240 "81: movl 52(%4), %%edx\n"
241 " movl %%eax, 48(%3)\n"
242 " movl %%edx, 52(%3)\n"
243 "14: movl 56(%4), %%eax\n"
244 "91: movl 60(%4), %%edx\n"
245 " movl %%eax, 56(%3)\n"
246 " movl %%edx, 60(%3)\n"
/* tail byte count via EAX (surrounding lines cut) */
252 "5: movl %0, %%eax\n"
/* fault recovery: rebuild byte residue, then zero EAX for the fill loop */
260 ".section .fixup,\"ax\"\n"
261 "9: lea 0(%%eax,%0,4),%0\n"
264 " xorl %%eax,%%eax\n"
/* same operand/clobber contract as the non-zeroing variant */
290 :
"=&c"(size),
"=&D" (d0),
"=&S" (d1)
291 :
"1"(to),
"2"(from),
"0"(size)
292 :
"eax",
"edx",
"memory");
/*
 * __copy_user_zeroing_intel_nocache (fragment: the function name line and
 * most scaffolding are cut; only the tail of the signature and the asm body
 * survive).
 *
 * Identical structure to __copy_user_zeroing_intel, except the stores use
 * MOVNTI — SSE2 non-temporal stores that bypass the cache, for copies whose
 * destination data will not be re-read soon. Requires an sfence after the
 * loop (among the missing lines — confirm against the full file). The fixup
 * zeroes EAX, consistent with the zero-on-fault read-from-user contract.
 */
302 const void __user *from,
unsigned long size)
/* prefetch touches at +32 / +64 */
308 "0: movl 32(%4), %%eax\n"
311 "1: movl 64(%4), %%eax\n"
/* 64-byte unrolled loop: cached loads, non-temporal stores */
313 "2: movl 0(%4), %%eax\n"
314 "21: movl 4(%4), %%edx\n"
315 " movnti %%eax, 0(%3)\n"
316 " movnti %%edx, 4(%3)\n"
317 "3: movl 8(%4), %%eax\n"
318 "31: movl 12(%4),%%edx\n"
319 " movnti %%eax, 8(%3)\n"
320 " movnti %%edx, 12(%3)\n"
321 "4: movl 16(%4), %%eax\n"
322 "41: movl 20(%4), %%edx\n"
323 " movnti %%eax, 16(%3)\n"
324 " movnti %%edx, 20(%3)\n"
325 "10: movl 24(%4), %%eax\n"
326 "51: movl 28(%4), %%edx\n"
327 " movnti %%eax, 24(%3)\n"
328 " movnti %%edx, 28(%3)\n"
329 "11: movl 32(%4), %%eax\n"
330 "61: movl 36(%4), %%edx\n"
331 " movnti %%eax, 32(%3)\n"
332 " movnti %%edx, 36(%3)\n"
333 "12: movl 40(%4), %%eax\n"
334 "71: movl 44(%4), %%edx\n"
335 " movnti %%eax, 40(%3)\n"
336 " movnti %%edx, 44(%3)\n"
337 "13: movl 48(%4), %%eax\n"
338 "81: movl 52(%4), %%edx\n"
339 " movnti %%eax, 48(%3)\n"
340 " movnti %%edx, 52(%3)\n"
341 "14: movl 56(%4), %%eax\n"
342 "91: movl 60(%4), %%edx\n"
343 " movnti %%eax, 56(%3)\n"
344 " movnti %%edx, 60(%3)\n"
/* tail byte count via EAX */
351 "5: movl %0, %%eax\n"
/* fault recovery: byte residue, then EAX=0 for destination zero-fill */
359 ".section .fixup,\"ax\"\n"
360 "9: lea 0(%%eax,%0,4),%0\n"
363 " xorl %%eax,%%eax\n"
389 :
"=&c"(size),
"=&D" (d0),
"=&S" (d1)
390 :
"1"(to),
"2"(from),
"0"(size)
391 :
"eax",
"edx",
"memory");
/*
 * __copy_user_intel_nocache - cache-bypassing copy from user space
 * (fragment: declarations, loop control, tail copy, sfence and the
 * exception table are among the missing lines).
 *
 * Returns the number of bytes NOT copied (0 on full success), matching the
 * other __copy_user_* helpers. Same MOVNTI unrolled loop as the zeroing
 * variant above, but the visible fixup has no `xorl %eax,%eax` — presumably
 * this variant leaves the destination remainder untouched on fault instead
 * of zero-filling it; confirm against the full file.
 */
395 static unsigned long __copy_user_intel_nocache(
void *to,
396 const void __user *from,
unsigned long size)
/* prefetch touches at +32 / +64 */
402 "0: movl 32(%4), %%eax\n"
405 "1: movl 64(%4), %%eax\n"
/* 64-byte unrolled loop: cached loads, non-temporal stores */
407 "2: movl 0(%4), %%eax\n"
408 "21: movl 4(%4), %%edx\n"
409 " movnti %%eax, 0(%3)\n"
410 " movnti %%edx, 4(%3)\n"
411 "3: movl 8(%4), %%eax\n"
412 "31: movl 12(%4),%%edx\n"
413 " movnti %%eax, 8(%3)\n"
414 " movnti %%edx, 12(%3)\n"
415 "4: movl 16(%4), %%eax\n"
416 "41: movl 20(%4), %%edx\n"
417 " movnti %%eax, 16(%3)\n"
418 " movnti %%edx, 20(%3)\n"
419 "10: movl 24(%4), %%eax\n"
420 "51: movl 28(%4), %%edx\n"
421 " movnti %%eax, 24(%3)\n"
422 " movnti %%edx, 28(%3)\n"
423 "11: movl 32(%4), %%eax\n"
424 "61: movl 36(%4), %%edx\n"
425 " movnti %%eax, 32(%3)\n"
426 " movnti %%edx, 36(%3)\n"
427 "12: movl 40(%4), %%eax\n"
428 "71: movl 44(%4), %%edx\n"
429 " movnti %%eax, 40(%3)\n"
430 " movnti %%edx, 44(%3)\n"
431 "13: movl 48(%4), %%eax\n"
432 "81: movl 52(%4), %%edx\n"
433 " movnti %%eax, 48(%3)\n"
434 " movnti %%edx, 52(%3)\n"
435 "14: movl 56(%4), %%eax\n"
436 "91: movl 60(%4), %%edx\n"
437 " movnti %%eax, 56(%3)\n"
438 " movnti %%edx, 60(%3)\n"
/* tail byte count via EAX */
445 "5: movl %0, %%eax\n"
/* fault recovery: byte residue only (no zero-fill visible here) */
453 ".section .fixup,\"ax\"\n"
454 "9: lea 0(%%eax,%0,4),%0\n"
477 :
"=&c"(size),
"=&D" (d0),
"=&S" (d1)
478 :
"1"(to),
"2"(from),
"0"(size)
479 :
"eax",
"edx",
"memory");
494 const void __user *from,
unsigned long size);
/*
 * __copy_user(to, from, size) - generic (non-Intel-unrolled) user copy:
 * `rep movsl` for the dword bulk plus a byte tail (fragment: the asm body
 * between the opener and the fixup is among the missing lines).
 *
 * Fixup at label 3 rebuilds the byte residue from dwords-left: %3 holds the
 * tail byte count, %0 the remaining dword count, so lea 0(%3,%0,4) leaves
 * bytes-not-copied in `size`. The three _ASM_EXTABLE entries map the faulting
 * copy labels (0/1/4) to their recovery labels (3/2/5).
 */
498 #define __copy_user(to, from, size) \
500 int __d0, __d1, __d2; \
501 __asm__ __volatile__( \
517 ".section .fixup,\"ax\"\n" \
520 "3: lea 0(%3,%0,4),%0\n" \
523 _ASM_EXTABLE(4b,5b) \
524 _ASM_EXTABLE(0b,3b) \
525 _ASM_EXTABLE(1b,2b) \
526 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
527 : "3"(size), "0"(size), "1"(to), "2"(from) \
/*
 * __copy_user_zeroing(to, from, size) - generic read-from-user copy that, on
 * a fault, zero-fills the unwritten remainder of the kernel destination
 * (fragment: the main asm body and the zero-fill loop are among the missing
 * lines).
 *
 * Differences from __copy_user visible here: the fixup clears EAX
 * (`xorl %%eax,%%eax`) to feed the zero-fill, and the label-1 fault routes
 * to label 6 instead of 2 in the exception table. Leaves bytes-not-copied
 * in `size`, same as __copy_user.
 */
531 #define __copy_user_zeroing(to, from, size) \
533 int __d0, __d1, __d2; \
534 __asm__ __volatile__( \
550 ".section .fixup,\"ax\"\n" \
553 "3: lea 0(%3,%0,4),%0\n" \
556 " xorl %%eax,%%eax\n" \
562 _ASM_EXTABLE(4b,5b) \
563 _ASM_EXTABLE(0b,3b) \
564 _ASM_EXTABLE(1b,6b) \
565 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
566 : "3"(size), "0"(size), "1"(to), "2"(from) \
/*
 * Fallback copy-to-user path for CPUs where the WP bit is not honored in
 * supervisor mode (CONFIG_X86_WP_WORKS_OK not set, i.e. original i386):
 * pin the destination page and write through a kernel mapping instead of
 * relying on the fault handler. (Fragment: only the tail of what is
 * presumably a get_user_pages() call and the memcpy survive — verify the
 * callee and argument meaning against the full file.)
 */
573 #ifndef CONFIG_X86_WP_WORKS_OK
603 (
unsigned long)to, 1, 1, 0, &pg,
NULL);
/* copy into the pinned page via its kernel-side mapping */
617 memcpy(maddr + offset, from, len);
/*
 * Dispatch fragments from the __copy_from_user*_nocache helpers: for large
 * copies (n > 64) on SSE2-capable CPUs, route through the MOVNTI-based
 * __copy_user_intel_nocache; otherwise — presumably — fall back to the
 * cached copy path (the fallback branches are among the missing lines).
 * The lone "(const void *)from, n);" at the top looks like the tail of a
 * memcpy/copy call cut mid-expression.
 */
661 (
const void *)from, n);
671 #ifdef CONFIG_X86_INTEL_USERCOPY
/* cache-bypass only pays off past one cache line's worth of data */
672 if (n > 64 && cpu_has_xmm2)
688 #ifdef CONFIG_X86_INTEL_USERCOPY
689 if (n > 64 && cpu_has_xmm2)
690 n = __copy_user_intel_nocache(to, from, n);
/*
 * Compile-time-detected user-copy overflow trap: loudly warn (once per call
 * site) rather than silently truncate. (Fragment: the enclosing function —
 * presumably copy_from_user_overflow() — is cut.)
 */
752 WARN(1,
"Buffer overflow detected!\n");