#ifndef _ASM_X86_UACCESS_H
#define _ASM_X86_UACCESS_H
#include <linux/errno.h>
#include <linux/compiler.h>
#include <linux/thread_info.h>
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>
#include <asm/smap.h>
#define VERIFY_READ 0
#define VERIFY_WRITE 1
#define MAKE_MM_SEG(s)	((mm_segment_t) { (s) })

#define KERNEL_DS	MAKE_MM_SEG(-1UL)
#define USER_DS		MAKE_MM_SEG(TASK_SIZE_MAX)

#define get_ds()	(KERNEL_DS)
#define get_fs()	(current_thread_info()->addr_limit)
#define set_fs(x)	(current_thread_info()->addr_limit = (x))
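/*
 * Illustrative sketch (not part of this header): the classic pattern for
 * temporarily lifting the address limit so the user-copy helpers accept
 * kernel buffers; the surrounding caller is hypothetical.
 *
 *	mm_segment_t old_fs = get_fs();
 *	set_fs(KERNEL_DS);
 *	... call user-copy helpers on a kernel-space buffer ...
 *	set_fs(old_fs);
 */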
#define segment_eq(a, b)	((a).seg == (b).seg)

#define user_addr_max() (current_thread_info()->addr_limit.seg)
#define __addr_ok(addr)		\
	((unsigned long __force)(addr) < user_addr_max())
/*
 * Test whether a block of memory is a valid user space address.
 * Returns 0 if the range is valid, nonzero otherwise.
 */
#define __range_not_ok(addr, size, limit)				\
({									\
	unsigned long flag, roksum;					\
	__chk_user_ptr(addr);						\
	asm("add %3,%1 ; sbb %0,%0 ; cmp %1,%4 ; sbb $0,%0"		\
	    : "=&r" (flag), "=r" (roksum)				\
	    : "1" (addr), "g" ((long)(size)),				\
	      "rm" (limit));						\
	flag;								\
})

#define access_ok(type, addr, size) \
	(likely(__range_not_ok(addr, size, user_addr_max()) == 0))
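/*
 * Illustrative sketch (not part of this header): validating a user
 * pointer before raw accesses; "uaddr" is a hypothetical __user pointer.
 *
 *	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(*uaddr)))
 *		return -EFAULT;
 */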
#define ARCH_HAS_SORT_EXTABLE
#define ARCH_HAS_SEARCH_EXTABLE
extern int __get_user_bad(void);

#define __get_user_x(size, ret, x, ptr)		      \
	asm volatile("call __get_user_" #size	      \
		     : "=a" (ret), "=d" (x)	      \
		     : "0" (ptr))
#ifdef CONFIG_X86_32
#define __get_user_8(__ret_gu, __val_gu, ptr)				\
		__get_user_x(X, __ret_gu, __val_gu, ptr)
#else
#define __get_user_8(__ret_gu, __val_gu, ptr)				\
		__get_user_x(8, __ret_gu, __val_gu, ptr)
#endif
#define get_user(x, ptr)						\
({									\
	int __ret_gu;							\
	unsigned long __val_gu;						\
	__chk_user_ptr(ptr);						\
	might_fault();							\
	switch (sizeof(*(ptr))) {					\
	case 1: __get_user_x(1, __ret_gu, __val_gu, ptr); break;	\
	case 2: __get_user_x(2, __ret_gu, __val_gu, ptr); break;	\
	case 4: __get_user_x(4, __ret_gu, __val_gu, ptr); break;	\
	case 8: __get_user_8(__ret_gu, __val_gu, ptr); break;		\
	default: __get_user_x(X, __ret_gu, __val_gu, ptr); break;	\
	}								\
	(x) = (__typeof__(*(ptr)))__val_gu;				\
	__ret_gu;							\
})
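/*
 * Illustrative sketch (not part of this header): fetching one value from
 * user space; "uaddr" is a hypothetical __user pointer.
 *
 *	u32 val;
 *	if (get_user(val, uaddr))
 *		return -EFAULT;
 */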
#define __put_user_x(size, x, ptr, __ret_pu)			\
	asm volatile("call __put_user_" #size : "=a" (__ret_pu) \
		     : "0" ((typeof(*(ptr)))(x)), "c" (ptr) : "ebx")
#ifdef CONFIG_X86_32
#define __put_user_asm_u64(x, addr, err, errret)			\
	asm volatile(ASM_STAC "\n"					\
		     "1:	movl %%eax,0(%2)\n"			\
		     "2:	movl %%edx,4(%2)\n"			\
		     "3: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "4:	movl %3,%0\n"				\
		     "	jmp 3b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 4b)				\
		     _ASM_EXTABLE(2b, 4b)				\
		     : "=r" (err)					\
		     : "A" (x), "r" (addr), "i" (errret), "0" (err))
#define __put_user_asm_ex_u64(x, addr)					\
	asm volatile(ASM_STAC "\n"					\
		     "1:	movl %%eax,0(%1)\n"			\
		     "2:	movl %%edx,4(%1)\n"			\
		     "3: " ASM_CLAC "\n"				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     _ASM_EXTABLE_EX(2b, 3b)				\
		     : : "A" (x), "r" (addr))
#define __put_user_x8(x, ptr, __ret_pu)				\
	asm volatile("call __put_user_8" : "=a" (__ret_pu)	\
		     : "A" ((typeof(*(ptr)))(x)), "c" (ptr) : "ebx")
#else
#define __put_user_asm_u64(x, ptr, retval, errret) \
	__put_user_asm(x, ptr, retval, "q", "", "er", errret)
#define __put_user_asm_ex_u64(x, addr)	\
	__put_user_asm_ex(x, addr, "q", "", "er")
#define __put_user_x8(x, ptr, __ret_pu) __put_user_x(8, x, ptr, __ret_pu)
#endif
extern void __put_user_bad(void);

#ifdef CONFIG_X86_WP_WORKS_OK
#define put_user(x, ptr)						\
({									\
	int __ret_pu;							\
	__typeof__(*(ptr)) __pu_val;					\
	__chk_user_ptr(ptr);						\
	might_fault();							\
	__pu_val = x;							\
	switch (sizeof(*(ptr))) {					\
	case 1: __put_user_x(1, __pu_val, ptr, __ret_pu); break;	\
	case 2: __put_user_x(2, __pu_val, ptr, __ret_pu); break;	\
	case 4: __put_user_x(4, __pu_val, ptr, __ret_pu); break;	\
	case 8: __put_user_x8(__pu_val, ptr, __ret_pu); break;		\
	default: __put_user_x(X, __pu_val, ptr, __ret_pu); break;	\
	}								\
	__ret_pu;							\
})
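/*
 * Illustrative sketch (not part of this header): storing one value to
 * user space; "uaddr" is a hypothetical __user pointer.
 *
 *	if (put_user(val, uaddr))
 *		return -EFAULT;
 */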
#define __put_user_size(x, ptr, size, retval, errret)			\
do {									\
	retval = 0;							\
	__chk_user_ptr(ptr);						\
	switch (size) {							\
	case 1: __put_user_asm(x, ptr, retval, "b", "b", "iq", errret); break;	\
	case 2: __put_user_asm(x, ptr, retval, "w", "w", "ir", errret); break;	\
	case 4: __put_user_asm(x, ptr, retval, "l", "k", "ir", errret); break;	\
	case 8: __put_user_asm_u64((__typeof__(*ptr))(x), ptr, retval,	\
				   errret); break;			\
	default: __put_user_bad();					\
	}								\
} while (0)
#define __put_user_size_ex(x, ptr, size)				\
do {									\
	__chk_user_ptr(ptr);						\
	switch (size) {							\
	case 1: __put_user_asm_ex(x, ptr, "b", "b", "iq"); break;	\
	case 2: __put_user_asm_ex(x, ptr, "w", "w", "ir"); break;	\
	case 4: __put_user_asm_ex(x, ptr, "l", "k", "ir"); break;	\
	case 8: __put_user_asm_ex_u64((__typeof__(*ptr))(x), ptr); break;	\
	default: __put_user_bad();					\
	}								\
} while (0)
#else /* !CONFIG_X86_WP_WORKS_OK */

#define __put_user_size(x, ptr, size, retval, errret)			\
do {									\
	__typeof__(*(ptr))__pus_tmp = x;				\
	retval = 0;							\
	if (unlikely(__copy_to_user_ll(ptr, &__pus_tmp, size) != 0))	\
		retval = errret;					\
} while (0)

#define put_user(x, ptr)					\
({								\
	int __ret_pu;						\
	__typeof__(*(ptr))__pus_tmp = x;			\
	__ret_pu = 0;						\
	if (unlikely(__copy_to_user_ll(ptr, &__pus_tmp,		\
				sizeof(*(ptr))) != 0))		\
		__ret_pu = -EFAULT;				\
	__ret_pu;						\
})
#endif
#ifdef CONFIG_X86_32
#define __get_user_asm_u64(x, ptr, retval, errret)	(x) = __get_user_bad()
#define __get_user_asm_ex_u64(x, ptr)			(x) = __get_user_bad()
#else
#define __get_user_asm_u64(x, ptr, retval, errret) \
	 __get_user_asm(x, ptr, retval, "q", "", "=r", errret)
#define __get_user_asm_ex_u64(x, ptr) \
	 __get_user_asm_ex(x, ptr, "q", "", "=r")
#endif
#define __get_user_size(x, ptr, size, retval, errret)			\
do {									\
	retval = 0;							\
	__chk_user_ptr(ptr);						\
	switch (size) {							\
	case 1: __get_user_asm(x, ptr, retval, "b", "b", "=q", errret); break;	\
	case 2: __get_user_asm(x, ptr, retval, "w", "w", "=r", errret); break;	\
	case 4: __get_user_asm(x, ptr, retval, "l", "k", "=r", errret); break;	\
	case 8: __get_user_asm_u64(x, ptr, retval, errret); break;	\
	default: (x) = __get_user_bad();				\
	}								\
} while (0)
#define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %2,%"rtype"1\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	xor"itype" %"rtype"1,%"rtype"1\n"		\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r" (err), ltype(x)				\
		     : "m" (__m(addr)), "i" (errret), "0" (err))
#define __get_user_size_ex(x, ptr, size)				\
do {									\
	__chk_user_ptr(ptr);						\
	switch (size) {							\
	case 1: __get_user_asm_ex(x, ptr, "b", "b", "=q"); break;	\
	case 2: __get_user_asm_ex(x, ptr, "w", "w", "=r"); break;	\
	case 4: __get_user_asm_ex(x, ptr, "l", "k", "=r"); break;	\
	case 8: __get_user_asm_ex_u64(x, ptr); break;			\
	default: (x) = __get_user_bad();				\
	}								\
} while (0)
#define __get_user_asm_ex(x, addr, itype, rtype, ltype)		\
	asm volatile("1:	mov"itype" %1,%"rtype"0\n"		\
		     "2:\n"						\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : ltype(x) : "m" (__m(addr)))
#define __put_user_nocheck(x, ptr, size)			\
({								\
	int __pu_err;						\
	__put_user_size((x), (ptr), (size), __pu_err, -EFAULT);	\
	__pu_err;						\
})
#define __get_user_nocheck(x, ptr, size)				\
({									\
	int __gu_err;							\
	unsigned long __gu_val;						\
	__get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT);	\
	(x) = (__force __typeof__(*(ptr)))__gu_val;			\
	__gu_err;							\
})
struct __large_struct { unsigned long buf[100]; };
#define __m(x) (*(struct __large_struct __user *)(x))
#define __put_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %"rtype"1,%2\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r"(err)					\
		     : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err))
#define __put_user_asm_ex(x, addr, itype, rtype, ltype)			\
	asm volatile("1:	mov"itype" %"rtype"0,%1\n"		\
		     "2:\n"						\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : : ltype(x), "m" (__m(addr)))
#define uaccess_try	do {						\
	current_thread_info()->uaccess_err = 0;				\
	stac();								\
	barrier();

#define uaccess_catch(err)						\
	clac();								\
	(err) |= (current_thread_info()->uaccess_err ? -EFAULT : 0);	\
} while (0)
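/*
 * Illustrative sketch (not part of this header): grouping several
 * exception-handled accesses under a single error check; "err" is a
 * hypothetical local variable.
 *
 *	int err = 0;
 *	uaccess_try;
 *	... exception-handled accesses ...
 *	uaccess_catch(err);
 */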
#define __get_user(x, ptr)						\
	__get_user_nocheck((x), (ptr), sizeof(*(ptr)))
#define __put_user(x, ptr)						\
	__put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))

#define __get_user_unaligned __get_user
#define __put_user_unaligned __put_user
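/*
 * Illustrative sketch (not part of this header): the double-underscore
 * variants skip the range check, so the caller must perform it; "uaddr"
 * is a hypothetical __user pointer.
 *
 *	u32 val;
 *	if (!access_ok(VERIFY_READ, uaddr, sizeof(*uaddr)))
 *		return -EFAULT;
 *	if (__get_user(val, uaddr))
 *		return -EFAULT;
 */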
/*
 * {get|put}_user_try and catch
 */
#define get_user_try		uaccess_try
#define get_user_catch(err)	uaccess_catch(err)

#define get_user_ex(x, ptr)	do {					\
	unsigned long __gue_val;					\
	__get_user_size_ex((__gue_val), (ptr), (sizeof(*(ptr))));	\
	(x) = (__force __typeof__(*(ptr)))__gue_val;			\
} while (0)
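/*
 * Illustrative sketch (not part of this header): get_user_ex() may only
 * appear between get_user_try and get_user_catch; "err" and "uaddr" are
 * hypothetical.
 *
 *	int err = 0;
 *	get_user_try {
 *		get_user_ex(val, uaddr);
 *	} get_user_catch(err);
 */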
#ifdef CONFIG_X86_WP_WORKS_OK

#define put_user_try		uaccess_try
#define put_user_catch(err)	uaccess_catch(err)

#define put_user_ex(x, ptr)						\
	__put_user_size_ex((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))

#else /* !CONFIG_X86_WP_WORKS_OK */

#define put_user_try		do {		\
	int __uaccess_err = 0;

#define put_user_catch(err)			\
	(err) |= __uaccess_err;			\
} while (0)

#define put_user_ex(x, ptr)	do {		\
	__uaccess_err |= __put_user(x, ptr);	\
} while (0)

#endif /* CONFIG_X86_WP_WORKS_OK */
#ifdef CONFIG_X86_INTEL_USERCOPY
extern struct movsl_mask {
	int mask;
} ____cacheline_aligned_in_smp movsl_mask;
#endif

#define ARCH_HAS_NOCACHE_UACCESS 1

#ifdef CONFIG_X86_32
# include <asm/uaccess_32.h>
#else
# include <asm/uaccess_64.h>
#endif

#endif /* _ASM_X86_UACCESS_H */