#ifndef _ASM_TILE_UACCESS_H
#define _ASM_TILE_UACCESS_H
#include <linux/sched.h>
#include <asm/processor.h>
#include <asm/page.h>		/* PAGE_OFFSET, used by USER_DS below */
#define VERIFY_READ 0
#define VERIFY_WRITE 1
/*
 * The fs value determines whether argument validity checking should be
 * performed or not.  If get_fs() == USER_DS, checking is performed; with
 * get_fs() == KERNEL_DS, checking is bypassed.
 */
#define MAKE_MM_SEG(a) ((mm_segment_t) { (a) })

#define KERNEL_DS MAKE_MM_SEG(-1UL)
#define USER_DS MAKE_MM_SEG(PAGE_OFFSET)

#define get_ds() (KERNEL_DS)
#define get_fs() (current_thread_info()->addr_limit)
#define set_fs(x) (current_thread_info()->addr_limit = (x))

#define segment_eq(a, b) ((a).seg == (b).seg)
#ifndef __tilegx__
/* tilepro: the user interrupt area at MEM_USER_INTRPT is mappable. */
static inline int is_arch_mappable_range(unsigned long addr,
					 unsigned long size)
{
	return (addr >= MEM_USER_INTRPT &&
		addr < (MEM_USER_INTRPT + INTRPT_SIZE) &&
		size <= (MEM_USER_INTRPT + INTRPT_SIZE) - addr);
}
#define is_arch_mappable_range is_arch_mappable_range
#else
#define is_arch_mappable_range(addr, size) 0
#endif
/* Returns zero if the address range is a valid user range. */
extern int __range_ok(unsigned long addr, unsigned long size);

#define access_ok(type, addr, size) ({ \
	__chk_user_ptr(addr); \
	likely(__range_ok((unsigned long)(addr), (size)) == 0); \
})
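/*
 * Illustrative sketch (not part of this header; the helper name
 * example_probe_args() is hypothetical): the typical access_ok()
 * calling pattern.  access_ok() only validates the range against
 * addr_limit; actual faults are still caught by the __ex_table
 * fixups further down in this file.
 */
static inline int example_probe_args(const int __user *uargs,
				     unsigned long count)
{
	if (!access_ok(VERIFY_READ, uargs, count * sizeof(int)))
		return -EFAULT;
	return 0;
}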
#ifdef __LP64__
#define _ASM_PTR ".quad"
#else
#define _ASM_PTR ".long"
#endif
#define __get_user_asm(OP, x, ptr, ret) \
	asm volatile("1: {" #OP " %1, %2; movei %0, 0 }\n" \
		     ".pushsection .fixup,\"ax\"\n" \
		     "0: { movei %1, 0; movei %0, %3 }\n" \
		     "j 9f\n" \
		     ".section __ex_table,\"a\"\n" \
		     _ASM_PTR " 1b, 0b\n" \
		     ".popsection\n" \
		     "9:" \
		     : "=r" (ret), "=r" (x) \
		     : "r" (ptr), "i" (-EFAULT))
#ifdef __tilegx__
#define __get_user_1(x, ptr, ret) __get_user_asm(ld1u, x, ptr, ret)
#define __get_user_2(x, ptr, ret) __get_user_asm(ld2u, x, ptr, ret)
#define __get_user_4(x, ptr, ret) __get_user_asm(ld4s, x, ptr, ret)
#define __get_user_8(x, ptr, ret) __get_user_asm(ld, x, ptr, ret)
#else
#define __get_user_1(x, ptr, ret) __get_user_asm(lb_u, x, ptr, ret)
#define __get_user_2(x, ptr, ret) __get_user_asm(lh_u, x, ptr, ret)
#define __get_user_4(x, ptr, ret) __get_user_asm(lw, x, ptr, ret)
#ifdef __LITTLE_ENDIAN
#define __lo32(a, b) a
#define __hi32(a, b) b
#else
#define __lo32(a, b) b
#define __hi32(a, b) a
#endif
#define __get_user_8(x, ptr, ret) \
	({ \
		unsigned int __a, __b; \
		asm volatile("1: { lw %1, %3; addi %2, %3, 4 }\n" \
			     "2: { lw %2, %2; movei %0, 0 }\n" \
			     ".pushsection .fixup,\"ax\"\n" \
			     "0: { movei %1, 0; movei %2, 0 }\n" \
			     "{ movei %0, %4; j 9f }\n" \
			     ".section __ex_table,\"a\"\n" \
			     ".word 1b, 0b\n" \
			     ".word 2b, 0b\n" \
			     ".popsection\n" \
			     "9:" \
			     : "=r" (ret), "=r" (__a), "=&r" (__b) \
			     : "r" (ptr), "i" (-EFAULT)); \
		(x) = (__typeof(x))(__typeof((x)-(x))) \
			(((u64)__hi32(__a, __b) << 32) | \
			 __lo32(__a, __b)); \
	})
#endif /* !__tilegx__ */
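/*
 * Note: on tilepro a 64-bit __get_user is stitched together from two
 * 32-bit loads; both loads (labels 1 and 2) get their own __ex_table
 * entries, and __lo32()/__hi32() reassemble the halves in the order
 * required by the configured endianness.
 */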
extern int __get_user_bad(void)
	__attribute__((warning("sizeof __get_user argument not 1, 2, 4 or 8")));

#define __get_user(x, ptr) \
	({ \
		int __ret; \
		__chk_user_ptr(ptr); \
		switch (sizeof(*(ptr))) { \
		case 1: __get_user_1(x, ptr, __ret); break; \
		case 2: __get_user_2(x, ptr, __ret); break; \
		case 4: __get_user_4(x, ptr, __ret); break; \
		case 8: __get_user_8(x, ptr, __ret); break; \
		default: __ret = __get_user_bad(); break; \
		} \
		__ret; \
	})
/* Support macros for __put_user(). */

#define __put_user_asm(OP, x, ptr, ret) \
	asm volatile("1: {" #OP " %1, %2; movei %0, 0 }\n" \
		     ".pushsection .fixup,\"ax\"\n" \
		     "0: { movei %0, %3; j 9f }\n" \
		     ".section __ex_table,\"a\"\n" \
		     _ASM_PTR " 1b, 0b\n" \
		     ".popsection\n" \
		     "9:" \
		     : "=r" (ret) \
		     : "r" (ptr), "r" (x), "i" (-EFAULT))
#ifdef __tilegx__
#define __put_user_1(x, ptr, ret) __put_user_asm(st1, x, ptr, ret)
#define __put_user_2(x, ptr, ret) __put_user_asm(st2, x, ptr, ret)
#define __put_user_4(x, ptr, ret) __put_user_asm(st4, x, ptr, ret)
#define __put_user_8(x, ptr, ret) __put_user_asm(st, x, ptr, ret)
#else
#define __put_user_1(x, ptr, ret) __put_user_asm(sb, x, ptr, ret)
#define __put_user_2(x, ptr, ret) __put_user_asm(sh, x, ptr, ret)
#define __put_user_4(x, ptr, ret) __put_user_asm(sw, x, ptr, ret)
#define __put_user_8(x, ptr, ret) \
	({ \
		u64 __x = (__typeof((x)-(x)))(x); \
		int __lo = (int) __x, __hi = (int) (__x >> 32); \
		asm volatile("1: { sw %1, %2; addi %0, %1, 4 }\n" \
			     "2: { sw %0, %3; movei %0, 0 }\n" \
			     ".pushsection .fixup,\"ax\"\n" \
			     "0: { movei %0, %4; j 9f }\n" \
			     ".section __ex_table,\"a\"\n" \
			     ".word 1b, 0b\n" \
			     ".word 2b, 0b\n" \
			     ".popsection\n" \
			     "9:" \
			     : "=&r" (ret) \
			     : "r" (ptr), "r" (__lo32(__lo, __hi)), \
			       "r" (__hi32(__lo, __hi)), "i" (-EFAULT)); \
	})
#endif /* !__tilegx__ */
extern int __put_user_bad(void)
	__attribute__((warning("sizeof __put_user argument not 1, 2, 4 or 8")));

#define __put_user(x, ptr) \
	({ \
		int __ret; \
		__chk_user_ptr(ptr); \
		switch (sizeof(*(ptr))) { \
		case 1: __put_user_1(x, ptr, __ret); break; \
		case 2: __put_user_2(x, ptr, __ret); break; \
		case 4: __put_user_4(x, ptr, __ret); break; \
		case 8: __put_user_8(x, ptr, __ret); break; \
		default: __ret = __put_user_bad(); break; \
		} \
		__ret; \
	})
/*
 * The versions of get_user and put_user without initial underscores
 * also check that the address is actually in user space.
 */
#define put_user(x, ptr) \
	({ \
		__typeof__(*(ptr)) __user *__Pu_addr = (ptr); \
		access_ok(VERIFY_WRITE, (__Pu_addr), sizeof(*(__Pu_addr))) ? \
			__put_user((x), (__Pu_addr)) : \
			-EFAULT; \
	})
#define get_user(x, ptr) \
	({ \
		__typeof__(*(ptr)) const __user *__Gu_addr = (ptr); \
		access_ok(VERIFY_READ, (__Gu_addr), sizeof(*(__Gu_addr))) ? \
			__get_user((x), (__Gu_addr)) : \
			((x) = 0, -EFAULT); \
	})
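/*
 * Illustrative sketch (hypothetical helper, not part of this header):
 * the typical get_user()/put_user() calling pattern.  Both return 0 on
 * success and -EFAULT on a faulting or out-of-range pointer; on a
 * failed get_user() the destination is zeroed.
 */
static inline int example_double_word(int __user *uptr)
{
	int val;

	if (get_user(val, uptr))
		return -EFAULT;
	return put_user(val * 2, uptr);
}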
extern unsigned long __must_check __copy_to_user_inatomic(
	void __user *to, const void *from, unsigned long n);

static inline unsigned long __must_check
__copy_to_user(void __user *to, const void *from, unsigned long n)
{
	might_fault();
	return __copy_to_user_inatomic(to, from, n);
}

static inline unsigned long __must_check
copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (access_ok(VERIFY_WRITE, to, n))
		n = __copy_to_user(to, from, n);
	return n;
}
extern unsigned long __must_check __copy_from_user_inatomic(
	void *to, const void __user *from, unsigned long n);
extern unsigned long __must_check __copy_from_user_zeroing(
	void *to, const void __user *from, unsigned long n);
static inline unsigned long __must_check
__copy_from_user(void *to, const void __user *from, unsigned long n)
{
	might_fault();
	return __copy_from_user_zeroing(to, from, n);
}

static inline unsigned long __must_check
_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (access_ok(VERIFY_READ, from, n))
		n = __copy_from_user(to, from, n);
	else
		memset(to, 0, n);
	return n;
}
#ifdef CONFIG_DEBUG_COPY_FROM_USER
extern void copy_from_user_overflow(void)
	__compiletime_warning("copy_from_user() size is not provably correct");

static inline unsigned long __must_check copy_from_user(void *to,
					const void __user *from,
					unsigned long n)
{
	int sz = __compiletime_object_size(to);

	if (likely(sz == -1 || sz >= n))
		n = _copy_from_user(to, from, n);
	else
		copy_from_user_overflow();

	return n;
}
#else
#define copy_from_user _copy_from_user
#endif
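/*
 * Illustrative sketch (hypothetical helper, not in this header): the
 * usual copy_from_user() calling convention.  The return value is the
 * number of bytes that could NOT be copied, so nonzero means a partial
 * or failed copy; the remainder of the kernel buffer is zero-filled.
 */
static inline int example_fetch_config(void *kbuf,
				       const void __user *ubuf,
				       unsigned long len)
{
	if (copy_from_user(kbuf, ubuf, len))
		return -EFAULT;	/* some bytes were left uncopied */
	return 0;
}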
extern unsigned long __copy_in_user_inatomic(
	void __user *to, const void __user *from, unsigned long n);
static inline unsigned long __must_check
__copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	might_fault();
	return __copy_in_user_inatomic(to, from, n);
}
static inline unsigned long __must_check
copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	if (access_ok(VERIFY_WRITE, to, n) && access_ok(VERIFY_READ, from, n))
		n = __copy_in_user(to, from, n);
	return n;
}
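/*
 * Illustrative sketch (hypothetical helper): duplicating one user
 * buffer into another without a kernel bounce buffer.  As with the
 * other copy routines, the return value is the count of bytes that
 * remained uncopied.
 */
static inline int example_dup_user(void __user *dst,
				   const void __user *src,
				   unsigned long len)
{
	return copy_in_user(dst, src, len) ? -EFAULT : 0;
}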
extern long strnlen_user_asm(const char __user *str, long n);
static inline long __must_check strnlen_user(const char __user *str, long n)
{
	might_fault();
	return strnlen_user_asm(str, n);
}
#define strlen_user(str) strnlen_user(str, LONG_MAX)
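/*
 * Illustrative sketch (hypothetical helper): measuring a user string
 * before copying it.  strnlen_user() returns the string length
 * including the terminating NUL, or 0 on an invalid address.
 */
static inline long example_user_strlen(const char __user *ustr)
{
	long len = strlen_user(ustr);

	return len ? len - 1 : -EFAULT;	/* strip the counted NUL */
}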
extern unsigned long clear_user_asm(void __user *mem, unsigned long len);
static inline unsigned long __must_check __clear_user(
	void __user *mem, unsigned long len)
{
	might_fault();
	return clear_user_asm(mem, len);
}
static inline unsigned long __must_check clear_user(
	void __user *mem, unsigned long len)
{
	if (access_ok(VERIFY_WRITE, mem, len))
		return __clear_user(mem, len);
	return len;
}
extern unsigned long flush_user_asm(void __user *mem, unsigned long len);
static inline unsigned long __must_check __flush_user(
	void __user *mem, unsigned long len)
{
	int retval;

	might_fault();
	retval = flush_user_asm(mem, len);
	mb_incoherent();
	return retval;
}

static inline unsigned long __must_check flush_user(
	void __user *mem, unsigned long len)
{
	if (access_ok(VERIFY_WRITE, mem, len))
		return __flush_user(mem, len);
	return len;
}
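/*
 * Illustrative sketch (hypothetical helper): pushing a user buffer out
 * of the cache, e.g. before a non-coherent device reads it.  A nonzero
 * return means part of the range could not be flushed.
 */
static inline int example_flush_for_dma(void __user *buf, unsigned long len)
{
	return flush_user(buf, len) ? -EFAULT : 0;
}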
extern unsigned long inv_user_asm(void __user *mem, unsigned long len);
static inline unsigned long __must_check __inv_user(
	void __user *mem, unsigned long len)
{
	int retval;

	might_fault();
	retval = inv_user_asm(mem, len);
	mb_incoherent();
	return retval;
}

static inline unsigned long __must_check inv_user(
	void __user *mem, unsigned long len)
{
	if (access_ok(VERIFY_WRITE, mem, len))
		return __inv_user(mem, len);
	return len;
}
extern unsigned long finv_user_asm(void __user *mem, unsigned long len);
static inline unsigned long __must_check __finv_user(
	void __user *mem, unsigned long len)
{
	int retval;

	might_fault();
	retval = finv_user_asm(mem, len);
	mb_incoherent();
	return retval;
}

static inline unsigned long __must_check finv_user(
	void __user *mem, unsigned long len)
{
	if (access_ok(VERIFY_WRITE, mem, len))
		return __finv_user(mem, len);
	return len;
}

#endif /* _ASM_TILE_UACCESS_H */