#include <linux/errno.h>
#include <linux/mm.h>
#include <asm/uaccess.h>
#include <asm/futex.h>
#include "uaccess.h"
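
/*
 * User access functions based on explicit page table walks: each accessor
 * resolves the user address to a kernel-mapped page by hand (follow_table)
 * instead of relying on hardware-assisted access to the user address space.
 */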

/* __user_copy_pt: copy between a kernel buffer and user space at uaddr */
static size_t __user_copy_pt(unsigned long uaddr, void *kptr,
			     size_t n, int write_user)
{
	/* ... */
	kaddr = follow_table(mm, uaddr, write_user);
	/* ... */
	from = (void *) kaddr;
	/* ... */
}
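
/*
 * The *_pt helpers below share one pattern: under mm->page_table_lock,
 * follow_table() turns the current user address into a kernel address,
 * at most one page is processed per iteration, and a failed lookup falls
 * back to regular fault handling before the walk is retried.
 */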

static __always_inline unsigned long __dat_user_addr(unsigned long uaddr,
						     int write)
{
	/* ... */
	kaddr = follow_table(mm, uaddr, write);
	/* ... */
}

	/* from copy_from_user_pt(): clear the uncopied tail on a partial copy */
	rc = __user_copy_pt((unsigned long) from, to, n, 0);
	if (unlikely(rc))
		memset(to + n - rc, 0, rc);

	/* from copy_to_user_pt() */
	return __user_copy_pt((unsigned long) to, (void *) from, n, 1);

/* clear_user_pt: zero n bytes of user memory by copying from the zero page */
static size_t clear_user_pt(size_t n, void __user *to)
{
	/* ... */
		ret = __user_copy_pt((unsigned long) to + done,
				     &empty_zero_page, size, 1);
		/* ... */
		if (ret)
			return ret + n - done;
	/* ... */
}

/* strnlen_user_pt: find the length of a user space string, one page at a time */
static size_t strnlen_user_pt(size_t count, const char __user *src)
{
	unsigned long uaddr = (unsigned long) src;
	/* ... */
	do {
		kaddr = follow_table(mm, uaddr, 0);
		/* ... */
		len_str = strnlen((char *) kaddr, len);
		/* ... */
	} while ((len_str == len) && (done < count));
	/* ... */
}

/* strncpy_from_user_pt: copy a string from user space into dst */
static size_t strncpy_from_user_pt(size_t count, const char __user *src,
				   char *dst)
{
	size_t n = strnlen_user_pt(count, src);

	/* ... (KERNEL_DS fast path: plain memcpy) ... */
		if (dst[n-1] == '\0')
			return n-1;
	/* ... */
	if (__user_copy_pt((unsigned long) src, dst, n, 0))
		return -EFAULT;
	if (dst[n-1] == '\0')
		return n-1;
	else
		return n;
}

/*
 * copy_in_user_pt: copy between two user space buffers; both the source and
 * the destination page are resolved via follow_table() before the memcpy.
 */
static size_t copy_in_user_pt(size_t n, void __user *to,
			      const void __user *from)
{
	/* ... */
	unsigned long uaddr_from = (unsigned long) from;
	unsigned long uaddr_to = (unsigned long) to;
	unsigned long kaddr_to, kaddr_from;
	/* ... */
	kaddr_from = follow_table(mm, uaddr_from, 0);
	error_code = kaddr_from;
	/* ... */
	kaddr_to = follow_table(mm, uaddr_to, 1);
	error_code = (unsigned long) kaddr_to;
	/* ... */
	memcpy((void *) kaddr_to, (void *) kaddr_from, size);
	/* ... */
}
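
/*
 * Futex helpers: an atomic op on a user space u32 is implemented as a
 * compare-and-swap loop: load the old value, derive the new value with the
 * per-op instruction sequence (insn), and retry the CS until it succeeds.
 * The EX_TABLE entries turn a faulting access into a -EFAULT return.
 */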

#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg)	\
	asm volatile("0: l   %1,0(%6)\n"				\
		     "1: " insn						\
		     "2: cs  %1,%2,0(%6)\n"				\
		     "3: jl  1b\n"					\
		     "   lhi %0,0\n"					\
		     "4:\n"						\
		     EX_TABLE(0b,4b) EX_TABLE(2b,4b) EX_TABLE(3b,4b)	\
		     : "=d" (ret), "=&d" (oldval), "=&d" (newval),	\
		       "=m" (*uaddr)					\
		     : "0" (-EFAULT), "d" (oparg), "a" (uaddr),		\
		       "m" (*uaddr) : "cc" );

static int __futex_atomic_op_pt(int op, u32 __user *uaddr, int oparg, int *old)
{
	int oldval = 0, newval, ret;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("lr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("lr %2,%1\nar %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("lr %2,%1\nor %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("lr %2,%1\nnr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("lr %2,%1\nxr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}
	if (ret == 0)
		*old = oldval;
	return ret;
}

int futex_atomic_op_pt(int op, u32 __user *uaddr, int oparg, int *old)
{
	int ret;
	if (segment_eq(get_fs(), KERNEL_DS))
		return __futex_atomic_op_pt(op, uaddr, oparg, old);
	spin_lock(&current->mm->page_table_lock);
	/* ... translate uaddr with __dat_user_addr() and pin the page ... */
	if (!uaddr) {
		spin_unlock(&current->mm->page_table_lock);
		return -EFAULT;
	}
	spin_unlock(&current->mm->page_table_lock);
	ret = __futex_atomic_op_pt(op, uaddr, oparg, old);
	/* ... unpin the page ... */
	return ret;
}
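
/*
 * Compare-and-exchange on a user space futex word: a single CS instruction
 * performs the compare and the conditional store; the value found in user
 * space is handed back through *uval.
 */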

static int __futex_atomic_cmpxchg_pt(u32 *uval, u32 __user *uaddr,
				     u32 oldval, u32 newval)
{
	int ret;

	asm volatile("0: cs   %1,%4,0(%5)\n"
		     "1: la   %0,0\n"
		     "2:\n"
		     EX_TABLE(0b,2b) EX_TABLE(1b,2b)
		     : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
		     : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
		     : "cc", "memory");
	*uval = oldval;
	return ret;
}

int futex_atomic_cmpxchg_pt(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
{
	int ret;
	if (segment_eq(get_fs(), KERNEL_DS))
		return __futex_atomic_cmpxchg_pt(uval, uaddr, oldval, newval);
	spin_lock(&current->mm->page_table_lock);
	/* ... translate uaddr with __dat_user_addr() and pin the page ... */
	if (!uaddr) {
		spin_unlock(&current->mm->page_table_lock);
		return -EFAULT;
	}
	spin_unlock(&current->mm->page_table_lock);
	ret = __futex_atomic_cmpxchg_pt(uval, uaddr, oldval, newval);
	/* ... unpin the page ... */
	return ret;
}

struct uaccess_ops uaccess_pt = {
	/* ... */
	.copy_in_user = copy_in_user_pt,
	.clear_user = clear_user_pt,
	.strnlen_user = strnlen_user_pt,
	.strncpy_from_user = strncpy_from_user_pt,
	/* ... */
};
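
/*
 * Note: whether the kernel routes user accesses through this uaccess_pt
 * table or through one of the other uaccess implementations is decided at
 * boot time outside this file, based on the machine's capabilities.
 */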