#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/barrier.h>
#include <asm/errno.h>
#include <asm/war.h>
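
/*
 * __futex_atomic_op() performs an atomic read-modify-write of the user
 * futex word at uaddr: the ll at label 1 loads the old value, "insn"
 * computes the new one into $1, and the matching conditional store at
 * label 2 writes it back.  The first branch is for CPUs needing the
 * R10000 LL/SC workaround, the second for plain LL/SC CPUs.  Both
 * accessing instructions get __ex_table entries; on a fault the .fixup
 * code loads -EFAULT (operand %6) into ret, which the "0" (0) input
 * otherwise pre-initialises to 0.
 */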
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
	if (cpu_has_llsc && R10000_LLSC_WAR) {				\
		__asm__ __volatile__(					\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		: "=r" (ret), "=&r" (oldval), "=R" (*uaddr)		\
		: "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)	\
	} else if (cpu_has_llsc) {					\
		__asm__ __volatile__(					\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		: "=r" (ret), "=&r" (oldval), "=R" (*uaddr)		\
		: "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)	\

static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;
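
	/*
	 * encoded_op packs the operation in bits 28-30, the comparison op
	 * in bits 24-27, and two signed 12-bit arguments below them; the
	 * shift pairs above sign-extend oparg and cmparg.  Each FUTEX_OP_*
	 * case below then operates on the user word via __futex_atomic_op().
	 */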
	switch (op) {
	case FUTEX_OP_ADD:
		__futex_atomic_op("addu	$1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or	$1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	$1, %1, %z5",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor	$1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}
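
/*
 * futex_atomic_cmpxchg_inatomic() atomically compares the user futex
 * word at uaddr with oldval and, only if they match, stores newval
 * (the bne to 3f skips the store on mismatch).  The value actually
 * observed at *uaddr is reported back through *uval; the return value
 * is 0 unless the user access faults, in which case the __ex_table
 * fixup turns it into -EFAULT (operand %6).
 */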
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;
146 "# futex_atomic_cmpxchg_inatomic \n"
151 " bne %1, %z4, 3f \n"
160 " .section .fixup,\"ax\" \n"
164 " .section __ex_table,\"a\" \n"
165 " "__UA_ADDR
"\t1b, 4b \n"
166 " "__UA_ADDR
"\t2b, 4b \n"
168 :
"+r" (ret),
"=&r" (val),
"=R" (*uaddr)
169 :
"R" (*uaddr),
"Jr" (oldval),
"Jr" (newval),
"i" (-
EFAULT)
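
	/*
	 * The "+r" (ret) operand keeps its 0 initialisation unless the
	 * fixup code overwrites it with -EFAULT, and the "Jr" constraints
	 * let a zero oldval/newval be emitted as $0 through the %z operand
	 * modifier.  The branch below is the plain LL/SC path for CPUs
	 * that do not need the R10000 workaround.
	 */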
173 "# futex_atomic_cmpxchg_inatomic \n"
178 " bne %1, %z4, 3f \n"
187 " .section .fixup,\"ax\" \n"
191 " .section __ex_table,\"a\" \n"
192 " "__UA_ADDR
"\t1b, 4b \n"
193 " "__UA_ADDR
"\t2b, 4b \n"
195 :
"+r" (ret),
"=&r" (val),
"=R" (*uaddr)
196 :
"R" (*uaddr),
"Jr" (oldval),
"Jr" (newval),
"i" (-
EFAULT)
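
/*
 * Illustrative sketch only (a hypothetical caller, not part of this
 * header): generic futex code is expected to use the cmpxchg helper
 * roughly as
 *
 *	u32 curval;
 *
 *	if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, uval, newval))
 *		return -EFAULT;
 *	if (curval != uval)
 *		goto retry;
 *
 * i.e. a faulting user access is reported through the return value,
 * while the value actually found at *uaddr comes back in curval.
 */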