4 #ifndef _ASM_POWERPC_MUTEX_H
5 #define _ASM_POWERPC_MUTEX_H
/*
 * NOTE(review): fragmentary view — the interior of this function
 * (original lines 13-21: the cmpw/bne/stwcx. retry body and the output
 * constraint of the asm) is missing from this chunk. Do not edit the
 * asm without the complete file.
 *
 * __mutex_cmpxchg_lock(v, old, new): atomic compare-and-exchange on
 * v->counter. The visible lwarx is the PPC load-and-reserve; presumably
 * paired with stwcx. (store-conditional) in the elided lines, looping on
 * reservation loss — TODO confirm against the full source. The visible
 * input constraints pass &v->counter, old and new in registers.
 */
7 static inline int __mutex_cmpxchg_lock(
atomic_t *
v,
int old,
int new)
12 "1: lwarx %0,0,%1 # mutex trylock\n\
22 :
"r" (&v->
counter),
"r" (old),
"r" (
new)
/*
 * NOTE(review): fragmentary view — only the signature and the first asm
 * line survive here; the decrement/store-conditional body and return are
 * elided. From the name and the "# mutex lock" asm comment this appears
 * to atomically decrement v->counter and return the new value for the
 * mutex-lock fastpath — TODO confirm against the full source.
 */
28 static inline int __mutex_dec_return_lock(
atomic_t *v)
33 "1: lwarx %0,0,%1 # mutex lock\n\
/*
 * NOTE(review): fragmentary view — only the signature and the first asm
 * line survive here; the increment/store-conditional body and return are
 * elided. From the name and the "# mutex unlock" asm comment this
 * appears to atomically increment v->counter and return the new value
 * for the mutex-unlock fastpath — TODO confirm against the full source.
 */
46 static inline int __mutex_inc_return_unlock(
atomic_t *v)
52 "1: lwarx %0,0,%1 # mutex unlock\n\
/*
 * NOTE(review): orphan fragment — the enclosing function definition is
 * outside this chunk. The test "dec_return < 0" matches the classic
 * decrement-based mutex fastpath: a negative result means the lock was
 * contended, so the (elided) branch presumably calls the slowpath —
 * verify against the full file before editing.
 */
77 if (
unlikely(__mutex_dec_return_lock(count) < 0))
/*
 * NOTE(review): orphan fragment — enclosing function definition not in
 * view. Same contended-lock test as above, but this variant returns
 * fail_fn(count) on contention, i.e. it propagates the slowpath's return
 * value to the caller. fail_fn is not declared in this chunk — presumably
 * a function-pointer parameter of the elided signature; confirm.
 */
94 if (
unlikely(__mutex_dec_return_lock(count) < 0))
95 return fail_fn(count);
/*
 * NOTE(review): orphan fragment — enclosing function definition not in
 * view. "inc_return <= 0" means the counter was negative before the
 * increment, i.e. there are (or may be) waiters, so the (elided) branch
 * presumably invokes the unlock slowpath to wake them — verify against
 * the full file.
 */
111 if (
unlikely(__mutex_inc_return_unlock(count) <= 0))
/*
 * Non-zero: tells the generic mutex code that the slowpath must perform
 * the unlock itself — presumably because the fastpath above releases the
 * count before detecting waiters (NOTE(review): inferred from the
 * visible __mutex_inc_return_unlock() usage; confirm against the
 * asm-generic mutex headers).
 */
115 #define __mutex_slowpath_needs_to_unlock() 1
/*
 * NOTE(review): orphan fragment — enclosing function definition not in
 * view. Trylock fastpath: cmpxchg the count from 1 (unlocked) to 0
 * (locked); success iff the old value read back is 1. The taken branch
 * is elided — presumably "return 1" / call of a success path; verify
 * against the full file.
 */
129 if (
likely(__mutex_cmpxchg_lock(count, 1, 0) == 1))