9 #ifndef __ASM_SPINLOCK_H
10 #define __ASM_SPINLOCK_H
/*
 * NOTE(review): incomplete excerpt of _raw_compare_and_swap() — the return
 * type, opening brace, the asm mnemonic string (presumably the s390 CS
 * compare-and-swap — confirm) and the tail of the asm statement are not
 * visible here; original source lines are missing between the numbered
 * fragments, and the stray leading integers are line-number residue from
 * extraction.  Visible below: the parameter list, then the extended-asm
 * output and input constraint lists.
 */
17 _raw_compare_and_swap(
volatile unsigned int *lock,
18 unsigned int old,
unsigned int new)
/* outputs: "=d" (register) for old, "=Q" (memory) for the lock word */
22 :
"=d" (old),
"=Q" (*lock)
/* inputs: "0" ties old's input to output operand 0; new in a register */
23 :
"0" (old),
"d" (
new),
"Q" (*lock)
/*
 * Test whether the spinlock is currently held.  The lock word is non-zero
 * while held and 0 when free (the unlock path elsewhere in this file
 * releases by compare-and-swapping owner_cpu back to 0).
 * Fix: dropped the "37 " line-number residue that broke the directive.
 */
#define arch_spin_is_locked(x) ((x)->owner_cpu != 0)
/*
 * Busy-wait until the lock is observed free, calling arch_spin_relax()
 * between polls.  Does NOT acquire the lock.
 * Fix: dropped the "38 "/"39 "/"40 " line-number residue that broke the
 * multi-line directive.
 */
#define arch_spin_unlock_wait(lock) \
	do { while (arch_spin_is_locked(lock)) \
		 arch_spin_relax(lock); } while (0)
/*
 * NOTE(review): lone body line of what appears to be arch_spin_unlock() —
 * atomically compare-and-swaps owner_cpu to 0 to release the lock.  The
 * enclosing function header and braces are missing from this excerpt;
 * the leading "80" is line-number residue.
 */
80 _raw_compare_and_swap(&lp->owner_cpu, lp->owner_cpu, 0);
/*
 * A reader may take the rwlock unless a writer holds it.  The writer bit
 * is the sign bit of the lock word (the write paths below CAS in
 * 0x80000000), so the value viewed as signed is negative while
 * write-locked and non-negative otherwise.
 * Fix: dropped the "98 " line-number residue that broke the directive.
 */
#define arch_read_can_lock(x) ((int)(x)->lock >= 0)
/*
 * A writer may take the rwlock only when it is completely free: no writer
 * bit and a zero reader count, i.e. the whole lock word is 0.
 * Fix: dropped the "104 " line-number residue that broke the directive.
 */
#define arch_write_can_lock(x) ((x)->lock == 0)
/*
 * NOTE(review): fast-path fragment of a read-lock routine (original lines
 * 116-117; enclosing signature not visible).  Masks off the writer bit
 * (0x80000000) to get the reader count, then CASes count -> count + 1;
 * on CAS failure presumably falls through to a contended-wait helper on
 * the missing next line — confirm against the full source.
 */
116 old = rw->
lock & 0x7fffffff
U;
117 if (_raw_compare_and_swap(&rw->
lock, old, old + 1) != old)
/*
 * NOTE(review): second read-lock fast-path fragment (original lines
 * 124-125), identical in shape to the one above — presumably the
 * interrupt-flags variant of the read-lock routine; its signature and
 * slow path are not visible in this excerpt.
 */
124 old = rw->
lock & 0x7fffffff
U;
125 if (_raw_compare_and_swap(&rw->
lock, old, old + 1) != old)
/*
 * NOTE(review): fragment of a read-unlock routine (original lines
 * 131-137; lines 132-135 — the function braces and the start of the
 * do-loop — are missing).  Retries a CAS that decrements the reader
 * count until the observed value matches the value the CAS was attempted
 * against (cmp), i.e. until the decrement lands without interference.
 */
131 unsigned int old, cmp;
136 old = _raw_compare_and_swap(&rw->
lock, old, old - 1);
137 }
while (cmp != old);
/*
 * NOTE(review): write-lock fast-path fragment (original line 142).
 * Attempts to CAS the whole lock word from 0 (free) to 0x80000000 (writer
 * bit); on failure presumably drops into a contended-wait helper on the
 * missing following line — confirm against the full source.
 */
142 if (
unlikely(_raw_compare_and_swap(&rw->
lock, 0, 0x80000000) != 0))
/*
 * NOTE(review): second write-lock fast-path fragment (original line 148),
 * identical to the one above — presumably the interrupt-flags variant;
 * signature and slow path are not visible in this excerpt.
 */
148 if (
unlikely(_raw_compare_and_swap(&rw->
lock, 0, 0x80000000) != 0))
/*
 * NOTE(review): write-unlock fragment (original line 154) — clears the
 * writer bit by CASing 0x80000000 back to 0.  Enclosing function header
 * and braces are missing from this excerpt.
 */
154 _raw_compare_and_swap(&rw->
lock, 0x80000000, 0);
/*
 * NOTE(review): read-trylock fragment (original lines 160-161).  Same
 * reader-increment CAS as the read-lock fast path, but wrapped in
 * likely(... == old) — presumably returns success/failure rather than
 * spinning; the return statements are not visible in this excerpt.
 */
160 old = rw->
lock & 0x7fffffff
U;
161 if (
likely(_raw_compare_and_swap(&rw->
lock, old, old + 1) == old))
/*
 * NOTE(review): write-trylock fragment (original line 168).  Single CAS
 * of 0 -> 0x80000000 wrapped in likely(... == 0) — presumably returns
 * success/failure rather than spinning; the return statements are not
 * visible in this excerpt.
 */
168 if (
likely(_raw_compare_and_swap(&rw->
lock, 0, 0x80000000) == 0))
/*
 * No arch-specific backoff for contended rwlocks: both relax hooks just
 * forward to cpu_relax().  The lock argument is intentionally unused.
 * Fix: dropped the "173 "/"174 " line-number residue that broke the
 * directives.
 */
#define arch_read_relax(lock) cpu_relax()
#define arch_write_relax(lock) cpu_relax()