/*
 * arch/tile 64-bit atomic operations (atomic_64.h).
 * NOTE(review): this is a doxygen-extracted fragment; several function
 * bodies between the macro groups below are incomplete in this view.
 */
#ifndef _ASM_TILE_ATOMIC_64_H
#define _ASM_TILE_ATOMIC_64_H

#include <asm/barrier.h>
#include <arch/spr_def.h>
/*
 * Set the 32-bit atomic counter to @i with a plain assignment.
 * No memory barrier is implied; the expression evaluates to @i.
 */
#define atomic_set(v, i) ((v)->counter = (i))
40 val = __insn_cmpexch4((
void *)&v->
counter, n);
49 val = __insn_exch4((
void *)&v->
counter, n);
56 __insn_fetchadd4((
void *)&v->
counter, i);
63 val = __insn_fetchadd4((
void *)&v->
counter, i) +
i;
76 }
while (guess != oldval);
/* Static initializer for an atomic64_t, e.g. atomic64_t a = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i) { (i) }

/* Read the counter with a plain load; no memory barrier is implied. */
#define atomic64_read(v) ((v)->counter)

/* Write the counter with a plain store; no memory barrier is implied. */
#define atomic64_set(v, i) ((v)->counter = (i))
92 val = __insn_cmpexch((
void *)&v->
counter, n);
101 val = __insn_exch((
void *)&v->
counter, n);
108 __insn_fetchadd((
void *)&v->
counter, i);
115 val = __insn_fetchadd((
void *)&v->
counter, i) +
i;
120 static inline long atomic64_add_unless(
atomic64_t *v,
long a,
long u)
122 long guess, oldval = v->
counter;
128 }
while (guess != oldval);
/*
 * Derived 64-bit operations, all expressed in terms of the
 * primitives atomic64_add(), atomic64_add_return() and
 * atomic64_add_unless() defined above.
 */
#define atomic64_sub_return(i, v)	atomic64_add_return(-(i), (v))
#define atomic64_sub(i, v)		atomic64_add(-(i), (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

/* Predicates: true when the operation leaves the counter at zero / negative. */
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)

/* Increment @v unless it is zero; result semantics follow atomic64_add_unless(). */
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
/*
 * Ordering hooks around atomic inc/dec: each expands to a full
 * smp_mb() on this architecture (from <asm/barrier.h>).
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
/* Tell generic code this architecture supplies its own cmpxchg(). */
#define __HAVE_ARCH_CMPXCHG