|
/*
 * Static initializers and unlocked accessors for atomic_t (int
 * counter) and atomic64_t (long counter).
 *
 * The reads go through a volatile-qualified pointer so the compiler
 * re-loads the counter from memory on every call instead of caching
 * it in a register; set is a plain store.
 */
#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v, i)	((v)->counter = (i))
#define atomic64_set(v, i)	((v)->counter = (i))
|
#define | atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new)) |
|
#define | atomic64_xchg(v, new) (xchg(&((v)->counter), new)) |
|
#define | atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new)) |
|
#define | atomic_xchg(v, new) (xchg(&((v)->counter), new)) |
|
/*
 * atomic64_inc_not_zero(): increment via atomic64_add_unless()
 * (defined per-arch elsewhere) unless the counter is 0; presumably
 * a non-zero result means the increment happened — semantics follow
 * atomic64_add_unless().
 *
 * atomic_add_negative()/atomic64_add_negative(): add and report
 * whether the resulting value is negative.
 */
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
|
/*
 * Increment/decrement helpers built on the per-arch
 * atomic_add_return()/atomic_sub_return() primitives (and their
 * 64-bit counterparts, defined elsewhere).
 *
 * *_inc_return()/*_dec_return() yield the new counter value;
 * *_and_test() variants return true exactly when the operation
 * leaves the counter at zero.
 */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))

#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)

#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
|
/*
 * Value-less increment/decrement: delegate to the per-arch
 * atomic_add()/atomic_sub() primitives (defined elsewhere) with a
 * fixed operand of 1.
 */
#define atomic_inc(v)		atomic_add(1, (v))
#define atomic64_inc(v)		atomic64_add(1, (v))

#define atomic_dec(v)		atomic_sub(1, (v))
#define atomic64_dec(v)		atomic64_sub(1, (v))
|
/*
 * Memory-barrier hooks bracketing atomic inc/dec: this
 * implementation issues a full smp_mb() (defined per-arch elsewhere)
 * on each side.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
|