#ifndef _ASM_POWERPC_BITOPS_H
#define _ASM_POWERPC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>		/* PPC_ACQUIRE/RELEASE/ATOMIC_*_BARRIER */
/*
 * clear_bit doesn't imply a memory barrier
 */
#define smp_mb__before_clear_bit()	smp_mb()
#define smp_mb__after_clear_bit()	smp_mb()

#define BITOP_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BITOP_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
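/*
 * Worked example (values assume a 64-bit kernel, BITS_PER_LONG == 64):
 * bit number 70 lives in word BITOP_WORD(70) == 1 of the bitmap, and
 * BITOP_MASK(70) == 1UL << 6 selects it within that word.  On 32-bit,
 * BITS_PER_LONG == 32, so the same bit would be word 2, mask 1UL << 6.
 * BITOP_LE_SWIZZLE is 56 on 64-bit (24 on 32-bit); XOR-ing a bit number
 * with it is what the *_le() accessors below use to address bits in
 * little-endian byte order on a big-endian kernel.
 */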
/* Macro for generating the ***_bits() functions */
#define DEFINE_BITOP(fn, op, prefix, postfix)	\
static __inline__ void fn(unsigned long mask,	\
		volatile unsigned long *_p)	\
{						\
	unsigned long old;			\
	unsigned long *p = (unsigned long *)_p;	\
	__asm__ __volatile__ (			\
	prefix					\
"1:"	PPC_LLARX(%0,0,%3,0) "\n"		\
	stringify_in_c(op) "%0,%0,%2\n"		\
	PPC_STLCX "%0,0,%3\n"			\
	"bne- 1b\n"				\
	postfix					\
	: "=&r" (old), "+m" (*p)		\
	: "r" (mask), "r" (p)			\
	: "cc", "memory");			\
}
DEFINE_BITOP(set_bits, or, "", "")
DEFINE_BITOP(clear_bits, andc, "", "")
DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER, "")
DEFINE_BITOP(change_bits, xor, "", "")

static __inline__ void set_bit(int nr, volatile unsigned long *addr)
{
	set_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
}

static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
{
	clear_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
}

static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	clear_bits_unlock(BITOP_MASK(nr), addr + BITOP_WORD(nr));
}

static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
	change_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
}
/* Like DEFINE_BITOP(), but the generated functions also return the old
 * value of the word so callers can test the bit that was affected. */
#define DEFINE_TESTOP(fn, op, prefix, postfix, eh)	\
static __inline__ unsigned long fn(			\
		unsigned long mask,			\
		volatile unsigned long *_p)		\
{							\
	unsigned long old, t;				\
	unsigned long *p = (unsigned long *)_p;		\
	__asm__ __volatile__ (				\
	prefix						\
"1:"	PPC_LLARX(%0,0,%3,eh) "\n"			\
	stringify_in_c(op) "%1,%0,%2\n"			\
	PPC_STLCX "%1,0,%3\n"				\
	"bne- 1b\n"					\
	postfix						\
	: "=&r" (old), "=&r" (t)			\
	: "r" (mask), "r" (p)				\
	: "cc", "memory");				\
	return (old & mask);				\
}
DEFINE_TESTOP(test_and_set_bits, or, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_set_bits_lock, or, "",
	      PPC_ACQUIRE_BARRIER, 1)
DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
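/*
 * The plain test_and_*() variants are bracketed by full entry/exit
 * barriers so they order like other atomic read-modify-write operations.
 * The _lock variant only needs acquire semantics on success, so it uses
 * PPC_ACQUIRE_BARRIER on exit and passes eh=1 to the larx as an
 * "exclusive access" hint for lock-style usage.
 */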
static __inline__ int test_and_set_bit(unsigned long nr,
				       volatile unsigned long *addr)
{
	return test_and_set_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
}

static __inline__ int test_and_set_bit_lock(unsigned long nr,
					    volatile unsigned long *addr)
{
	return test_and_set_bits_lock(BITOP_MASK(nr),
				      addr + BITOP_WORD(nr)) != 0;
}

static __inline__ int test_and_clear_bit(unsigned long nr,
					 volatile unsigned long *addr)
{
	return test_and_clear_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
}

static __inline__ int test_and_change_bit(unsigned long nr,
					  volatile unsigned long *addr)
{
	return test_and_change_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
}
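/*
 * Usage sketch (illustrative only; 'my_flags' and do_something() are
 * made-up caller-side names, not part of this header):
 *
 *	static DECLARE_BITMAP(my_flags, 64);
 *
 *	set_bit(3, my_flags);			// atomic set
 *	if (test_and_clear_bit(3, my_flags))	// atomic clear, returns old bit
 *		do_something();
 *
 * All of the atomic operations above take a bit number and a pointer to
 * the first word of the bitmap.
 */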
#include <asm-generic/bitops/non-atomic.h>	/* __set_bit(), __clear_bit(), ... */

static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
	__clear_bit(nr, addr);
}
/*
 * Return the zero-based bit position (counting from the right) of the
 * most significant 1-bit in a long.
 */
static __inline__ __attribute__((const))
int __ilog2(unsigned long x)
{
	int lz;

	asm (PPC_CNTLZL "%0,%1" : "=r" (lz) : "r" (x));
	return BITS_PER_LONG - 1 - lz;
}
static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
	int bit;

	asm ("cntlzw %0,%1" : "=r" (bit) : "r" (n));
	return 31 - bit;
}
#ifdef __powerpc64__
static inline __attribute__((const))
int __ilog2_u64(u64 n)
{
	int bit;

	asm ("cntlzd %0,%1" : "=r" (bit) : "r" (n));
	return 63 - bit;
}
#endif
217 return __ilog2(x & -x);
222 return __ilog2(x & -x);
/*
 * ffs: find first bit set, numbered from 1 (ffs(0) == 0), matching the
 * libc and compiler builtin ffs routines.
 */
static __inline__ int ffs(int x)
{
	unsigned long i = (unsigned long)x;

	return __ilog2(i & -i) + 1;
}
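/*
 * Worked example: for x = 0b101000 (40), x & -x isolates the lowest set
 * bit, 0b1000, so __ilog2(x & -x) == 3.  Hence __ffs(40) == 3 (zero-based)
 * and ffs(40) == 4 (one-based, libc convention).
 */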
/* fls: find last (most-significant) bit set. */
static __inline__ int fls(unsigned int x)
{
	int lz;

	asm ("cntlzw %0,%1" : "=r" (lz) : "r" (x));
	return 32 - lz;
}
/*
 * 64-bit can do this with a single cntlzd (count leading zeroes
 * doubleword) instruction; 32-bit falls back to the generic version.
 */
#ifdef __powerpc64__
static __inline__ int fls64(__u64 x)
{
	int lz;

	asm ("cntlzd %0,%1" : "=r" (lz) : "r" (x));
	return 64 - lz;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif /* __powerpc64__ */
#ifdef CONFIG_PPC64
unsigned int __arch_hweight8(unsigned int w);
unsigned int __arch_hweight16(unsigned int w);
unsigned int __arch_hweight32(unsigned int w);
unsigned long __arch_hweight64(__u64 w);
#include <asm-generic/bitops/const_hweight.h>
#else
#include <asm-generic/bitops/hweight.h>
#endif
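/*
 * hweight ("Hamming weight") counts the set bits in a value, e.g.
 * __arch_hweight8(0xF0) == 4.  On PPC64 these are provided out of line;
 * other configurations use the generic software hweight.
 */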
/* Little-endian versions. */

static __inline__ int test_bit_le(unsigned long nr,
				  __const__ void *addr)
{
	__const__ unsigned char *tmp = (__const__ unsigned char *) addr;

	return (tmp[nr >> 3] >> (nr & 7)) & 1;
}
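/*
 * Example: test_bit_le(10, addr) reads byte addr[1] (10 >> 3 == 1) and
 * tests bit 2 within it (10 & 7 == 2), i.e. the bit numbering always
 * follows little-endian byte order regardless of the CPU's endianness.
 */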
static inline void set_bit_le(int nr, void *addr)
{
	set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void clear_bit_le(int nr, void *addr)
{
	clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}
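/*
 * Worked example (assuming a 64-bit big-endian kernel, so
 * BITOP_LE_SWIZZLE == 56): set_bit_le(0, addr) becomes set_bit(56, addr),
 * i.e. bit 56 of the first long.  In big-endian storage that is the least
 * significant bit of the first byte in memory, which is exactly where
 * little-endian bit 0 lives.
 */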
static inline void __set_bit_le(int nr, void *addr)
{
	__set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline void __clear_bit_le(int nr, void *addr)
{
	__clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int test_and_set_bit_le(int nr, void *addr)
{
	return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int test_and_clear_bit_le(int nr, void *addr)
{
	return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int __test_and_set_bit_le(int nr, void *addr)
{
	return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

static inline int __test_and_clear_bit_le(int nr, void *addr)
{
	return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}
#define find_first_zero_bit_le(addr, size) \
	find_next_zero_bit_le((addr), (size), 0)