8 #ifndef __ASM_AVR32_BITOPS_H
9 #define __ASM_AVR32_BITOPS_H
11 #ifndef _LINUX_BITOPS_H
12 #error only <linux/bitops.h> can be included directly
15 #include <asm/byteorder.h>
/* Barriers paired with clear_bit(): on this architecture they expand to
   a plain compiler barrier() — no hardware barrier instruction is used. */
20 #define smp_mb__before_clear_bit() barrier()
21 #define smp_mb__after_clear_bit() barrier()
/*
 * NOTE(review): incomplete extraction — the enclosing function header
 * (presumably set_bit(nr, addr); confirm against the full file) and the
 * asm instruction strings are missing from this view.  Only the output
 * constraint lists of the two asm variants remain.
 */
39 if (__builtin_constant_p(nr)) {
/* Constant-nr asm variant: tmp is an early-clobber scratch register,
   *p is the in-memory word being modified in place. */
46 :
"=&r"(
tmp),
"=o"(*p)
/* Variable-nr asm variant, same output operands. */
57 :
"=&r"(
tmp),
"=o"(*p)
/*
 * NOTE(review): incomplete extraction — the enclosing function header
 * (presumably clear_bit(nr, addr), given its position after the set_bit
 * fragment; confirm) and the asm strings are not visible here.
 */
78 if (__builtin_constant_p(nr)) {
/* Constant-nr asm variant: tmp = early-clobber scratch register,
   *p = the in-memory word being modified. */
85 :
"=&r"(
tmp),
"=o"(*p)
/* Variable-nr asm variant, same output operands. */
96 :
"=&r"(
tmp),
"=o"(*p)
/*
 * change_bit - toggle a bit in a bitmap in memory.
 * @nr:   bit number to operate on
 * @addr: base address of the bitmap
 *
 * NOTE(review): the opening brace, the mask computation and the asm
 * body are missing from this extraction — presumably an atomic
 * read-modify-write like the sibling set_bit/clear_bit; confirm.
 */
111 static inline void change_bit(
int nr,
volatile void * addr)
/* Point p at the long word that contains bit nr. */
113 unsigned long *p = ((
unsigned long *)addr) + nr /
BITS_PER_LONG;
/* asm output constraints: tmp = early-clobber scratch, *p = the word. */
123 :
"=&r"(
tmp),
"=o"(*p)
/*
 * NOTE(review): incomplete extraction — the function header is not
 * visible (presumably test_and_set_bit(nr, addr), given the "old"
 * capture and the boolean return below; confirm against the full file).
 */
138 unsigned long *p = ((
unsigned long *)addr) + nr /
BITS_PER_LONG;
/* tmp: asm scratch; old: value of the word before the update. */
140 unsigned long tmp, old;
142 if (__builtin_constant_p(nr)) {
/* Constant-nr asm variant: tmp scratch, *p updated, old captured. */
150 :
"=&r"(
tmp),
"=o"(*p),
"=&r"(old)
/* Variable-nr asm variant, same output operands. */
160 :
"=&r"(
tmp),
"=o"(*p),
"=&r"(old)
/* Nonzero iff the tested bit was set in the old value (mask is
   computed in lines missing from this view). */
165 return (old & mask) != 0;
/*
 * NOTE(review): incomplete extraction — the function header is not
 * visible (presumably test_and_clear_bit(nr, addr), by position between
 * the test_and_set and test_and_change fragments; confirm).
 */
178 unsigned long *p = ((
unsigned long *)addr) + nr /
BITS_PER_LONG;
/* tmp: asm scratch; old: value of the word before the update. */
180 unsigned long tmp, old;
182 if (__builtin_constant_p(nr)) {
/* Constant-nr asm variant: tmp scratch, *p updated, old captured. */
190 :
"=&r"(
tmp),
"=o"(*p),
"=&r"(old)
/* Variable-nr asm variant, same output operands. */
201 :
"=&r"(
tmp),
"=o"(*p),
"=&r"(old)
/* Nonzero iff the tested bit was set in the old value (mask is
   computed in lines missing from this view). */
206 return (old & mask) != 0;
/*
 * NOTE(review): incomplete extraction — the function header is not
 * visible (presumably test_and_change_bit(nr, addr): unlike the two
 * fragments above there is no constant-nr fast path, only a single
 * asm variant; confirm against the full file).
 */
219 unsigned long *p = ((
unsigned long *)addr) + nr /
BITS_PER_LONG;
/* tmp: asm scratch; old: value of the word before the update. */
221 unsigned long tmp, old;
/* asm outputs: tmp scratch, *p updated word, old = previous value. */
229 :
"=&r"(
tmp),
"=o"(*p),
"=&r"(old)
/* Nonzero iff the tested bit was set in the old value (mask is
   computed in lines missing from this view). */
233 return (old & mask) != 0;
/*
 * __ffs - find the first (least significant) set bit in word
 * (standard kernel contract; result is undefined for word == 0 —
 * TODO confirm, the body is mostly missing from this extraction).
 *
 * NOTE(review): only the asm operand constraints are visible: result
 * is written, and word is clobbered as an early-clobber output.  The
 * asm instruction string and the return statement are not in view.
 */
239 static inline unsigned long __ffs(
unsigned long word)
245 :
"=r"(
result),
"=&r"(word)
/*
 * ffz - find the first zero bit in word (standard kernel contract —
 * TODO confirm: the entire function body is missing from this
 * extraction; only the signature is visible).
 */
251 static inline unsigned long ffz(
unsigned long word)
/*
 * fls - find last (most significant) set bit, 1-based.
 *
 * Uses the AVR32 CLZ (count leading zeros) instruction to locate the
 * top bit.  NOTE(review): the declaration of result and the return
 * statement are missing from this extraction — presumably
 * "return 32 - result;"; confirm against the full file.
 */
257 static inline int fls(
unsigned long word)
261 asm(
"clz %0,%1" :
"=r"(
result) :
"r"(word));
/*
 * __fls - 0-based index of the most significant set bit: simply
 * fls(word) - 1.  (The surrounding braces are missing from this
 * extraction; the logic itself is fully visible.)
 */
265 static inline int __fls(
unsigned long word)
267 return fls(word) - 1;
/*
 * Self-referential defines: signal to the generic <linux/bitops.h>
 * machinery that this architecture supplies its own out-of-line find_*
 * implementations (their extern prototypes are in lines not visible in
 * this extraction — confirm against the full header).
 */
272 #define find_first_zero_bit find_first_zero_bit
277 #define find_next_zero_bit find_next_zero_bit
281 #define find_first_bit find_first_bit
286 #define find_next_bit find_next_bit
/*
 * ffs - find first set bit, 1-based, built on __ffs().
 *
 * NOTE(review): the customary "if (word == 0) return 0;" guard would
 * fall in lines missing from this extraction — confirm it exists in
 * the full file before relying on ffs(0) == 0.
 */
296 static inline int ffs(
unsigned long word)
300 return __ffs(word) + 1;
/* Little-endian bitmap search helpers: same self-define pattern as the
   find_* macros above — the architecture provides these out of line
   (prototypes not visible in this extraction). */
310 #define find_next_zero_bit_le find_next_zero_bit_le
314 #define find_next_bit_le find_next_bit_le