#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
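
/*
 * Bit access instructions differ across the 68k family:
 *
 *      ColdFire - bset/bclr/bchg with a register operand only
 *      68000    - bset/bclr/bchg with a memory operand
 *      68020+   - also the bfset/bfclr/bfchg bit-field forms
 *
 * Each operation below is therefore provided in all three forms, and the
 * macros further down pick the best one for the configured CPU.
 */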

static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
{
        /* longs are stored big-endian, so bit nr lives in byte (31 - nr) / 8 */
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfset %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define set_bit(nr, vaddr)      bset_reg_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define set_bit(nr, vaddr)      bset_mem_set_bit(nr, vaddr)
#else
#define set_bit(nr, vaddr)      (__builtin_constant_p(nr) ? \
                                bset_mem_set_bit(nr, vaddr) : \
                                bfset_mem_set_bit(nr, vaddr))
#endif

#define __set_bit(nr, vaddr)    set_bit(nr, vaddr)
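
/*
 * Each variant is a single read-modify-write instruction, so the
 * "non-atomic" __set_bit()/__clear_bit()/__change_bit() forms can simply
 * reuse the atomic implementations.
 */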

/* clear_bit() doesn't provide any barrier for the compiler. */
#define smp_mb__before_clear_bit()      barrier()
#define smp_mb__after_clear_bit()       barrier()

static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfclr %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define clear_bit(nr, vaddr)    bclr_reg_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define clear_bit(nr, vaddr)    bclr_mem_clear_bit(nr, vaddr)
#else
#define clear_bit(nr, vaddr)    (__builtin_constant_p(nr) ? \
                                bclr_mem_clear_bit(nr, vaddr) : \
                                bfclr_mem_clear_bit(nr, vaddr))
#endif

#define __clear_bit(nr, vaddr)  clear_bit(nr, vaddr)

static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfchg %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define change_bit(nr, vaddr)   bchg_reg_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define change_bit(nr, vaddr)   bchg_mem_change_bit(nr, vaddr)
#else
#define change_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                bchg_mem_change_bit(nr, vaddr) : \
                                bfchg_mem_change_bit(nr, vaddr))
#endif

#define __change_bit(nr, vaddr) change_bit(nr, vaddr)

static inline int test_bit(int nr, const unsigned long *vaddr)
{
        return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
}
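
/*
 * The test_and_* variants rely on bset/bclr/bchg setting the Z condition
 * code from the old bit value: "sne" then produces a non-zero result
 * exactly when the bit was already set before the operation.
 */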

static inline int bset_reg_test_and_set_bit(int nr,
                                            volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bset_mem_test_and_set_bit(int nr,
                                            volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfset_mem_test_and_set_bit(int nr,
                                             volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_set_bit(nr, vaddr)     bset_reg_test_and_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_set_bit(nr, vaddr)     bset_mem_test_and_set_bit(nr, vaddr)
#else
#define test_and_set_bit(nr, vaddr)     (__builtin_constant_p(nr) ? \
                                        bset_mem_test_and_set_bit(nr, vaddr) : \
                                        bfset_mem_test_and_set_bit(nr, vaddr))
#endif

#define __test_and_set_bit(nr, vaddr)   test_and_set_bit(nr, vaddr)

static inline int bclr_reg_test_and_clear_bit(int nr,
                                              volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bclr_mem_test_and_clear_bit(int nr,
                                              volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfclr_mem_test_and_clear_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_clear_bit(nr, vaddr)   bclr_reg_test_and_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_clear_bit(nr, vaddr)   bclr_mem_test_and_clear_bit(nr, vaddr)
#else
#define test_and_clear_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                        bclr_mem_test_and_clear_bit(nr, vaddr) : \
                                        bfclr_mem_test_and_clear_bit(nr, vaddr))
#endif

#define __test_and_clear_bit(nr, vaddr) test_and_clear_bit(nr, vaddr)

static inline int bchg_reg_test_and_change_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bchg_mem_test_and_change_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfchg_mem_test_and_change_bit(int nr,
                                                volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_change_bit(nr, vaddr)  bchg_reg_test_and_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_change_bit(nr, vaddr)  bchg_mem_test_and_change_bit(nr, vaddr)
#else
#define test_and_change_bit(nr, vaddr)  (__builtin_constant_p(nr) ? \
                                        bchg_mem_test_and_change_bit(nr, vaddr) : \
                                        bfchg_mem_test_and_change_bit(nr, vaddr))
#endif

#define __test_and_change_bit(nr, vaddr) test_and_change_bit(nr, vaddr)
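
/*
 * 68020 and later processors provide the "bfffo" (find first one in a
 * bit field) instruction used by the find_*_bit helpers below. ColdFire
 * and plain 68000/CPU32 parts do not have it and use the generic code.
 */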
#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/ffz.h>
#else

static inline int find_first_zero_bit(const unsigned long *vaddr,
                                      unsigned size)
{
        const unsigned long *p = vaddr;
        int res = 32;
        unsigned int words;
        unsigned long num;

        if (!size)
                return 0;

        /* scan a longword at a time for one that is not all ones */
        words = (size + 31) >> 5;
        while (!(num = ~*p++)) {
                if (!--words)
                        goto out;
        }

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (num & -num));
        res ^= 31;
out:
        res += ((long)p - (long)vaddr - 4) * 8;
        return res < size ? res : size;
}
#define find_first_zero_bit find_first_zero_bit

static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
                                     int offset)
{
        const unsigned long *p = vaddr + (offset >> 5);
        int bit = offset & 31UL, res;

        if (offset >= size)
                return size;

        if (bit) {
                /* look for a zero in the first, partial longword */
                unsigned long num = ~*p++ & (~0UL << bit);
                offset -= bit;

                __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                                      : "=d" (res) : "d" (num & -num));
                if (res < 32) {
                        offset += res ^ 31;
                        return offset < size ? offset : size;
                }
                offset += 32;
                if (offset >= size)
                        return size;
        }
        /* no zero yet, search the remaining full longwords */
        return offset + find_first_zero_bit(p, size - offset);
}
#define find_next_zero_bit find_next_zero_bit

static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
        const unsigned long *p = vaddr;
        int res = 32;
        unsigned int words;
        unsigned long num;

        if (!size)
                return 0;

        /* scan a longword at a time for one that is not all zeroes */
        words = (size + 31) >> 5;
        while (!(num = *p++)) {
                if (!--words)
                        goto out;
        }

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (num & -num));
        res ^= 31;
out:
        res += ((long)p - (long)vaddr - 4) * 8;
        return res < size ? res : size;
}
#define find_first_bit find_first_bit

static inline int find_next_bit(const unsigned long *vaddr, int size,
                                int offset)
{
        const unsigned long *p = vaddr + (offset >> 5);
        int bit = offset & 31UL, res;

        if (offset >= size)
                return size;

        if (bit) {
                /* look for a one in the first, partial longword */
                unsigned long num = *p++ & (~0UL << bit);
                offset -= bit;

                __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                                      : "=d" (res) : "d" (num & -num));
                if (res < 32) {
                        offset += res ^ 31;
                        return offset < size ? offset : size;
                }
                offset += 32;
                if (offset >= size)
                        return size;
        }
        /* no set bit yet, search the remaining full longwords */
        return offset + find_first_bit(p, size - offset);
}
#define find_next_bit find_next_bit
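
/*
 * ffz: find first zero in a word. The result is undefined if no zero
 * bit exists, so callers should check against ~0UL first.
 */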
static inline unsigned long ffz(unsigned long word)
{
        int res;

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (~word & -~word));
        return res ^ 31;
}

#endif
#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#if (defined(__mcfisaaplus__) || defined(__mcfisac__)) && \
        !defined(CONFIG_M68000) && !defined(CONFIG_MCPU32)
static inline int __ffs(int x)
{
        __asm__ __volatile__ ("bitrev %0; ff1 %0"
                : "=d" (x)
                : "0" (x));
        return x;
}

static inline int ffs(int x)
{
        if (!x)
                return 0;
        return __ffs(x) + 1;
}

#else
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__ffs.h>
#endif

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>

#else
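
/*
 * ffs: find first bit set. This is defined the same way as the libc and
 * compiler builtin ffs routines (1-based), so it differs in spirit from
 * ffz() above.
 */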
static inline int ffs(int x)
{
        int cnt;

        __asm__ ("bfffo %1{#0,#0},%0"
                : "=d" (cnt)
                : "dm" (x & -x));
        return 32 - cnt;
}
#define __ffs(x) (ffs(x) - 1)

/* fls: find last bit set */
static inline int fls(int x)
{
        int cnt;

        __asm__ ("bfffo %1{#0,#0},%0"
                : "=d" (cnt)
                : "dm" (x));
        return 32 - cnt;
}

static inline int __fls(int x)
{
        return fls(x) - 1;
}

#endif