12 #ifndef _LINUX_BITOPS_H
13 #error only <linux/bitops.h> can be included directly
16 #include <linux/compiler.h>
17 #include <linux/types.h>
18 #include <asm/barrier.h>
19 #include <asm/byteorder.h>
20 #include <asm/cpu-features.h>
24 #if _MIPS_SZLONG == 32
26 #define SZLONG_MASK 31UL
31 #elif _MIPS_SZLONG == 64
33 #define SZLONG_MASK 63UL
43 #define smp_mb__before_clear_bit() smp_mb__before_llsc()
44 #define smp_mb__after_clear_bit() smp_llsc_mb()
/*
 * NOTE(review): the four prototypes below are truncated by extraction —
 * only the trailing "volatile unsigned long *addr);" of each declaration
 * survives.  Presumably these declare the out-of-line fallback helpers
 * (__mips_set_bit() and friends) used when the CPU has no LL/SC support;
 * confirm the names and leading parameters against the complete header.
 */
55 volatile unsigned long *
addr);
57 volatile unsigned long *
addr);
59 volatile unsigned long *
addr);
61 volatile unsigned long *
addr);
/*
 * set_bit - set a bit in memory.
 * @nr: bit number to set
 * @addr: base address of the bitmap word array
 *
 * The word holding the bit is located via (nr >> SZLONG_LOG) and the bit
 * index within that word via (nr & SZLONG_MASK).  The asm fragments below
 * each load the word with __LL (tagged "# set_bit"), so this is presumably
 * an atomic LL/SC update — the matching __SC/branch lines are not visible
 * in this extraction, verify against the full source.
 *
 * NOTE(review): interior lines (the #if/else branch structure between the
 * three variants, the .set mips directives, __SC/beqzl lines, the "temp"
 * declaration, and closing braces) were lost in extraction.
 */
74 static inline void set_bit(
unsigned long nr,
volatile unsigned long *
addr)
76 unsigned long *
m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
77 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled loop ("1:") — looks like the branch-likely retry form
 * used as an R10000 LL/SC workaround; TODO confirm the guarding #if. */
83 "1: " __LL
"%0, %1 # set_bit \n"
88 :
"=&r" (temp),
"=m" (*m)
89 :
"ir" (1
UL << bit),
"m" (*m));
/* Variant 2 (MIPS R2 only): uses __INS to insert a 1-bit field at
 * position %2 (bit), avoiding the 1UL << bit immediate. */
90 #ifdef CONFIG_CPU_MIPSR2
94 " " __LL
"%0, %1 # set_bit \n"
95 " " __INS
"%0, %3, %2, 1 \n"
97 :
"=&r" (temp),
"+m" (*m)
98 :
"ir" (bit),
"r" (~0));
/* Variant 3: plain LL/SC form OR-ing in (1UL << bit). */
105 " " __LL
"%0, %1 # set_bit \n"
109 :
"=&r" (temp),
"+m" (*m)
110 :
"ir" (1
UL << bit));
/*
 * clear_bit - clear a bit in memory.
 * @nr: bit number to clear
 * @addr: base address of the bitmap word array
 *
 * Mirrors set_bit() above: locates the word via (nr >> SZLONG_LOG), the
 * bit via (nr & SZLONG_MASK), and updates it with __LL-based asm (tagged
 * "# clear_bit") AND-ing with ~(1UL << bit); the R2 variant uses __INS
 * with $0 to zero the single-bit field.
 *
 * NOTE(review): as with set_bit, the #if/else structure, __SC/retry
 * lines, "temp" declaration, and closing braces were lost in extraction —
 * verify the atomicity/loop structure against the full source.
 */
126 static inline void clear_bit(
unsigned long nr,
volatile unsigned long *addr)
128 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
129 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
135 "1: " __LL
"%0, %1 # clear_bit \n"
140 :
"=&r" (temp),
"+m" (*m)
141 :
"ir" (~(1
UL << bit)));
/* Variant 2 (MIPS R2 only): __INS from register $0 clears the bit field. */
142 #ifdef CONFIG_CPU_MIPSR2
146 " " __LL
"%0, %1 # clear_bit \n"
147 " " __INS
"%0, $0, %2, 1 \n"
149 :
"=&r" (temp),
"+m" (*m)
/* Variant 3: plain LL/SC form masking with ~(1UL << bit). */
157 " " __LL
"%0, %1 # clear_bit \n"
161 :
"=&r" (temp),
"+m" (*m)
162 :
"ir" (~(1
UL << bit)));
/*
 * clear_bit_unlock - clear a bit with unlock (release) semantics.
 * @nr: bit number to clear
 * @addr: base address of the bitmap word array
 *
 * NOTE(review): only the signature survived extraction; the body is lost.
 * Presumably it issues a release barrier and then clear_bit() — confirm
 * against the full source before relying on its ordering guarantees.
 */
176 static inline void clear_bit_unlock(
unsigned long nr,
volatile unsigned long *addr)
/*
 * change_bit - toggle a bit in memory.
 * @nr: bit number to toggle
 * @addr: base address of the bitmap word array
 *
 * Same addressing scheme as set_bit()/clear_bit(); each visible asm
 * fragment loads the word with __LL (tagged "# change_bit") and the
 * operand is (1UL << bit), so the update is presumably an XOR done under
 * LL/SC — the xor/__SC lines themselves are not visible here.
 *
 * NOTE(review): the branch structure, "temp" declarations, retry loops
 * and closing braces were lost in extraction.  Unlike set_bit, "m" is
 * declared inside each branch here (lines 196/209).
 */
191 static inline void change_bit(
unsigned long nr,
volatile unsigned long *addr)
193 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
196 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
201 "1: " __LL
"%0, %1 # change_bit \n"
206 :
"=&r" (temp),
"+m" (*m)
207 :
"ir" (1
UL << bit));
/* Variant 2: plain LL/SC form. */
209 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
215 " " __LL
"%0, %1 # change_bit \n"
219 :
"=&r" (temp),
"+m" (*m)
220 :
"ir" (1
UL << bit));
/*
 * test_and_set_bit - set a bit and return its old value.
 * (Function name taken from the "# test_and_set_bit" asm tags below; the
 * signature line itself was lost in extraction — only the trailing
 * "volatile unsigned long *addr)" parameter survives.)
 *
 * The result is computed as "res = temp & (1UL << bit)", i.e. the old
 * word value masked by the target bit; callers presumably receive it as
 * the non-zero/zero truth value.
 *
 * NOTE(review): barrier calls, the __SC/retry lines, branch structure and
 * the final return were lost in extraction — confirm memory-ordering
 * semantics against the full source.
 */
235 volatile unsigned long *addr)
237 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
243 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
248 "1: " __LL
"%0, %1 # test_and_set_bit \n"
254 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
/* Variant 2: plain LL/SC form; old value extracted after the loop. */
258 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
264 " " __LL
"%0, %1 # test_and_set_bit \n"
268 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
273 res = temp & (1
UL <<
bit);
/*
 * Second test-and-set variant.  The asm tags still read
 * "# test_and_set_bit", but given its position immediately after the
 * first test_and_set_bit this is presumably test_and_set_bit_lock()
 * (acquire semantics) — the signature line was lost in extraction, so
 * confirm the name and barrier placement against the full source.
 *
 * Structure mirrors the block above: locate word, LL the old value,
 * and compute "res = temp & (1UL << bit)" as the returned old bit.
 */
291 volatile unsigned long *addr)
293 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
297 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
302 "1: " __LL
"%0, %1 # test_and_set_bit \n"
308 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
/* Variant 2: plain LL/SC form; old value extracted after the loop. */
312 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
318 " " __LL
"%0, %1 # test_and_set_bit \n"
322 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
327 res = temp & (1
UL <<
bit);
/*
 * test_and_clear_bit - clear a bit and return its old value.
 * (Name taken from the "# test_and_clear_bit" asm tags; the signature
 * line was lost in extraction.)
 *
 * Three variants are visible: a labeled retry form, a MIPS R2 form that
 * uses __EXT to extract the old bit into %2 (res) and __INS with $0 to
 * clear it, and a plain LL/SC form whose old value is masked afterwards
 * with "res = temp & (1UL << bit)".
 *
 * NOTE(review): barriers, __SC/branch lines, loop structure and the
 * return were lost in extraction — verify against the full source.
 */
344 volatile unsigned long *addr)
346 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
352 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
357 "1: " __LL
"%0, %1 # test_and_clear_bit \n"
364 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
/* Variant 2 (MIPS R2 only): __EXT reads the old bit, __INS $0 clears it. */
367 #ifdef CONFIG_CPU_MIPSR2
369 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
374 " " __LL
"%0, %1 # test_and_clear_bit \n"
375 " " __EXT
"%2, %0, %3, 1 \n"
376 " " __INS
"%0, $0, %3, 1 \n"
378 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
/* Variant 3: plain LL/SC form; old value extracted after the loop. */
384 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
390 " " __LL
"%0, %1 # test_and_clear_bit \n"
395 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
400 res = temp & (1
UL <<
bit);
/*
 * test_and_change_bit - toggle a bit and return its old value.
 * (Name taken from the "# test_and_change_bit" asm tags; the signature
 * line was lost in extraction.)
 *
 * Same shape as the other test_and_* routines: LL the word, update it
 * (the visible __SC "\t%2, %1" stores via the res register in variant 2),
 * and compute "res = temp & (1UL << bit)" as the old bit value.
 *
 * NOTE(review): barriers, the xor/branch lines, loop structure and the
 * return were lost in extraction — verify against the full source.
 */
418 volatile unsigned long *addr)
420 int bit = nr & SZLONG_MASK;
/* Variant 1: labeled ("1:") retry form. */
426 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
431 "1: " __LL
"%0, %1 # test_and_change_bit \n"
437 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
/* Variant 2: plain LL/SC form; __SC conditionally stores the new word. */
441 unsigned long *m = ((
unsigned long *) addr) + (nr >> SZLONG_LOG);
447 " " __LL
"%0, %1 # test_and_change_bit \n"
449 " " __SC
"\t%2, %1 \n"
451 :
"=&r" (temp),
"+m" (*m),
"=&r" (res)
456 res = temp & (1
UL <<
bit);
/*
 * __fls - find last (most-significant) set bit in @word.
 * Only the signature and a 64-bit narrowing step survive extraction: on
 * BITS_PER_LONG == 64, if no bit is set in the upper 32 bits the search
 * presumably continues in the low half.  The hardware (clz) path and the
 * remaining software binary-search steps were lost — confirm the return
 * convention (bit index of the MSB, undefined for word == 0) against the
 * full source.
 */
486 static inline unsigned long __fls(
unsigned long word)
518 #if BITS_PER_LONG == 64
519 if (!(word & (~0ul << 32))) {
/*
 * __ffs - find first (least-significant) set bit in @word.
 * Implemented by isolating the lowest set bit with (word & -word) — a
 * power of two — and handing it to __fls(), whose MSB index is then the
 * wanted LSB index.  Like __fls(), the result for word == 0 is
 * presumably undefined.  (The surrounding braces were lost in
 * extraction; the body itself is fully visible.)
 */
552 static inline unsigned long __ffs(
unsigned long word)
554 return __fls(word & -word);
/*
 * fls - find last set bit in a 32-bit int (1-based; fls(1) == 1).
 * Two paths are visible: a hardware path using the MIPS "clz"
 * (count-leading-zeros) instruction, and a software binary search that
 * narrows the position by testing progressively smaller top-bit masks
 * (0xffff0000, 0xff000000, ... 0x80000000).
 *
 * NOTE(review): the path-selection condition, the shift/accumulate lines
 * between the mask tests, the post-clz arithmetic and the return were
 * lost in extraction — confirm the exact return convention against the
 * full source.
 */
564 static inline int fls(
int x)
/* Hardware path: clz counts leading zeros of x in one instruction. */
569 __asm__(
"clz %0, %1" :
"=r" (x) :
"r" (x));
/* Software path: successive halving of the candidate bit range. */
577 if (!(x & 0xffff0000u)) {
581 if (!(x & 0xff000000u)) {
585 if (!(x & 0xf0000000u)) {
589 if (!(x & 0xc0000000u)) {
593 if (!(x & 0x80000000u)) {
/*
 * ffs - find first set bit in an int (1-based, libc convention).
 * The visible body isolates the lowest set bit with (word & -word) and
 * reuses fls() on it.  NOTE(review): preceding body lines (likely a
 * "if (!word) return 0;" guard) and the braces were lost in extraction —
 * confirm the word == 0 behavior against the full source.
 */
610 static inline int ffs(
int word)
615 return fls(word & -word);
625 #include <asm/arch_hweight.h>