7 #ifndef _BLACKFIN_BITOPS_H
8 #define _BLACKFIN_BITOPS_H
10 #include <linux/compiler.h>
19 #ifndef _LINUX_BITOPS_H
20 #error only <linux/bitops.h> can be included directly
36 #ifndef smp_mb__before_clear_bit
37 #define smp_mb__before_clear_bit() smp_mb()
38 #define smp_mb__after_clear_bit() smp_mb()
44 #include <asm/byteorder.h>
45 #include <linux/linkage.h>
55 asmlinkage int __raw_bit_test_clear_asm(
volatile unsigned long *
addr,
int nr);
57 asmlinkage int __raw_bit_test_toggle_asm(
volatile unsigned long *
addr,
int nr);
59 asmlinkage int __raw_bit_test_asm(
const volatile unsigned long *
addr,
int nr);
/*
 * set_bit - set a bit in a bitmap
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * Locates the word holding bit @nr and delegates the actual set to
 * the external assembly helper.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	__raw_bit_set_asm(word, nr & 0x1f);
}
/*
 * clear_bit - clear a bit in a bitmap
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * NOTE(review): the signature line was lost in extraction; restored to
 * match the standard kernel bitops interface — confirm against the
 * original file.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	__raw_bit_clear_asm(word, nr & 0x1f);
}
/*
 * change_bit - toggle a bit in a bitmap
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * NOTE(review): the signature line was lost in extraction; restored to
 * match the standard kernel bitops interface — confirm against the
 * original file.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	__raw_bit_toggle_asm(word, nr & 0x1f);
}
/*
 * test_bit - read a bit from a bitmap
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * Returns 1 if the bit is set, 0 otherwise (the helper's result is
 * normalised with != 0).
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	volatile const unsigned long *word = addr + (nr >> 5);

	return __raw_bit_test_asm(word, nr & 0x1f) != 0;
}
/*
 * test_and_set_bit - set a bit and return its previous value
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * NOTE(review): the signature line was lost in extraction; restored to
 * match the standard kernel bitops interface — confirm against the
 * original file.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	return __raw_bit_test_set_asm(word, nr & 0x1f);
}
/*
 * test_and_clear_bit - clear a bit and return its previous value
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * NOTE(review): the signature line was lost in extraction; restored to
 * match the standard kernel bitops interface — confirm against the
 * original file.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	return __raw_bit_test_clear_asm(word, nr & 0x1f);
}
/*
 * test_and_change_bit - toggle a bit and return its previous value
 * @nr: bit number; the containing 32-bit word is selected with nr >> 5
 * @addr: base address of the bitmap
 *
 * NOTE(review): the signature line was lost in extraction; restored to
 * match the standard kernel bitops interface — confirm against the
 * original file.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *word = addr + (nr >> 5);

	return __raw_bit_test_toggle_asm(word, nr & 0x1f);
}
/*
 * Plain compiler barriers around clear_bit().  NOTE(review): this
 * appears to be the non-SMP branch (the enclosing #if is not visible
 * in this chunk) — confirm against the original file.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

/*
 * Temporarily rename test_bit so a generic header can be included
 * without clashing.  NOTE(review): the matching #include/#undef pair
 * is not visible in this chunk — confirm it follows.
 */
#define test_bit __skip_test_bit
/*
 * __arch_hweight32 - count the set bits in a 32-bit word
 * @w: word to examine
 *
 * Uses the Blackfin ONES instruction, which writes the population
 * count of a data register into a 16-bit half-register; the (Z)
 * zero-extend then widens it to the full register.
 *
 * NOTE(review): the asm template was lost in extraction and has been
 * reconstructed from the surviving constraint list ("=d"(res),
 * "d"(w)) — confirm against the original file.
 */
static inline unsigned int __arch_hweight32(unsigned int w)
{
	unsigned int res;

	__asm__ ("%0.l = ONES %1;"
		 "%0 = %0.l (Z);"
		 : "=d" (res)
		 : "d" (w));
	return res;
}
133 static inline unsigned int __arch_hweight64(
__u64 w)
135 return __arch_hweight32((
unsigned int)(w >> 32)) +
136 __arch_hweight32((
unsigned int)w);
/*
 * __arch_hweight16 - count the set bits in the low 16 bits of @w
 * @w: value to examine (upper bits are masked off)
 */
static inline unsigned int __arch_hweight16(unsigned int w)
{
	unsigned int low16 = w & 0xffff;

	return __arch_hweight32(low16);
}
/*
 * __arch_hweight8 - count the set bits in the low 8 bits of @w
 * @w: value to examine (upper bits are masked off)
 */
static inline unsigned int __arch_hweight8(unsigned int w)
{
	unsigned int low8 = w & 0xff;

	return __arch_hweight32(low8);
}