Linux Kernel 3.7.1
bitops.h
#ifndef _ASM_M32R_BITOPS_H
#define _ASM_M32R_BITOPS_H

/*
 * linux/include/asm-m32r/bitops.h
 *
 * Copyright 1992, Linus Torvalds.
 *
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <asm/assembler.h>
#include <asm/byteorder.h>
#include <asm/dcache_clear.h>
#include <asm/types.h>

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

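/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */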
static __inline__ void set_bit(int nr, volatile void * addr)
{
	__u32 mask;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);
	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "r6", "%1")
		M32R_LOCK" %0, @%1;	\n\t"
		"or %0, %2;		\n\t"
		M32R_UNLOCK" %0, @%1;	\n\t"
		: "=&r" (tmp)
		: "r" (a), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}

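/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */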
static __inline__ void clear_bit(int nr, volatile void * addr)
{
	__u32 mask;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);

	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "r6", "%1")
		M32R_LOCK" %0, @%1;	\n\t"
		"and %0, %2;		\n\t"
		M32R_UNLOCK" %0, @%1;	\n\t"
		: "=&r" (tmp)
		: "r" (a), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}

#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

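/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */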
static __inline__ void change_bit(int nr, volatile void * addr)
{
	__u32 mask;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);
	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "r6", "%1")
		M32R_LOCK" %0, @%1;	\n\t"
		"xor %0, %2;		\n\t"
		M32R_UNLOCK" %0, @%1;	\n\t"
		: "=&r" (tmp)
		: "r" (a), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}

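/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */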
static __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	__u32 mask, oldbit;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);
	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "%1", "%2")
		M32R_LOCK" %0, @%2;	\n\t"
		"mv %1, %0;		\n\t"
		"and %0, %3;		\n\t"
		"or %1, %3;		\n\t"
		M32R_UNLOCK" %1, @%2;	\n\t"
		: "=&r" (oldbit), "=&r" (tmp)
		: "r" (a), "r" (mask)
		: "memory"
	);
	local_irq_restore(flags);

	return (oldbit != 0);
}

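/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */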
static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	__u32 mask, oldbit;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);

	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "%1", "%3")
		M32R_LOCK" %0, @%3;	\n\t"
		"mv %1, %0;		\n\t"
		"and %0, %2;		\n\t"
		"not %2, %2;		\n\t"
		"and %1, %2;		\n\t"
		M32R_UNLOCK" %1, @%3;	\n\t"
		: "=&r" (oldbit), "=&r" (tmp), "+r" (mask)
		: "r" (a)
		: "memory"
	);
	local_irq_restore(flags);

	return (oldbit != 0);
}

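/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */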
static __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	__u32 mask, oldbit;
	volatile __u32 *a = addr;
	unsigned long flags;
	unsigned long tmp;

	a += (nr >> 5);
	mask = (1 << (nr & 0x1F));

	local_irq_save(flags);
	__asm__ __volatile__ (
		DCACHE_CLEAR("%0", "%1", "%2")
		M32R_LOCK" %0, @%2;	\n\t"
		"mv %1, %0;		\n\t"
		"and %0, %3;		\n\t"
		"xor %1, %3;		\n\t"
		M32R_UNLOCK" %1, @%2;	\n\t"
		: "=&r" (oldbit), "=&r" (tmp)
		: "r" (a), "r" (mask)
		: "memory"
	);
	local_irq_restore(flags);

	return (oldbit != 0);
}

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

#endif /* __KERNEL__ */

#ifdef __KERNEL__

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_M32R_BITOPS_H */
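
The comment at the top of the file states that every operation here is atomic and that bit 0 is the LSB of the first 32-bit word at addr, with bit 32 falling in the next word, so a multi-word bitmap can be addressed with a single bit number. The following is a minimal usage sketch of that interface; the busy_map bitmap and the claim_channel()/release_channel() helpers are hypothetical illustration, not part of this header.

/* Hypothetical caller code, not part of bitops.h: a 64-entry bitmap of
 * busy channels manipulated with the atomic helpers defined above. */
#include <linux/bitops.h>

static unsigned long busy_map[2];	/* bits 0..63; bit 32 is the LSB of busy_map[1] */

static int claim_channel(int ch)
{
	/* Nonzero return means the bit was already set, i.e. the channel was taken. */
	return test_and_set_bit(ch, busy_map);
}

static void release_channel(int ch)
{
	/* clear_bit() has no implicit barrier, so order prior stores explicitly. */
	smp_mb__before_clear_bit();
	clear_bit(ch, busy_map);
}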