Linux Kernel  3.7.1
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
atomic.h
Go to the documentation of this file.
1 #ifndef _ASM_M32R_ATOMIC_H
2 #define _ASM_M32R_ATOMIC_H
3 
4 /*
5  * linux/include/asm-m32r/atomic.h
6  *
7  * M32R version:
8  * Copyright (C) 2001, 2002 Hitoshi Yamamoto
9  * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
10  */
11 
12 #include <linux/types.h>
13 #include <asm/assembler.h>
14 #include <asm/cmpxchg.h>
15 #include <asm/dcache_clear.h>
16 
17 /*
18  * Atomic operations that C can't guarantee us. Useful for
19  * resource counting etc..
20  */
21 
/* Static initializer for an atomic_t, e.g. atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  The volatile cast forces a real
 * load from memory each time, so the compiler cannot cache the value
 * in a register across uses.
 */
#define atomic_read(v) (*(volatile int *)&(v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i (a plain store; single-copy
 * atomicity of an aligned int store is assumed on this architecture).
 */
#define atomic_set(v,i) (((v)->counter) = (i))
40 
/**
 * atomic_add_return - add integer to atomic variable and return result
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the new value (@v + @i).
 *
 * Atomicity is achieved by disabling local interrupts around a
 * LOCK/UNLOCK load-modify-store sequence on @v->counter.
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_add_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* workaround scratch: r4 */
		M32R_LOCK" %0, @%1; \n\t"	/* result = *(&v->counter) */
		"add %0, %2; \n\t"		/* result += i */
		M32R_UNLOCK" %0, @%1; \n\t"	/* *(&v->counter) = result */
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r4 is clobbered by DCACHE_CLEAR on this chip erratum */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
71 
/**
 * atomic_sub_return - subtract integer from atomic variable and return result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the new value (@v - @i).
 *
 * Atomicity is achieved by disabling local interrupts around a
 * LOCK/UNLOCK load-modify-store sequence on @v->counter.
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_sub_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* workaround scratch: r4 */
		M32R_LOCK" %0, @%1; \n\t"	/* result = *(&v->counter) */
		"sub %0, %2; \n\t"		/* result -= i */
		M32R_UNLOCK" %0, @%1; \n\t"	/* *(&v->counter) = result */
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r4 is clobbered by DCACHE_CLEAR on this chip erratum */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
102 
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v; the new value is discarded.
 */
#define atomic_add(i,v) ((void) atomic_add_return((i), (v)))

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v; the new value is discarded.
 */
#define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns true if the result is
 * zero, false otherwise.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
131 
139 {
140  unsigned long flags;
141  int result;
142 
143  local_irq_save(flags);
144  __asm__ __volatile__ (
145  "# atomic_inc_return \n\t"
146  DCACHE_CLEAR("%0", "r4", "%1")
147  M32R_LOCK" %0, @%1; \n\t"
148  "addi %0, #1; \n\t"
149  M32R_UNLOCK" %0, @%1; \n\t"
150  : "=&r" (result)
151  : "r" (&v->counter)
152  : "memory"
153 #ifdef CONFIG_CHIP_M32700_TS1
154  , "r4"
155 #endif /* CONFIG_CHIP_M32700_TS1 */
156  );
157  local_irq_restore(flags);
158 
159  return result;
160 }
161 
169 {
170  unsigned long flags;
171  int result;
172 
173  local_irq_save(flags);
174  __asm__ __volatile__ (
175  "# atomic_dec_return \n\t"
176  DCACHE_CLEAR("%0", "r4", "%1")
177  M32R_LOCK" %0, @%1; \n\t"
178  "addi %0, #-1; \n\t"
179  M32R_UNLOCK" %0, @%1; \n\t"
180  : "=&r" (result)
181  : "r" (&v->counter)
182  : "memory"
183 #ifdef CONFIG_CHIP_M32700_TS1
184  , "r4"
185 #endif /* CONFIG_CHIP_M32700_TS1 */
186  );
187  local_irq_restore(flags);
188 
189  return result;
190 }
191 
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1; the new value is discarded.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1; the new value is discarded.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, false otherwise.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is
 * zero, false otherwise.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is
 * negative, false otherwise.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

/* Compare-and-swap / exchange on the counter, delegated to
 * the generic cmpxchg/xchg from <asm/cmpxchg.h>. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
241 
251 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
252 {
253  int c, old;
254  c = atomic_read(v);
255  for (;;) {
256  if (unlikely(c == (u)))
257  break;
258  old = atomic_cmpxchg((v), c, c + (a));
259  if (likely(old == c))
260  break;
261  c = old;
262  }
263  return c;
264 }
265 
266 
/**
 * atomic_clear_mask - atomically clear bits in an atomic variable
 * @mask: bits to clear
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr &= ~mask via an interrupt-disabled
 * LOCK/UNLOCK load-modify-store sequence.  Note the scratch register
 * here is r5 (not r4 as in the arithmetic ops above).
 */
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")	/* workaround scratch: r5 */
		M32R_LOCK" %0, @%1; \n\t"	/* tmp = *addr */
		"and %0, %2; \n\t"		/* tmp &= ~mask */
		M32R_UNLOCK" %0, @%1; \n\t"	/* *addr = tmp */
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r5 is clobbered by DCACHE_CLEAR on this chip erratum */
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
288 
/**
 * atomic_set_mask - atomically set bits in an atomic variable
 * @mask: bits to set
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr |= mask via an interrupt-disabled
 * LOCK/UNLOCK load-modify-store sequence.  Note the scratch register
 * here is r5 (not r4 as in the arithmetic ops above).
 */
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")	/* workaround scratch: r5 */
		M32R_LOCK" %0, @%1; \n\t"	/* tmp = *addr */
		"or %0, %2; \n\t"		/* tmp |= mask */
		M32R_UNLOCK" %0, @%1; \n\t"	/* *addr = tmp */
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r5 is clobbered by DCACHE_CLEAR on this chip erratum */
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
310 
/* Atomic operations are already serializing on m32r, so only a
 * compiler barrier is needed around atomic inc/dec — no hardware
 * memory barrier instruction is required. */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
316 
317 #endif /* _ASM_M32R_ATOMIC_H */