Linux Kernel 3.7.1
atomic.h
/* $Id: atomic.h,v 1.3 2001/07/25 16:15:19 bjornw Exp $ */

#ifndef __ASM_CRIS_ATOMIC__
#define __ASM_CRIS_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <arch/atomic.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i) { (i) }

#define atomic_read(v) (*(volatile int *)&(v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))
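
/*
 * Usage sketch (illustrative, not part of the original header): a counter
 * is declared with ATOMIC_INIT and inspected with atomic_read(). Note that
 * atomic_set() is a plain store, not a read-modify-write, so it is only
 * safe against the helpers below when no update can race with it. The
 * name packet_count is invented for the example.
 */
static atomic_t packet_count = ATOMIC_INIT(0);

static inline int packets_seen(void)
{
	return atomic_read(&packet_count);	/* volatile read of v->counter */
}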

/* These should be written in asm but we do it in C for now. */

static inline void atomic_add(int i, volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	v->counter += i;
	cris_atomic_restore(v, flags);
}
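
/*
 * Every helper in this file shares one pattern: enter a critical section
 * with cris_atomic_save(), do a plain C read-modify-write, and leave it
 * with cris_atomic_restore(). A minimal sketch of the same idea written
 * against the generic local_irq_save()/local_irq_restore() pair — an
 * assumption about what the CRIS macros amount to on a UP kernel; the
 * real definitions live in <arch/atomic.h> and may take a lock on SMP.
 * The operation and its name, example_atomic_or, are invented here:
 */
static inline void example_atomic_or(int i, volatile atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);		/* nothing can interleave with the RMW */
	v->counter |= i;		/* ordinary C once we are alone */
	local_irq_restore(flags);
}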

static inline void atomic_sub(int i, volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	v->counter -= i;
	cris_atomic_restore(v, flags);
}

static inline int atomic_add_return(int i, volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = (v->counter += i);
	cris_atomic_restore(v, flags);
	return retval;
}

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
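
/*
 * atomic_add_negative() builds directly on atomic_add_return(): add, then
 * test the sign of the value it returns. A hedged usage sketch with an
 * invented credit counter that goes negative when overdrawn:
 */
static inline int example_charge(atomic_t *credit, int cost)
{
	/* true if this charge pushed the balance below zero */
	return atomic_add_negative(cost, credit);
}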

static inline int atomic_sub_return(int i, volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = (v->counter -= i);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_sub_and_test(int i, volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = (v->counter -= i) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}

static inline void atomic_inc(volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	(v->counter)++;
	cris_atomic_restore(v, flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	unsigned long flags;
	cris_atomic_save(v, flags);
	(v->counter)--;
	cris_atomic_restore(v, flags);
}

static inline int atomic_inc_return(volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = ++(v->counter);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_dec_return(volatile atomic_t *v)
{
	unsigned long flags;
	int retval;
	cris_atomic_save(v, flags);
	retval = --(v->counter);
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = --(v->counter) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}
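
/*
 * The canonical consumer of atomic_dec_and_test() is reference counting:
 * exactly one caller observes the 1 -> 0 transition and frees the object.
 * Illustrative sketch only: struct example_obj is invented, kfree() would
 * need <linux/slab.h>, and real kernel code would normally use struct kref.
 */
struct example_obj {
	atomic_t refcnt;
	/* ... payload ... */
};

static inline void example_obj_put(struct example_obj *obj)
{
	if (atomic_dec_and_test(&obj->refcnt))
		kfree(obj);	/* we held the last reference */
}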

static inline int atomic_inc_and_test(volatile atomic_t *v)
{
	int retval;
	unsigned long flags;
	cris_atomic_save(v, flags);
	retval = ++(v->counter) == 0;
	cris_atomic_restore(v, flags);
	return retval;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	cris_atomic_save(v, flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	cris_atomic_restore(v, flags);
	return ret;
}
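
/*
 * atomic_cmpxchg() returns the value it observed before any store, so a
 * caller can tell whether its update won and retry otherwise. A minimal
 * sketch of that loop, here implementing "increment but never exceed a
 * limit" (example_atomic_inc_below is an invented name):
 */
static inline int example_atomic_inc_below(atomic_t *v, int limit)
{
	int old, cur = atomic_read(v);

	while (cur < limit) {
		old = atomic_cmpxchg(v, cur, cur + 1);
		if (old == cur)
			return 1;	/* our compare-and-swap succeeded */
		cur = old;		/* lost a race; retry from the fresh value */
	}
	return 0;			/* already at the limit */
}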

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	cris_atomic_save(v, flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	cris_atomic_restore(v, flags);
	return ret;
}
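
/*
 * __atomic_add_unless() returns the old value; the generic wrappers in
 * <linux/atomic.h> build atomic_add_unless() on top of it and, with the
 * arguments (1, 0), atomic_inc_not_zero(). Sketch of the usual "take a
 * reference only while the object is still live" idiom (example_try_get
 * is an invented name):
 */
static inline int example_try_get(atomic_t *refcnt)
{
	/* fails once the count has already hit zero */
	return __atomic_add_unless(refcnt, 1, 0) != 0;
}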

/*
 * Atomic operations are already serializing, so these barriers
 * only need to constrain the compiler.
 */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()

#endif /* __ASM_CRIS_ATOMIC__ */