Linux Kernel 3.7.1
atomic.h
/* atomic.h: atomic operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 * Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/types.h>
#include <asm/spr-regs.h>
#include <asm/cmpxchg.h>

#ifdef CONFIG_SMP
#error not SMP safe
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * We do not have SMP systems, so we don't have to deal with that.
 */

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()

#define ATOMIC_INIT(i) { (i) }
#define atomic_read(v) (*(volatile int *)&(v)->counter)
#define atomic_set(v, i) (((v)->counter) = (i))
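
/*
 * Illustrative sketch (editorial addition, not part of the original header):
 * the kind of resource counting mentioned above, using only the accessors
 * defined so far.  The counter name nr_pending is hypothetical.
 *
 *	static atomic_t nr_pending = ATOMIC_INIT(0);
 *
 *	void reset_pending(void)
 *	{
 *		atomic_set(&nr_pending, 0);
 *	}
 *
 *	int pending_count(void)
 *	{
 *		return atomic_read(&nr_pending);
 *	}
 */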

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
static inline int atomic_add_return(int i, atomic_t *v)
{
        unsigned long val;

        asm("0: \n"
            " orcc gr0,gr0,gr0,icc3 \n" /* set ICC3.Z */
            " ckeq icc3,cc7 \n"
            " ld.p %M0,%1 \n" /* LD.P/ORCR must be atomic */
            " orcr cc7,cc7,cc3 \n" /* set CC3 to true */
            " add%I2 %1,%2,%1 \n"
            " cst.p %1,%M0 ,cc3,#1 \n"
            " corcc gr29,gr29,gr0 ,cc3,#1 \n" /* clear ICC3.Z if store happens */
            " beq icc3,#0,0b \n"
            : "+U"(v->counter), "=&r"(val)
            : "NPr"(i)
            : "memory", "cc7", "cc3", "icc3"
            );

        return val;
}
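
/*
 * Rough C-level sketch of the loop above (an editorial illustration, not
 * part of the original header): LD.P/ORCR open the "reservation", CST.P
 * stores only while CC3 is still true, and BEQ retries if the store did
 * not happen.  conditional_store() is a hypothetical stand-in for the
 * CST.P/CORCC pair.
 *
 *	do {
 *		val = v->counter;	// ld.p
 *		val += i;		// add
 *	} while (!conditional_store(&v->counter, val));	// cst.p, beq 0b
 *	return val;
 */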

static inline int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long val;

        asm("0: \n"
            " orcc gr0,gr0,gr0,icc3 \n" /* set ICC3.Z */
            " ckeq icc3,cc7 \n"
            " ld.p %M0,%1 \n" /* LD.P/ORCR must be atomic */
            " orcr cc7,cc7,cc3 \n" /* set CC3 to true */
            " sub%I2 %1,%2,%1 \n"
            " cst.p %1,%M0 ,cc3,#1 \n"
            " corcc gr29,gr29,gr0 ,cc3,#1 \n" /* clear ICC3.Z if store happens */
            " beq icc3,#0,0b \n"
            : "+U"(v->counter), "=&r"(val)
            : "NPr"(i)
            : "memory", "cc7", "cc3", "icc3"
            );

        return val;
}

#else

extern int atomic_add_return(int i, atomic_t *v);
extern int atomic_sub_return(int i, atomic_t *v);

#endif

static inline int atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
        atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
        atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
        atomic_sub_return(1, v);
}

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
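
/*
 * Illustrative sketch (editorial addition, not part of the original header):
 * the classic reference-count pattern built from the helpers above.
 * struct my_obj, my_obj_put() and free_obj() are hypothetical names.
 *
 *	struct my_obj {
 *		atomic_t refcount;
 *	};
 *
 *	void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			free_obj(obj);	// last reference dropped
 *	}
 */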

/*
 * 64-bit atomic ops
 */
typedef struct {
        volatile long long counter;
} atomic64_t;

#define ATOMIC64_INIT(i) { (i) }

static inline long long atomic64_read(atomic64_t *v)
{
        long long counter;

        asm("ldd%I1 %M1,%0"
            : "=e"(counter)
            : "m"(v->counter));
        return counter;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
        asm volatile("std%I0 %1,%M0"
                     : "=m"(v->counter)
                     : "e"(i));
}
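
/*
 * Usage sketch (editorial addition, not part of the original header):
 * because ldd/std operate on a 64-bit register pair ("e" constraint),
 * atomic64_read() and atomic64_set() are each a single memory access.
 * The counter name bytes_rx is hypothetical.
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	long long bytes_so_far(void)
 *	{
 *		return atomic64_read(&bytes_rx);
 *	}
 */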

extern long long atomic64_inc_return(atomic64_t *v);
extern long long atomic64_dec_return(atomic64_t *v);
extern long long atomic64_add_return(long long i, atomic64_t *v);
extern long long atomic64_sub_return(long long i, atomic64_t *v);

static inline long long atomic64_add_negative(long long i, atomic64_t *v)
{
        return atomic64_add_return(i, v) < 0;
}

static inline void atomic64_add(long long i, atomic64_t *v)
{
        atomic64_add_return(i, v);
}

static inline void atomic64_sub(long long i, atomic64_t *v)
{
        atomic64_sub_return(i, v);
}

static inline void atomic64_inc(atomic64_t *v)
{
        atomic64_inc_return(v);
}

static inline void atomic64_dec(atomic64_t *v)
{
        atomic64_dec_return(v);
}

#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return((v)) == 0)

#define atomic_cmpxchg(v, old, new) (cmpxchg(&(v)->counter, old, new))
#define atomic_xchg(v, new) (xchg(&(v)->counter, new))
#define atomic64_cmpxchg(v, old, new) (__cmpxchg_64(old, new, &(v)->counter))
#define atomic64_xchg(v, new) (__xchg_64(new, &(v)->counter))
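
/*
 * Usage sketch (editorial addition, not part of the original header):
 * atomic_xchg() and atomic_cmpxchg() are the usual building blocks for
 * lock-free updates; __atomic_add_unless() below is one such cmpxchg retry
 * loop.  The state variable and constants here are hypothetical.
 *
 *	old = atomic_xchg(&dev_state, DEV_CLOSED);	// unconditionally swap in new state
 *
 *	if (atomic_cmpxchg(&dev_state, DEV_IDLE, DEV_BUSY) == DEV_IDLE)
 *		start_transfer();	// we won the race for the device
 */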

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
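
/*
 * Usage sketch (editorial addition, not part of the original header):
 * __atomic_add_unless() is what the generic atomic_add_unless() and
 * atomic_inc_not_zero() wrappers are built on, e.g. taking a reference
 * only while an object is still live.  struct my_obj is hypothetical.
 *
 *	int my_obj_get_if_live(struct my_obj *obj)
 *	{
 *		return __atomic_add_unless(&obj->refcount, 1, 0) != 0;
 *	}
 */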


#endif /* _ASM_ATOMIC_H */