Linux Kernel 3.7.1
atomic.h
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
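
/*
 * Illustrative usage (not part of the original header): a minimal sketch of
 * declaring an atomic counter and using the plain accessors. The variable
 * and function names are hypothetical.
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	void example_reset(void)
 *	{
 *		atomic_set(&example_count, 0);
 *	}
 *
 *	int example_snapshot(void)
 *	{
 *		return atomic_read(&example_count);
 *	}
 *
 * atomic_read() and atomic_set() are single lwz/stw accesses and provide no
 * ordering guarantees of their own.
 */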

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
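
/*
 * Illustrative usage (not part of the original header): atomic_add() is a
 * fire-and-forget update with no ordering guarantees, while
 * atomic_add_return() brackets its lwarx/stwcx. loop with
 * PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER and so also acts as a
 * memory barrier. A hypothetical sketch:
 *
 *	static atomic_t bytes_queued = ATOMIC_INIT(0);
 *
 *	void account_bytes(int n)
 *	{
 *		atomic_add(n, &bytes_queued);	// no ordering needed
 *	}
 *
 *	int account_bytes_and_check(int n, int limit)
 *	{
 *		// ordered read-modify-write; returns the new total
 *		return atomic_add_return(n, &bytes_queued) > limit;
 *	}
 */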

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
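
/*
 * Illustrative usage (not part of the original header): since the counter is
 * signed, atomic_inc_and_test() is typically used with counters that start
 * negative and fire when they reach zero. A hypothetical sketch of waiting
 * for the last of N workers:
 *
 *	static atomic_t outstanding;
 *
 *	void example_setup(int nr_workers)
 *	{
 *		atomic_set(&outstanding, -nr_workers);
 *	}
 *
 *	void example_worker_done(void)
 *	{
 *		if (atomic_inc_and_test(&outstanding))
 *			pr_info("all workers finished\n");
 *	}
 */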

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
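
/*
 * Illustrative usage (not part of the original header): a typical
 * compare-and-swap retry loop built on atomic_cmpxchg(). The function name
 * and the clamping policy are hypothetical.
 *
 *	static int example_add_clamped(atomic_t *v, int a, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = old + a;
 *			if (new > max)
 *				new = max;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return new;
 *	}
 */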

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
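
/*
 * Illustrative note (not part of the original header): __atomic_add_unless()
 * adds @a to @v unless @v equals @u, and returns the value @v held before
 * the attempt (the trailing subf recreates the old value after a successful
 * store). The generic atomic_add_unless() wrapper is then usually expressed
 * as "old value != u". A hypothetical sketch:
 *
 *	// returns non-zero if the reference was taken
 *	static int example_get_ref(atomic_t *refs)
 *	{
 *		return __atomic_add_unless(refs, 1, 0) != 0;
 *	}
 */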

static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
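
/*
 * Illustrative usage (not part of the original header): the classic
 * lookup/put reference-count pattern built from atomic_inc_not_zero() and
 * atomic_dec_and_test(). The struct and function names are hypothetical.
 *
 *	struct example_obj {
 *		atomic_t refcount;
 *	};
 *
 *	// fails (returns 0) if the object is already being torn down
 *	static int example_get(struct example_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refcount);
 *	}
 *
 *	static void example_put(struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			kfree(obj);
 *	}
 */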

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
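
/*
 * Illustrative usage (not part of the original header): because the return
 * value is "old value minus 1" whether or not the store happened, callers
 * check for a negative result to detect the not-decremented case. A
 * hypothetical non-blocking "take one token" helper:
 *
 *	static int example_take_token(atomic_t *tokens)
 *	{
 *		// >= 0 means a token was successfully consumed
 *		return atomic_dec_if_positive(tokens) >= 0;
 *	}
 */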

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */