Linux Kernel  3.7.1
atomic-grb.h
#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H
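/*
 * gUSA-style ("LOGIN"/"LOGOUT") atomic sequences for SH.  Each helper
 * points r0 at the end label of its critical section, saves the stack
 * pointer in r1, and loads r15 with the negated byte length of the
 * section (-6 here: load, modify, store).  The negative r15 marks the
 * region: if an exception is taken mid-sequence, the exception entry
 * code can detect this and roll execution back to the start of the
 * section, so the read-modify-write completes as a unit.  The scheme
 * guards against interrupts and preemption on uniprocessor
 * configurations; it is not an SMP technique.
 */

/* Atomically add i to the counter at v. */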
static inline void atomic_add(int i, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   add     %2,   %0      \n\t" /* add */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (i)
		: "memory", "r0", "r1");
}
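/* Atomically subtract i from the counter at v. */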
static inline void atomic_sub(int i, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   sub     %2,   %0      \n\t" /* sub */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (i)
		: "memory", "r0", "r1");
}
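/* Atomically add i to the counter at v and return the new value. */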
static inline int atomic_add_return(int i, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   add     %2,   %0      \n\t" /* add */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (i)
		: "memory", "r0", "r1");

	return tmp;
}
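/* Atomically subtract i from the counter at v and return the new value. */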
static inline int atomic_sub_return(int i, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   sub     %2,   %0      \n\t" /* sub */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (i)
		: "memory", "r0", "r1");

	return tmp;
}
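/* Atomically clear the bits in mask from the counter at v. */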
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	int tmp;
	unsigned int _mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and with ~mask */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (_mask)
		: "memory", "r0", "r1");
}
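/* Atomically set the bits in mask in the counter at v. */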
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (mask)
		: "memory", "r0", "r1");
}
#endif /* __ASM_SH_ATOMIC_GRB_H */
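For context, a minimal caller-side sketch of how these primitives are typically driven. It is illustrative only: the identifiers example_refcount, example_get and example_put are hypothetical, and the sketch assumes the usual atomic_t plumbing from <linux/atomic.h>, which pulls in this header on SH parts built with the GRB variant.

#include <linux/atomic.h>

/* Hypothetical counter; atomic_t and ATOMIC_INIT() come from <linux/atomic.h>. */
static atomic_t example_refcount = ATOMIC_INIT(0);

static void example_get(void)
{
	atomic_add(1, &example_refcount);	/* the LOGIN/LOGOUT sequence above */
}

/* Returns the new count; 0 means the last reference was dropped. */
static int example_put(void)
{
	return atomic_sub_return(1, &example_refcount);
}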