Linux Kernel 3.7.1
xchg.h
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */

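/*
 * Illustrative sketch, not part of this file: roughly how asm/cmpxchg.h
 * drives the double inclusion described above.  This is simplified and
 * the exact macro bodies in the real header may differ; it only shows
 * the mechanism of pasting a "_local" (no barrier) and a barrier-using
 * name onto each ____xchg/____cmpxchg definition below.
 *
 *	#define __ASM__MB
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *
 *	#ifdef CONFIG_SMP
 *	#undef  __ASM__MB
 *	#define __ASM__MB	"\tmb\n"
 *	#endif
 *	#undef  ____xchg
 *	#undef  ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 */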
/*
 * Atomic exchange.
 * Since it can be used to implement critical sections,
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
        unsigned long ret, tmp, addr64;

        __asm__ __volatile__(
        "       andnot  %4,7,%3\n"
        "       insbl   %1,%4,%1\n"
        "1:     ldq_l   %2,0(%3)\n"
        "       extbl   %2,%4,%0\n"
        "       mskbl   %2,%4,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%3)\n"
        "       beq     %2,2f\n"
                __ASM__MB
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
        : "r" ((long)m), "1" (val) : "memory");

        return ret;
}

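/*
 * Illustrative sketch, not part of this file: the same extract/mask/
 * insert sequence written in plain C, so the Alpha byte instructions
 * above are easier to follow.  The GCC builtin __sync_bool_compare_and_swap
 * stands in for the ldq_l/stq_c pair; sketch_xchg_u8 is an invented name.
 */
static inline unsigned long sketch_xchg_u8(volatile char *m, unsigned long val)
{
        volatile unsigned long *q =
                (volatile unsigned long *)((unsigned long)m & ~7UL);   /* andnot: aligned quadword */
        unsigned int shift = ((unsigned long)m & 7) * 8;               /* byte lane within the quadword */
        unsigned long old, new, ret;

        do {
                old = *q;                                       /* ldq_l */
                ret = (old >> shift) & 0xff;                    /* extbl: old byte value */
                new = old & ~(0xffUL << shift);                 /* mskbl: clear the lane */
                new |= (val & 0xff) << shift;                   /* insbl + or: merge new byte */
        } while (!__sync_bool_compare_and_swap(q, old, new));  /* stq_c failed: retry */

        return ret;
}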
static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
        unsigned long ret, tmp, addr64;

        __asm__ __volatile__(
        "       andnot  %4,7,%3\n"
        "       inswl   %1,%4,%1\n"
        "1:     ldq_l   %2,0(%3)\n"
        "       extwl   %2,%4,%0\n"
        "       mskwl   %2,%4,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%3)\n"
        "       beq     %2,2f\n"
                __ASM__MB
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
        : "r" ((long)m), "1" (val) : "memory");

        return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
        unsigned long dummy;

        __asm__ __volatile__(
        "1:     ldl_l   %0,%4\n"
        "       bis     $31,%3,%1\n"
        "       stl_c   %1,%2\n"
        "       beq     %1,2f\n"
                __ASM__MB
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (val), "=&r" (dummy), "=m" (*m)
        : "rI" (val), "m" (*m) : "memory");

        return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
        unsigned long dummy;

        __asm__ __volatile__(
        "1:     ldq_l   %0,%4\n"
        "       bis     $31,%3,%1\n"
        "       stq_c   %1,%2\n"
        "       beq     %1,2f\n"
                __ASM__MB
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (val), "=&r" (dummy), "=m" (*m)
        : "rI" (val), "m" (*m) : "memory");

        return val;
}

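/*
 * Illustrative sketch, not part of this file: the two full-word variants
 * above are plain load-locked/store-conditional retry loops.  A portable
 * equivalent of the 64-bit case, using the GCC/Clang builtin, would be:
 *
 *	static inline unsigned long sketch_xchg_u64(volatile long *m,
 *						    unsigned long val)
 *	{
 *		return __atomic_exchange_n(m, val, __ATOMIC_SEQ_CST);
 *	}
 *
 * The hand-written asm lets the kernel place the memory barrier
 * (__ASM__MB) itself, so the _local variants can omit it entirely.
 */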
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 1:
                return ____xchg(_u8, ptr, x);
        case 2:
                return ____xchg(_u16, ptr, x);
        case 4:
                return ____xchg(_u32, ptr, x);
        case 8:
                return ____xchg(_u64, ptr, x);
        }
        __xchg_called_with_bad_pointer();
        return x;
}

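/*
 * Illustrative sketch, not part of this file: typical use of the xchg()
 * wrapper that asm/cmpxchg.h builds on top of ____xchg() above.  Since
 * size is sizeof(*ptr), the dispatch selects the _u64 variant here at
 * compile time.  The names below are invented for the example.
 */
static unsigned long sketch_pending;

static inline unsigned long sketch_take_pending(void)
{
        /* Atomically grab whatever was posted and leave zero behind. */
        return xchg(&sketch_pending, 0UL);
}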
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change. If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far as I can tell.
 */

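/*
 * Illustrative sketch, not part of this file: the canonical retry loop
 * built on the cmpxchg() wrapper, matching the comment above -- success
 * is detected by comparing the returned value with the "old" value that
 * was passed in.  The names below are invented for the example.
 */
static inline void sketch_atomic_or(volatile unsigned long *p, unsigned long bits)
{
        unsigned long old, prev;

        do {
                old = *p;
                prev = cmpxchg(p, old, old | bits);
        } while (prev != old);          /* someone else changed *p; retry */
}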
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
        unsigned long prev, tmp, cmp, addr64;

        __asm__ __volatile__(
        "       andnot  %5,7,%4\n"
        "       insbl   %1,%5,%1\n"
        "1:     ldq_l   %2,0(%4)\n"
        "       extbl   %2,%5,%0\n"
        "       cmpeq   %0,%6,%3\n"
        "       beq     %3,2f\n"
        "       mskbl   %2,%5,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%4)\n"
        "       beq     %2,3f\n"
                __ASM__MB
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

        return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
        unsigned long prev, tmp, cmp, addr64;

        __asm__ __volatile__(
        "       andnot  %5,7,%4\n"
        "       inswl   %1,%5,%1\n"
        "1:     ldq_l   %2,0(%4)\n"
        "       extwl   %2,%5,%0\n"
        "       cmpeq   %0,%6,%3\n"
        "       beq     %3,2f\n"
        "       mskwl   %2,%5,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%4)\n"
        "       beq     %2,3f\n"
                __ASM__MB
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

        return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
        unsigned long prev, cmp;

        __asm__ __volatile__(
        "1:     ldl_l   %0,%5\n"
        "       cmpeq   %0,%3,%1\n"
        "       beq     %1,2f\n"
        "       mov     %4,%1\n"
        "       stl_c   %1,%2\n"
        "       beq     %1,3f\n"
                __ASM__MB
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
        : "r"((long) old), "r"(new), "m"(*m) : "memory");

        return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
        unsigned long prev, cmp;

        __asm__ __volatile__(
        "1:     ldq_l   %0,%5\n"
        "       cmpeq   %0,%3,%1\n"
        "       beq     %1,2f\n"
        "       mov     %4,%1\n"
        "       stq_c   %1,%2\n"
        "       beq     %1,3f\n"
                __ASM__MB
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
        : "r"((long) old), "r"(new), "m"(*m) : "memory");

        return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
            int size)
{
        switch (size) {
        case 1:
                return ____cmpxchg(_u8, ptr, old, new);
        case 2:
                return ____cmpxchg(_u16, ptr, old, new);
        case 4:
                return ____cmpxchg(_u32, ptr, old, new);
        case 8:
                return ____cmpxchg(_u64, ptr, old, new);
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

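/*
 * Illustrative sketch, not part of this file: because size is
 * sizeof(*ptr), a compile-time constant, the switch above folds to a
 * single call, and an unsupported size leaves only the call to the
 * deliberately-undefined __cmpxchg_called_with_bad_pointer(), turning a
 * misuse into a link-time error instead of a silent runtime bug.  A
 * typical caller of the cmpxchg() wrapper looks like this (names are
 * invented for the example):
 */
static inline int sketch_claim_once(unsigned long *owner, unsigned long me)
{
        /* 0 means unclaimed; only the first caller to install "me" wins. */
        return cmpxchg(owner, 0UL, me) == 0UL;
}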
#endif