Linux Kernel 3.7.1
cmpxchg.h
#ifndef _ASM_M32R_CMPXCHG_H
#define _ASM_M32R_CMPXCHG_H

/*
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/irqflags.h>
#include <asm/assembler.h>
#include <asm/dcache_clear.h>

extern void __xchg_called_with_bad_pointer(void);

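/*
 * __xchg(): atomically exchange the 1-, 2- or 4-byte object at *ptr with x
 * and return the previous value.  Interrupts are disabled around the access;
 * on SMP only the 4-byte case is handled and it uses the lock/unlock
 * instruction pair.  Any other size resolves to
 * __xchg_called_with_bad_pointer() and fails at link time.
 */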
static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
        unsigned long flags;
        unsigned long tmp = 0;

        local_irq_save(flags);

        switch (size) {
#ifndef CONFIG_SMP
        case 1:
                __asm__ __volatile__ (
                        "ldb %0, @%2 \n\t"
                        "stb %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 2:
                __asm__ __volatile__ (
                        "ldh %0, @%2 \n\t"
                        "sth %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 4:
                __asm__ __volatile__ (
                        "ld %0, @%2 \n\t"
                        "st %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
#else  /* CONFIG_SMP */
        case 4:
                __asm__ __volatile__ (
                        DCACHE_CLEAR("%0", "r4", "%2")
                        "lock %0, @%2; \n\t"
                        "unlock %1, @%2; \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr)
                        : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                        , "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
                );
                break;
#endif /* CONFIG_SMP */
        default:
                __xchg_called_with_bad_pointer();
        }

        local_irq_restore(flags);

        return (tmp);
}

#define xchg(ptr, x) \
        ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))

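/*
 * Illustrative usage sketch (hypothetical caller, not taken from the kernel):
 *
 *      static unsigned int pending;            // hypothetical flag word
 *
 *      unsigned int old = xchg(&pending, 0);   // atomically read and clear
 *
 * The result is cast back to the pointed-to type, so `old' has the same type
 * as *ptr.  Only sizes handled by __xchg() above (4 bytes always; 1 and 2
 * bytes on non-SMP builds) link successfully.
 */
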
static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
        unsigned long flags;
        unsigned long tmp = 0;

        local_irq_save(flags);

        switch (size) {
        case 1:
                __asm__ __volatile__ (
                        "ldb %0, @%2 \n\t"
                        "stb %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 2:
                __asm__ __volatile__ (
                        "ldh %0, @%2 \n\t"
                        "sth %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 4:
                __asm__ __volatile__ (
                        "ld %0, @%2 \n\t"
                        "st %1, @%2 \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        default:
                __xchg_called_with_bad_pointer();
        }

        local_irq_restore(flags);

        return (tmp);
}

#define xchg_local(ptr, x) \
        ((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
                        sizeof(*(ptr))))

#define __HAVE_ARCH_CMPXCHG 1

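/*
 * __cmpxchg_u32(): if *p equals old, store new into *p; in either case the
 * previous value of *p is returned.  The access is bracketed by the
 * M32R_LOCK load and M32R_UNLOCK store helpers with interrupts disabled,
 * and on the mismatch path the loaded value is simply stored back so the
 * location is always released.
 */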
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
        unsigned long flags;
        unsigned int retval;

        local_irq_save(flags);
        __asm__ __volatile__ (
                DCACHE_CLEAR("%0", "r4", "%1")
                M32R_LOCK" %0, @%1; \n"
                " bne %0, %2, 1f; \n"
                M32R_UNLOCK" %3, @%1; \n"
                " bra 2f; \n"
                " .fillinsn \n"
                "1:"
                M32R_UNLOCK" %0, @%1; \n"
                " .fillinsn \n"
                "2:"
                : "=&r" (retval)
                : "r" (p), "r" (old), "r" (new)
                : "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return retval;
}

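/*
 * __cmpxchg_local_u32(): same compare-and-exchange protocol as
 * __cmpxchg_u32(), but with plain ld/st instructions; it is protected only
 * by the local interrupt disable, so it is atomic with respect to the
 * current CPU only.
 */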
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
                    unsigned int new)
{
        unsigned long flags;
        unsigned int retval;

        local_irq_save(flags);
        __asm__ __volatile__ (
                DCACHE_CLEAR("%0", "r4", "%1")
                "ld %0, @%1; \n"
                " bne %0, %2, 1f; \n"
                "st %3, @%1; \n"
                " bra 2f; \n"
                " .fillinsn \n"
                "1:"
                "st %0, @%1; \n"
                " .fillinsn \n"
                "2:"
                : "=&r" (retval)
                : "r" (p), "r" (old), "r" (new)
                : "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return retval;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
#if 0 /* we don't have __cmpxchg_u64 */
        case 8:
                return __cmpxchg_u64(ptr, old, new);
#endif /* 0 */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n) \
        ((__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))

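/*
 * Illustrative usage sketch (hypothetical caller, not taken from the kernel):
 *
 *      static unsigned int owner;              // hypothetical lock word
 *
 *      if (cmpxchg(&owner, 0u, 1u) == 0u)      // claim only if currently free
 *              ;                               // 1 was installed atomically
 *
 * cmpxchg() returns the value that was in *ptr before the call, so comparing
 * it with the expected old value tells the caller whether the swap happened.
 */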

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_local_u32(ptr, old, new);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU.  Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
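/*
 * Note: there is no native 64-bit cmpxchg on M32R (see the disabled
 * __cmpxchg_u64 case above), so cmpxchg64_local() forwards to the generic
 * helper from <asm-generic/cmpxchg-local.h>, which protects the access by
 * disabling interrupts on the local CPU.
 */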

#endif /* _ASM_M32R_CMPXCHG_H */