/*
 * AArch64 atomic exchange / compare-and-exchange primitives
 * (based on arch/arm64/include/asm/cmpxchg.h).
 */
18 #ifndef __ASM_CMPXCHG_H
19 #define __ASM_CMPXCHG_H
23 #include <asm/barrier.h>
25 static inline unsigned long __xchg(
unsigned long x,
volatile void *
ptr,
int size)
31 asm volatile(
"// __xchg1\n"
32 "1: ldaxrb %w0, [%3]\n"
33 " stlxrb %w1, %w2, [%3]\n"
35 :
"=&r" (
ret),
"=&r" (tmp)
40 asm volatile(
"// __xchg2\n"
41 "1: ldaxrh %w0, [%3]\n"
42 " stlxrh %w1, %w2, [%3]\n"
44 :
"=&r" (
ret),
"=&r" (tmp)
49 asm volatile(
"// __xchg4\n"
50 "1: ldaxr %w0, [%3]\n"
51 " stlxr %w1, %w2, [%3]\n"
53 :
"=&r" (
ret),
"=&r" (tmp)
58 asm volatile(
"// __xchg8\n"
60 " stlxr %w1, %2, [%3]\n"
62 :
"=&r" (
ret),
"=&r" (tmp)
74 ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
76 static inline unsigned long __cmpxchg(
volatile void *ptr,
unsigned long old,
77 unsigned long new,
int size)
79 unsigned long oldval = 0,
res;
84 asm volatile(
"// __cmpxchg1\n"
89 " stxrb %w0, %w4, [%2]\n"
91 :
"=&r" (
res),
"=&r" (oldval)
92 :
"r" (
ptr),
"Ir" (old),
"r" (
new)
99 asm volatile(
"// __cmpxchg2\n"
104 " stxrh %w0, %w4, [%2]\n"
106 :
"=&r" (
res),
"=&r" (oldval)
107 :
"r" (
ptr),
"Ir" (old),
"r" (
new)
114 asm volatile(
"// __cmpxchg4\n"
119 " stxr %w0, %w4, [%2]\n"
121 :
"=&r" (
res),
"=&r" (oldval)
122 :
"r" (
ptr),
"Ir" (old),
"r" (
new)
129 asm volatile(
"// __cmpxchg8\n"
134 " stxr %w0, %4, [%2]\n"
136 :
"=&r" (
res),
"=&r" (oldval)
137 :
"r" (
ptr),
"Ir" (old),
"r" (
new)
/*
 * __cmpxchg_mb - fully ordered compare-and-exchange.
 *
 * Same contract as __cmpxchg(), but bracketed by smp_mb() (from
 * <asm/barrier.h>) on both sides so the operation is ordered against
 * all preceding and following memory accesses.  This is the variant
 * backing the generic cmpxchg() macro below.
 */
static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}
/*
 * cmpxchg - fully ordered compare-and-exchange on *(ptr).
 * Returns the previously observed value, cast back to the pointee type.
 */
#define cmpxchg(ptr,o,n)						\
	((__typeof__(*(ptr)))__cmpxchg_mb((ptr),			\
					  (unsigned long)(o),		\
					  (unsigned long)(n),		\
					  sizeof(*(ptr))))
/*
 * cmpxchg_local - compare-and-exchange with no implicit barriers
 * (CPU-local ordering only).  Returns the previously observed value.
 */
#define cmpxchg_local(ptr,o,n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr),				\
				       (unsigned long)(o),		\
				       (unsigned long)(n),		\
				       sizeof(*(ptr))))