// Internal atomic operations for builds instrumented with ThreadSanitizer
// (TSan): every operation is routed through TSan's __tsan_atomic* interface
// so the race detector can observe the synchronization.

#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_

#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

#include <sanitizer/tsan_interface_atomic.h>

// 32-bit operations.

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  // Relaxed store followed by a full (seq_cst) fence.
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  // Full (seq_cst) fence followed by a relaxed load.
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

// 64-bit operations.

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  // Relaxed store followed by a full (seq_cst) fence.
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  // Full (seq_cst) fence followed by a relaxed load.
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

#undef ATOMICOPS_COMPILER_BARRIER

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_TSAN_H_
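The functions above implement protobuf's generic atomicops interface for TSan builds. As a rough usage sketch, not taken from this header and assuming the usual public entry point google/protobuf/stubs/atomicops.h (which supplies the Atomic32 typedef and places these functions in google::protobuf::internal), a publish/consume pairing would use Release_Store and Acquire_Load so that ThreadSanitizer records a happens-before edge instead of reporting a race:

// Illustrative only; payload/ready and the two functions are made up
// for the example.
#include <google/protobuf/stubs/atomicops.h>

using google::protobuf::internal::Atomic32;
using google::protobuf::internal::Acquire_Load;
using google::protobuf::internal::Release_Store;

static int payload = 0;       // ordinary data guarded by the flag below
static Atomic32 ready = 0;    // publication flag

void Producer() {
  payload = 42;               // plain write happens first
  Release_Store(&ready, 1);   // release store publishes the payload
}

void Consumer() {
  if (Acquire_Load(&ready) == 1) {
    // The acquire load pairs with the release store, so reading payload
    // here is ordered and TSan does not flag it as a data race.
    int observed = payload;
    (void)observed;
  }
}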