5 #ifndef V8_BASE_ATOMICOPS_INTERNALS_STD_H_ 6 #define V8_BASE_ATOMICOPS_INTERNALS_STD_H_ 10 #include "src/base/build_config.h" 11 #include "src/base/macros.h" 18 volatile std::atomic<T>* to_std_atomic(
volatile T* ptr) {
19 return reinterpret_cast<volatile std::atomic<T>*
>(ptr);
namespace helper {

// Const counterpart of to_std_atomic: reinterprets a raw (volatile const) T*
// as a pointer to a read-only std::atomic<T> for use with atomic loads.
template <typename T>
volatile const std::atomic<T>* to_std_atomic_const(volatile const T* ptr) {
  return reinterpret_cast<volatile const std::atomic<T>*>(ptr);
}

}  // namespace helper
// Issues a full sequentially-consistent memory fence.
inline void SeqCst_MemoryFence() {
  std::atomic_thread_fence(std::memory_order_seq_cst);
}
31 inline Atomic32 Relaxed_CompareAndSwap(
volatile Atomic32* ptr,
32 Atomic32 old_value, Atomic32 new_value) {
33 std::atomic_compare_exchange_strong_explicit(
34 helper::to_std_atomic(ptr), &old_value, new_value,
35 std::memory_order_relaxed, std::memory_order_relaxed);
39 inline Atomic32 Relaxed_AtomicExchange(
volatile Atomic32* ptr,
41 return std::atomic_exchange_explicit(helper::to_std_atomic(ptr), new_value,
42 std::memory_order_relaxed);
45 inline Atomic32 Relaxed_AtomicIncrement(
volatile Atomic32* ptr,
47 return increment + std::atomic_fetch_add_explicit(helper::to_std_atomic(ptr),
49 std::memory_order_relaxed);
52 inline Atomic32 Barrier_AtomicIncrement(
volatile Atomic32* ptr,
54 return increment + std::atomic_fetch_add_explicit(helper::to_std_atomic(ptr),
56 std::memory_order_seq_cst);
59 inline Atomic32 Acquire_CompareAndSwap(
volatile Atomic32* ptr,
60 Atomic32 old_value, Atomic32 new_value) {
61 atomic_compare_exchange_strong_explicit(
62 helper::to_std_atomic(ptr), &old_value, new_value,
63 std::memory_order_acquire, std::memory_order_acquire);
67 inline Atomic8 Release_CompareAndSwap(
volatile Atomic8* ptr, Atomic8 old_value,
69 bool result = atomic_compare_exchange_strong_explicit(
70 helper::to_std_atomic(ptr), &old_value, new_value,
71 std::memory_order_release, std::memory_order_relaxed);
76 inline Atomic32 Release_CompareAndSwap(
volatile Atomic32* ptr,
77 Atomic32 old_value, Atomic32 new_value) {
78 atomic_compare_exchange_strong_explicit(
79 helper::to_std_atomic(ptr), &old_value, new_value,
80 std::memory_order_release, std::memory_order_relaxed);
84 inline void Relaxed_Store(
volatile Atomic8* ptr, Atomic8 value) {
85 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
86 std::memory_order_relaxed);
89 inline void Relaxed_Store(
volatile Atomic16* ptr, Atomic16 value) {
90 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
91 std::memory_order_relaxed);
94 inline void Relaxed_Store(
volatile Atomic32* ptr, Atomic32 value) {
95 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
96 std::memory_order_relaxed);
99 inline void Release_Store(
volatile Atomic32* ptr, Atomic32 value) {
100 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
101 std::memory_order_release);
104 inline Atomic8 Relaxed_Load(
volatile const Atomic8* ptr) {
105 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
106 std::memory_order_relaxed);
109 inline Atomic16 Relaxed_Load(
volatile const Atomic16* ptr) {
110 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
111 std::memory_order_relaxed);
114 inline Atomic32 Relaxed_Load(
volatile const Atomic32* ptr) {
115 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
116 std::memory_order_relaxed);
119 inline Atomic32 Acquire_Load(
volatile const Atomic32* ptr) {
120 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
121 std::memory_order_acquire);
124 #if defined(V8_HOST_ARCH_64_BIT) 126 inline Atomic64 Relaxed_CompareAndSwap(
volatile Atomic64* ptr,
127 Atomic64 old_value, Atomic64 new_value) {
128 std::atomic_compare_exchange_strong_explicit(
129 helper::to_std_atomic(ptr), &old_value, new_value,
130 std::memory_order_relaxed, std::memory_order_relaxed);
134 inline Atomic64 Relaxed_AtomicExchange(
volatile Atomic64* ptr,
135 Atomic64 new_value) {
136 return std::atomic_exchange_explicit(helper::to_std_atomic(ptr), new_value,
137 std::memory_order_relaxed);
140 inline Atomic64 Relaxed_AtomicIncrement(
volatile Atomic64* ptr,
141 Atomic64 increment) {
142 return increment + std::atomic_fetch_add_explicit(helper::to_std_atomic(ptr),
144 std::memory_order_relaxed);
147 inline Atomic64 Barrier_AtomicIncrement(
volatile Atomic64* ptr,
148 Atomic64 increment) {
149 return increment + std::atomic_fetch_add_explicit(helper::to_std_atomic(ptr),
151 std::memory_order_seq_cst);
154 inline Atomic64 Acquire_CompareAndSwap(
volatile Atomic64* ptr,
155 Atomic64 old_value, Atomic64 new_value) {
156 std::atomic_compare_exchange_strong_explicit(
157 helper::to_std_atomic(ptr), &old_value, new_value,
158 std::memory_order_acquire, std::memory_order_acquire);
162 inline Atomic64 Release_CompareAndSwap(
volatile Atomic64* ptr,
163 Atomic64 old_value, Atomic64 new_value) {
164 std::atomic_compare_exchange_strong_explicit(
165 helper::to_std_atomic(ptr), &old_value, new_value,
166 std::memory_order_release, std::memory_order_relaxed);
170 inline void Relaxed_Store(
volatile Atomic64* ptr, Atomic64 value) {
171 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
172 std::memory_order_relaxed);
175 inline void Release_Store(
volatile Atomic64* ptr, Atomic64 value) {
176 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
177 std::memory_order_release);
180 inline Atomic64 Relaxed_Load(
volatile const Atomic64* ptr) {
181 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
182 std::memory_order_relaxed);
185 inline Atomic64 Acquire_Load(
volatile const Atomic64* ptr) {
186 return std::atomic_load_explicit(helper::to_std_atomic_const(ptr),
187 std::memory_order_acquire);
190 #endif // defined(V8_HOST_ARCH_64_BIT) 194 #endif // V8_BASE_ATOMICOPS_INTERNALS_STD_H_