#ifndef _GLIBCXX_ATOMIC_2_H
#define _GLIBCXX_ATOMIC_2_H 1

#pragma GCC system_header

// 2 == __atomic2 == Always lock-free; implemented on the __sync builtins.
namespace __atomic2
{
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }
    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile
    {
      // Redundant synchronize if the built-in used for the lock is
      // already a full barrier.
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
      return __sync_lock_test_and_set(&_M_i, 1);
    }
    void
    clear(memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_lock_release(&_M_i);
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
    }
  };
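
  // Illustrative usage, not part of the header: atomic_flag is the
  // building block for a minimal spinlock.  The names __my_lock,
  // __acquire, and __release below are hypothetical.
  //
  //   __atomic2::atomic_flag __my_lock(false);
  //
  //   void __acquire()
  //   { while (__my_lock.test_and_set(memory_order_acquire)) { } }
  //
  //   void __release()
  //   { __my_lock.clear(memory_order_release); }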
  /// 29.4.2, address types
  struct atomic_address
  {
  private:
    void* _M_i;

  public:
    atomic_address() = default;
    ~atomic_address() = default;
    atomic_address(const atomic_address&) = delete;
    atomic_address& operator=(const atomic_address&) = delete;

    atomic_address(void* __v) { _M_i = __v; }

    bool
    is_lock_free() const volatile
    { return true; }
    void
    store(void* __v, memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);
      __glibcxx_assert(__m != memory_order_consume);

      if (__m == memory_order_relaxed)
        _M_i = __v;
      else
        {
          _M_i = __v;
          if (__m == memory_order_seq_cst)
            __sync_synchronize();
        }
    }
    void*
    load(memory_order __m = memory_order_seq_cst) const volatile
    {
      __glibcxx_assert(__m != memory_order_release);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_synchronize();
      void* __ret = _M_i;
      __sync_synchronize();
      return __ret;
    }
    void*
    exchange(void* __v, memory_order __m = memory_order_seq_cst) volatile
    {
      // XXX built-in assumes memory_order_acquire.
      return __sync_lock_test_and_set(&_M_i, __v);
    }
    bool
    compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                          memory_order __m2) volatile
    { return compare_exchange_strong(__v1, __v2, __m1, __m2); }
    bool
    compare_exchange_weak(void*& __v1, void* __v2,
                          memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }
    bool
    compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2) volatile
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);

      void* __v1o = __v1;
      void* __v1n = __sync_val_compare_and_swap(&_M_i, __v1o, __v2);

      // Assume extra stores (of the same value) are allowed in the true case.
      __v1 = __v1n;
      return __v1o == __v1n;
    }
    bool
    compare_exchange_strong(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }
    void*
    fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    { return __sync_fetch_and_add(&_M_i, __d); }
    void*
    fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    { return __sync_fetch_and_sub(&_M_i, __d); }
    operator void*() const volatile
    { return load(); }
    void*
    operator+=(ptrdiff_t __d) volatile
    { return __sync_add_and_fetch(&_M_i, __d); }
    void*
    operator-=(ptrdiff_t __d) volatile
    { return __sync_sub_and_fetch(&_M_i, __d); }
  };
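
  // Illustrative usage, not part of the header: installing a pointer
  // exactly once with compare_exchange_strong.  On failure, __expected
  // is updated to the value actually observed.  The names __addr and
  // __install are hypothetical.
  //
  //   __atomic2::atomic_address __addr(0);
  //
  //   bool __install(void* __p)
  //   {
  //     void* __expected = 0;
  //     return __addr.compare_exchange_strong(__expected, __p);
  //   }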
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __integral_type;

      __integral_type _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;

      __atomic_base(__integral_type __i) { _M_i = __i; }
      operator __integral_type() const volatile
      { return load(); }
      __integral_type
      operator=(__integral_type __i)
      {
        store(__i);
        return __i;
      }
      __integral_type
      operator++(int) volatile
      { return fetch_add(1); }

      __integral_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __integral_type
      operator++() volatile
      { return __sync_add_and_fetch(&_M_i, 1); }

      __integral_type
      operator--() volatile
      { return __sync_sub_and_fetch(&_M_i, 1); }

      __integral_type
      operator+=(__integral_type __i) volatile
      { return __sync_add_and_fetch(&_M_i, __i); }

      __integral_type
      operator-=(__integral_type __i) volatile
      { return __sync_sub_and_fetch(&_M_i, __i); }

      __integral_type
      operator&=(__integral_type __i) volatile
      { return __sync_and_and_fetch(&_M_i, __i); }

      __integral_type
      operator|=(__integral_type __i) volatile
      { return __sync_or_and_fetch(&_M_i, __i); }

      __integral_type
      operator^=(__integral_type __i) volatile
      { return __sync_xor_and_fetch(&_M_i, __i); }
      bool
      is_lock_free() const volatile
      { return true; }
      void
      store(__integral_type __i,
            memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_i = __i;
        else
          {
            _M_i = __i;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }
      __integral_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __integral_type __ret = _M_i;
        __sync_synchronize();
        return __ret;
      }
      __integral_type
      exchange(__integral_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
      }
      bool
      compare_exchange_weak(__integral_type& __i1, __integral_type __i2,
                            memory_order __m1, memory_order __m2) volatile
      { return compare_exchange_strong(__i1, __i2, __m1, __m2); }
      bool
      compare_exchange_weak(__integral_type& __i1, __integral_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }
      bool
      compare_exchange_strong(__integral_type& __i1, __integral_type __i2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __integral_type __i1o = __i1;
        __integral_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);

        // Assume extra stores (of the same value) are allowed in the true case.
        __i1 = __i1n;
        return __i1o == __i1n;
      }
      bool
      compare_exchange_strong(__integral_type& __i1, __integral_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }
      __integral_type
      fetch_add(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_add(&_M_i, __i); }

      __integral_type
      fetch_sub(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_sub(&_M_i, __i); }

      __integral_type
      fetch_and(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_and(&_M_i, __i); }

      __integral_type
      fetch_or(__integral_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_or(&_M_i, __i); }

      __integral_type
      fetch_xor(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_xor(&_M_i, __i); }
    };
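
  // Illustrative usage, not part of the header: __atomic_base<int> as a
  // lock-free counter.  fetch_add returns the previous value; the
  // compound-assignment operators return the updated value.  The name
  // __counter is hypothetical.
  //
  //   __atomic2::__atomic_base<int> __counter(0);
  //
  //   int __old = __counter.fetch_add(1);  // __old == 0, counter now 1
  //   int __new = (__counter += 2);        // __new == 3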
  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() = default;
    ~atomic_bool() = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;

    atomic_bool(bool __i) : _M_base(__i) { }
    bool
    operator=(bool __i)
    { return _M_base.operator=(__i); }
    operator bool() const volatile
    { return _M_base.load(); }
    bool
    is_lock_free() const volatile
    { return _M_base.is_lock_free(); }
    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { _M_base.store(__i, __m); }
    bool
    load(memory_order __m = memory_order_seq_cst) const volatile
    { return _M_base.load(__m); }
    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.exchange(__i, __m); }
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
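
  // Illustrative usage, not part of the header: atomic_bool as a
  // run-once guard.  Exactly one caller observes false and wins the
  // exchange.  The name __done is hypothetical.
  //
  //   __atomic2::atomic_bool __done(false);
  //
  //   bool __expected = false;
  //   if (__done.compare_exchange_strong(__expected, true))
  //     { /* first caller performs one-time initialization */ }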
} // namespace __atomic2

#endif // _GLIBCXX_ATOMIC_2_H