// libstdc++ <atomic> — source listing (recovered from a doxygen-generated page)
00001 // -*- C++ -*- header. 00002 00003 // Copyright (C) 2008-2017 Free Software Foundation, Inc. 00004 // 00005 // This file is part of the GNU ISO C++ Library. This library is free 00006 // software; you can redistribute it and/or modify it under the 00007 // terms of the GNU General Public License as published by the 00008 // Free Software Foundation; either version 3, or (at your option) 00009 // any later version. 00010 00011 // This library is distributed in the hope that it will be useful, 00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of 00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 00014 // GNU General Public License for more details. 00015 00016 // Under Section 7 of GPL version 3, you are granted additional 00017 // permissions described in the GCC Runtime Library Exception, version 00018 // 3.1, as published by the Free Software Foundation. 00019 00020 // You should have received a copy of the GNU General Public License and 00021 // a copy of the GCC Runtime Library Exception along with this program; 00022 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see 00023 // <http://www.gnu.org/licenses/>. 00024 00025 /** @file include/atomic 00026 * This is a Standard C++ Library header. 00027 */ 00028 00029 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl. 
00030 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html 00031 00032 #ifndef _GLIBCXX_ATOMIC 00033 #define _GLIBCXX_ATOMIC 1 00034 00035 #pragma GCC system_header 00036 00037 #if __cplusplus < 201103L 00038 # include <bits/c++0x_warning.h> 00039 #else 00040 00041 #include <bits/atomic_base.h> 00042 #include <bits/move.h> 00043 00044 namespace std _GLIBCXX_VISIBILITY(default) 00045 { 00046 _GLIBCXX_BEGIN_NAMESPACE_VERSION 00047 00048 /** 00049 * @addtogroup atomics 00050 * @{ 00051 */ 00052 00053 #if __cplusplus > 201402L 00054 # define __cpp_lib_atomic_is_always_lock_free 201603 00055 #endif 00056 00057 template<typename _Tp> 00058 struct atomic; 00059 00060 /// atomic<bool> 00061 // NB: No operators or fetch-operations for this type. 00062 template<> 00063 struct atomic<bool> 00064 { 00065 private: 00066 __atomic_base<bool> _M_base; 00067 00068 public: 00069 atomic() noexcept = default; 00070 ~atomic() noexcept = default; 00071 atomic(const atomic&) = delete; 00072 atomic& operator=(const atomic&) = delete; 00073 atomic& operator=(const atomic&) volatile = delete; 00074 00075 constexpr atomic(bool __i) noexcept : _M_base(__i) { } 00076 00077 bool 00078 operator=(bool __i) noexcept 00079 { return _M_base.operator=(__i); } 00080 00081 bool 00082 operator=(bool __i) volatile noexcept 00083 { return _M_base.operator=(__i); } 00084 00085 operator bool() const noexcept 00086 { return _M_base.load(); } 00087 00088 operator bool() const volatile noexcept 00089 { return _M_base.load(); } 00090 00091 bool 00092 is_lock_free() const noexcept { return _M_base.is_lock_free(); } 00093 00094 bool 00095 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); } 00096 00097 #if __cplusplus > 201402L 00098 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2; 00099 #endif 00100 00101 void 00102 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept 00103 { _M_base.store(__i, __m); } 00104 00105 void 00106 store(bool 
__i, memory_order __m = memory_order_seq_cst) volatile noexcept 00107 { _M_base.store(__i, __m); } 00108 00109 bool 00110 load(memory_order __m = memory_order_seq_cst) const noexcept 00111 { return _M_base.load(__m); } 00112 00113 bool 00114 load(memory_order __m = memory_order_seq_cst) const volatile noexcept 00115 { return _M_base.load(__m); } 00116 00117 bool 00118 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept 00119 { return _M_base.exchange(__i, __m); } 00120 00121 bool 00122 exchange(bool __i, 00123 memory_order __m = memory_order_seq_cst) volatile noexcept 00124 { return _M_base.exchange(__i, __m); } 00125 00126 bool 00127 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, 00128 memory_order __m2) noexcept 00129 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } 00130 00131 bool 00132 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, 00133 memory_order __m2) volatile noexcept 00134 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } 00135 00136 bool 00137 compare_exchange_weak(bool& __i1, bool __i2, 00138 memory_order __m = memory_order_seq_cst) noexcept 00139 { return _M_base.compare_exchange_weak(__i1, __i2, __m); } 00140 00141 bool 00142 compare_exchange_weak(bool& __i1, bool __i2, 00143 memory_order __m = memory_order_seq_cst) volatile noexcept 00144 { return _M_base.compare_exchange_weak(__i1, __i2, __m); } 00145 00146 bool 00147 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, 00148 memory_order __m2) noexcept 00149 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } 00150 00151 bool 00152 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, 00153 memory_order __m2) volatile noexcept 00154 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } 00155 00156 bool 00157 compare_exchange_strong(bool& __i1, bool __i2, 00158 memory_order __m = memory_order_seq_cst) noexcept 00159 { return 
_M_base.compare_exchange_strong(__i1, __i2, __m); } 00160 00161 bool 00162 compare_exchange_strong(bool& __i1, bool __i2, 00163 memory_order __m = memory_order_seq_cst) volatile noexcept 00164 { return _M_base.compare_exchange_strong(__i1, __i2, __m); } 00165 }; 00166 00167 00168 /** 00169 * @brief Generic atomic type, primary class template. 00170 * 00171 * @tparam _Tp Type to be made atomic, must be trivally copyable. 00172 */ 00173 template<typename _Tp> 00174 struct atomic 00175 { 00176 private: 00177 // Align 1/2/4/8/16-byte types to at least their size. 00178 static constexpr int _S_min_alignment 00179 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16 00180 ? 0 : sizeof(_Tp); 00181 00182 static constexpr int _S_alignment 00183 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp); 00184 00185 alignas(_S_alignment) _Tp _M_i; 00186 00187 static_assert(__is_trivially_copyable(_Tp), 00188 "std::atomic requires a trivially copyable type"); 00189 00190 static_assert(sizeof(_Tp) > 0, 00191 "Incomplete or zero-sized types are not supported"); 00192 00193 public: 00194 atomic() noexcept = default; 00195 ~atomic() noexcept = default; 00196 atomic(const atomic&) = delete; 00197 atomic& operator=(const atomic&) = delete; 00198 atomic& operator=(const atomic&) volatile = delete; 00199 00200 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { } 00201 00202 operator _Tp() const noexcept 00203 { return load(); } 00204 00205 operator _Tp() const volatile noexcept 00206 { return load(); } 00207 00208 _Tp 00209 operator=(_Tp __i) noexcept 00210 { store(__i); return __i; } 00211 00212 _Tp 00213 operator=(_Tp __i) volatile noexcept 00214 { store(__i); return __i; } 00215 00216 bool 00217 is_lock_free() const noexcept 00218 { 00219 // Produce a fake, minimally aligned pointer. 
00220 return __atomic_is_lock_free(sizeof(_M_i), 00221 reinterpret_cast<void *>(-__alignof(_M_i))); 00222 } 00223 00224 bool 00225 is_lock_free() const volatile noexcept 00226 { 00227 // Produce a fake, minimally aligned pointer. 00228 return __atomic_is_lock_free(sizeof(_M_i), 00229 reinterpret_cast<void *>(-__alignof(_M_i))); 00230 } 00231 00232 #if __cplusplus > 201402L 00233 static constexpr bool is_always_lock_free 00234 = __atomic_always_lock_free(sizeof(_M_i), 0); 00235 #endif 00236 00237 void 00238 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept 00239 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); } 00240 00241 void 00242 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept 00243 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); } 00244 00245 _Tp 00246 load(memory_order __m = memory_order_seq_cst) const noexcept 00247 { 00248 alignas(_Tp) unsigned char __buf[sizeof(_Tp)]; 00249 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf); 00250 __atomic_load(std::__addressof(_M_i), __ptr, __m); 00251 return *__ptr; 00252 } 00253 00254 _Tp 00255 load(memory_order __m = memory_order_seq_cst) const volatile noexcept 00256 { 00257 alignas(_Tp) unsigned char __buf[sizeof(_Tp)]; 00258 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf); 00259 __atomic_load(std::__addressof(_M_i), __ptr, __m); 00260 return *__ptr; 00261 } 00262 00263 _Tp 00264 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept 00265 { 00266 alignas(_Tp) unsigned char __buf[sizeof(_Tp)]; 00267 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf); 00268 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i), 00269 __ptr, __m); 00270 return *__ptr; 00271 } 00272 00273 _Tp 00274 exchange(_Tp __i, 00275 memory_order __m = memory_order_seq_cst) volatile noexcept 00276 { 00277 alignas(_Tp) unsigned char __buf[sizeof(_Tp)]; 00278 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf); 00279 __atomic_exchange(std::__addressof(_M_i), 
std::__addressof(__i), 00280 __ptr, __m); 00281 return *__ptr; 00282 } 00283 00284 bool 00285 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s, 00286 memory_order __f) noexcept 00287 { 00288 return __atomic_compare_exchange(std::__addressof(_M_i), 00289 std::__addressof(__e), 00290 std::__addressof(__i), 00291 true, __s, __f); 00292 } 00293 00294 bool 00295 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s, 00296 memory_order __f) volatile noexcept 00297 { 00298 return __atomic_compare_exchange(std::__addressof(_M_i), 00299 std::__addressof(__e), 00300 std::__addressof(__i), 00301 true, __s, __f); 00302 } 00303 00304 bool 00305 compare_exchange_weak(_Tp& __e, _Tp __i, 00306 memory_order __m = memory_order_seq_cst) noexcept 00307 { return compare_exchange_weak(__e, __i, __m, 00308 __cmpexch_failure_order(__m)); } 00309 00310 bool 00311 compare_exchange_weak(_Tp& __e, _Tp __i, 00312 memory_order __m = memory_order_seq_cst) volatile noexcept 00313 { return compare_exchange_weak(__e, __i, __m, 00314 __cmpexch_failure_order(__m)); } 00315 00316 bool 00317 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s, 00318 memory_order __f) noexcept 00319 { 00320 return __atomic_compare_exchange(std::__addressof(_M_i), 00321 std::__addressof(__e), 00322 std::__addressof(__i), 00323 false, __s, __f); 00324 } 00325 00326 bool 00327 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s, 00328 memory_order __f) volatile noexcept 00329 { 00330 return __atomic_compare_exchange(std::__addressof(_M_i), 00331 std::__addressof(__e), 00332 std::__addressof(__i), 00333 false, __s, __f); 00334 } 00335 00336 bool 00337 compare_exchange_strong(_Tp& __e, _Tp __i, 00338 memory_order __m = memory_order_seq_cst) noexcept 00339 { return compare_exchange_strong(__e, __i, __m, 00340 __cmpexch_failure_order(__m)); } 00341 00342 bool 00343 compare_exchange_strong(_Tp& __e, _Tp __i, 00344 memory_order __m = memory_order_seq_cst) volatile noexcept 00345 { return 
compare_exchange_strong(__e, __i, __m, 00346 __cmpexch_failure_order(__m)); } 00347 }; 00348 00349 00350 /// Partial specialization for pointer types. 00351 template<typename _Tp> 00352 struct atomic<_Tp*> 00353 { 00354 typedef _Tp* __pointer_type; 00355 typedef __atomic_base<_Tp*> __base_type; 00356 __base_type _M_b; 00357 00358 atomic() noexcept = default; 00359 ~atomic() noexcept = default; 00360 atomic(const atomic&) = delete; 00361 atomic& operator=(const atomic&) = delete; 00362 atomic& operator=(const atomic&) volatile = delete; 00363 00364 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { } 00365 00366 operator __pointer_type() const noexcept 00367 { return __pointer_type(_M_b); } 00368 00369 operator __pointer_type() const volatile noexcept 00370 { return __pointer_type(_M_b); } 00371 00372 __pointer_type 00373 operator=(__pointer_type __p) noexcept 00374 { return _M_b.operator=(__p); } 00375 00376 __pointer_type 00377 operator=(__pointer_type __p) volatile noexcept 00378 { return _M_b.operator=(__p); } 00379 00380 __pointer_type 00381 operator++(int) noexcept 00382 { return _M_b++; } 00383 00384 __pointer_type 00385 operator++(int) volatile noexcept 00386 { return _M_b++; } 00387 00388 __pointer_type 00389 operator--(int) noexcept 00390 { return _M_b--; } 00391 00392 __pointer_type 00393 operator--(int) volatile noexcept 00394 { return _M_b--; } 00395 00396 __pointer_type 00397 operator++() noexcept 00398 { return ++_M_b; } 00399 00400 __pointer_type 00401 operator++() volatile noexcept 00402 { return ++_M_b; } 00403 00404 __pointer_type 00405 operator--() noexcept 00406 { return --_M_b; } 00407 00408 __pointer_type 00409 operator--() volatile noexcept 00410 { return --_M_b; } 00411 00412 __pointer_type 00413 operator+=(ptrdiff_t __d) noexcept 00414 { return _M_b.operator+=(__d); } 00415 00416 __pointer_type 00417 operator+=(ptrdiff_t __d) volatile noexcept 00418 { return _M_b.operator+=(__d); } 00419 00420 __pointer_type 00421 
operator-=(ptrdiff_t __d) noexcept 00422 { return _M_b.operator-=(__d); } 00423 00424 __pointer_type 00425 operator-=(ptrdiff_t __d) volatile noexcept 00426 { return _M_b.operator-=(__d); } 00427 00428 bool 00429 is_lock_free() const noexcept 00430 { return _M_b.is_lock_free(); } 00431 00432 bool 00433 is_lock_free() const volatile noexcept 00434 { return _M_b.is_lock_free(); } 00435 00436 #if __cplusplus > 201402L 00437 static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2; 00438 #endif 00439 00440 void 00441 store(__pointer_type __p, 00442 memory_order __m = memory_order_seq_cst) noexcept 00443 { return _M_b.store(__p, __m); } 00444 00445 void 00446 store(__pointer_type __p, 00447 memory_order __m = memory_order_seq_cst) volatile noexcept 00448 { return _M_b.store(__p, __m); } 00449 00450 __pointer_type 00451 load(memory_order __m = memory_order_seq_cst) const noexcept 00452 { return _M_b.load(__m); } 00453 00454 __pointer_type 00455 load(memory_order __m = memory_order_seq_cst) const volatile noexcept 00456 { return _M_b.load(__m); } 00457 00458 __pointer_type 00459 exchange(__pointer_type __p, 00460 memory_order __m = memory_order_seq_cst) noexcept 00461 { return _M_b.exchange(__p, __m); } 00462 00463 __pointer_type 00464 exchange(__pointer_type __p, 00465 memory_order __m = memory_order_seq_cst) volatile noexcept 00466 { return _M_b.exchange(__p, __m); } 00467 00468 bool 00469 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, 00470 memory_order __m1, memory_order __m2) noexcept 00471 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } 00472 00473 bool 00474 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, 00475 memory_order __m1, 00476 memory_order __m2) volatile noexcept 00477 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } 00478 00479 bool 00480 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, 00481 memory_order __m = memory_order_seq_cst) noexcept 00482 { 00483 
return compare_exchange_weak(__p1, __p2, __m, 00484 __cmpexch_failure_order(__m)); 00485 } 00486 00487 bool 00488 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, 00489 memory_order __m = memory_order_seq_cst) volatile noexcept 00490 { 00491 return compare_exchange_weak(__p1, __p2, __m, 00492 __cmpexch_failure_order(__m)); 00493 } 00494 00495 bool 00496 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, 00497 memory_order __m1, memory_order __m2) noexcept 00498 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } 00499 00500 bool 00501 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, 00502 memory_order __m1, 00503 memory_order __m2) volatile noexcept 00504 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } 00505 00506 bool 00507 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, 00508 memory_order __m = memory_order_seq_cst) noexcept 00509 { 00510 return _M_b.compare_exchange_strong(__p1, __p2, __m, 00511 __cmpexch_failure_order(__m)); 00512 } 00513 00514 bool 00515 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, 00516 memory_order __m = memory_order_seq_cst) volatile noexcept 00517 { 00518 return _M_b.compare_exchange_strong(__p1, __p2, __m, 00519 __cmpexch_failure_order(__m)); 00520 } 00521 00522 __pointer_type 00523 fetch_add(ptrdiff_t __d, 00524 memory_order __m = memory_order_seq_cst) noexcept 00525 { return _M_b.fetch_add(__d, __m); } 00526 00527 __pointer_type 00528 fetch_add(ptrdiff_t __d, 00529 memory_order __m = memory_order_seq_cst) volatile noexcept 00530 { return _M_b.fetch_add(__d, __m); } 00531 00532 __pointer_type 00533 fetch_sub(ptrdiff_t __d, 00534 memory_order __m = memory_order_seq_cst) noexcept 00535 { return _M_b.fetch_sub(__d, __m); } 00536 00537 __pointer_type 00538 fetch_sub(ptrdiff_t __d, 00539 memory_order __m = memory_order_seq_cst) volatile noexcept 00540 { return _M_b.fetch_sub(__d, __m); } 00541 }; 00542 00543 00544 /// 
Explicit specialization for char. 00545 template<> 00546 struct atomic<char> : __atomic_base<char> 00547 { 00548 typedef char __integral_type; 00549 typedef __atomic_base<char> __base_type; 00550 00551 atomic() noexcept = default; 00552 ~atomic() noexcept = default; 00553 atomic(const atomic&) = delete; 00554 atomic& operator=(const atomic&) = delete; 00555 atomic& operator=(const atomic&) volatile = delete; 00556 00557 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00558 00559 using __base_type::operator __integral_type; 00560 using __base_type::operator=; 00561 00562 #if __cplusplus > 201402L 00563 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2; 00564 #endif 00565 }; 00566 00567 /// Explicit specialization for signed char. 00568 template<> 00569 struct atomic<signed char> : __atomic_base<signed char> 00570 { 00571 typedef signed char __integral_type; 00572 typedef __atomic_base<signed char> __base_type; 00573 00574 atomic() noexcept= default; 00575 ~atomic() noexcept = default; 00576 atomic(const atomic&) = delete; 00577 atomic& operator=(const atomic&) = delete; 00578 atomic& operator=(const atomic&) volatile = delete; 00579 00580 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00581 00582 using __base_type::operator __integral_type; 00583 using __base_type::operator=; 00584 00585 #if __cplusplus > 201402L 00586 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2; 00587 #endif 00588 }; 00589 00590 /// Explicit specialization for unsigned char. 
00591 template<> 00592 struct atomic<unsigned char> : __atomic_base<unsigned char> 00593 { 00594 typedef unsigned char __integral_type; 00595 typedef __atomic_base<unsigned char> __base_type; 00596 00597 atomic() noexcept= default; 00598 ~atomic() noexcept = default; 00599 atomic(const atomic&) = delete; 00600 atomic& operator=(const atomic&) = delete; 00601 atomic& operator=(const atomic&) volatile = delete; 00602 00603 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00604 00605 using __base_type::operator __integral_type; 00606 using __base_type::operator=; 00607 00608 #if __cplusplus > 201402L 00609 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2; 00610 #endif 00611 }; 00612 00613 /// Explicit specialization for short. 00614 template<> 00615 struct atomic<short> : __atomic_base<short> 00616 { 00617 typedef short __integral_type; 00618 typedef __atomic_base<short> __base_type; 00619 00620 atomic() noexcept = default; 00621 ~atomic() noexcept = default; 00622 atomic(const atomic&) = delete; 00623 atomic& operator=(const atomic&) = delete; 00624 atomic& operator=(const atomic&) volatile = delete; 00625 00626 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00627 00628 using __base_type::operator __integral_type; 00629 using __base_type::operator=; 00630 00631 #if __cplusplus > 201402L 00632 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2; 00633 #endif 00634 }; 00635 00636 /// Explicit specialization for unsigned short. 
00637 template<> 00638 struct atomic<unsigned short> : __atomic_base<unsigned short> 00639 { 00640 typedef unsigned short __integral_type; 00641 typedef __atomic_base<unsigned short> __base_type; 00642 00643 atomic() noexcept = default; 00644 ~atomic() noexcept = default; 00645 atomic(const atomic&) = delete; 00646 atomic& operator=(const atomic&) = delete; 00647 atomic& operator=(const atomic&) volatile = delete; 00648 00649 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00650 00651 using __base_type::operator __integral_type; 00652 using __base_type::operator=; 00653 00654 #if __cplusplus > 201402L 00655 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2; 00656 #endif 00657 }; 00658 00659 /// Explicit specialization for int. 00660 template<> 00661 struct atomic<int> : __atomic_base<int> 00662 { 00663 typedef int __integral_type; 00664 typedef __atomic_base<int> __base_type; 00665 00666 atomic() noexcept = default; 00667 ~atomic() noexcept = default; 00668 atomic(const atomic&) = delete; 00669 atomic& operator=(const atomic&) = delete; 00670 atomic& operator=(const atomic&) volatile = delete; 00671 00672 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00673 00674 using __base_type::operator __integral_type; 00675 using __base_type::operator=; 00676 00677 #if __cplusplus > 201402L 00678 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2; 00679 #endif 00680 }; 00681 00682 /// Explicit specialization for unsigned int. 
00683 template<> 00684 struct atomic<unsigned int> : __atomic_base<unsigned int> 00685 { 00686 typedef unsigned int __integral_type; 00687 typedef __atomic_base<unsigned int> __base_type; 00688 00689 atomic() noexcept = default; 00690 ~atomic() noexcept = default; 00691 atomic(const atomic&) = delete; 00692 atomic& operator=(const atomic&) = delete; 00693 atomic& operator=(const atomic&) volatile = delete; 00694 00695 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00696 00697 using __base_type::operator __integral_type; 00698 using __base_type::operator=; 00699 00700 #if __cplusplus > 201402L 00701 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2; 00702 #endif 00703 }; 00704 00705 /// Explicit specialization for long. 00706 template<> 00707 struct atomic<long> : __atomic_base<long> 00708 { 00709 typedef long __integral_type; 00710 typedef __atomic_base<long> __base_type; 00711 00712 atomic() noexcept = default; 00713 ~atomic() noexcept = default; 00714 atomic(const atomic&) = delete; 00715 atomic& operator=(const atomic&) = delete; 00716 atomic& operator=(const atomic&) volatile = delete; 00717 00718 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00719 00720 using __base_type::operator __integral_type; 00721 using __base_type::operator=; 00722 00723 #if __cplusplus > 201402L 00724 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2; 00725 #endif 00726 }; 00727 00728 /// Explicit specialization for unsigned long. 
00729 template<> 00730 struct atomic<unsigned long> : __atomic_base<unsigned long> 00731 { 00732 typedef unsigned long __integral_type; 00733 typedef __atomic_base<unsigned long> __base_type; 00734 00735 atomic() noexcept = default; 00736 ~atomic() noexcept = default; 00737 atomic(const atomic&) = delete; 00738 atomic& operator=(const atomic&) = delete; 00739 atomic& operator=(const atomic&) volatile = delete; 00740 00741 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00742 00743 using __base_type::operator __integral_type; 00744 using __base_type::operator=; 00745 00746 #if __cplusplus > 201402L 00747 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2; 00748 #endif 00749 }; 00750 00751 /// Explicit specialization for long long. 00752 template<> 00753 struct atomic<long long> : __atomic_base<long long> 00754 { 00755 typedef long long __integral_type; 00756 typedef __atomic_base<long long> __base_type; 00757 00758 atomic() noexcept = default; 00759 ~atomic() noexcept = default; 00760 atomic(const atomic&) = delete; 00761 atomic& operator=(const atomic&) = delete; 00762 atomic& operator=(const atomic&) volatile = delete; 00763 00764 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00765 00766 using __base_type::operator __integral_type; 00767 using __base_type::operator=; 00768 00769 #if __cplusplus > 201402L 00770 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2; 00771 #endif 00772 }; 00773 00774 /// Explicit specialization for unsigned long long. 
00775 template<> 00776 struct atomic<unsigned long long> : __atomic_base<unsigned long long> 00777 { 00778 typedef unsigned long long __integral_type; 00779 typedef __atomic_base<unsigned long long> __base_type; 00780 00781 atomic() noexcept = default; 00782 ~atomic() noexcept = default; 00783 atomic(const atomic&) = delete; 00784 atomic& operator=(const atomic&) = delete; 00785 atomic& operator=(const atomic&) volatile = delete; 00786 00787 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00788 00789 using __base_type::operator __integral_type; 00790 using __base_type::operator=; 00791 00792 #if __cplusplus > 201402L 00793 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2; 00794 #endif 00795 }; 00796 00797 /// Explicit specialization for wchar_t. 00798 template<> 00799 struct atomic<wchar_t> : __atomic_base<wchar_t> 00800 { 00801 typedef wchar_t __integral_type; 00802 typedef __atomic_base<wchar_t> __base_type; 00803 00804 atomic() noexcept = default; 00805 ~atomic() noexcept = default; 00806 atomic(const atomic&) = delete; 00807 atomic& operator=(const atomic&) = delete; 00808 atomic& operator=(const atomic&) volatile = delete; 00809 00810 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00811 00812 using __base_type::operator __integral_type; 00813 using __base_type::operator=; 00814 00815 #if __cplusplus > 201402L 00816 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2; 00817 #endif 00818 }; 00819 00820 /// Explicit specialization for char16_t. 
00821 template<> 00822 struct atomic<char16_t> : __atomic_base<char16_t> 00823 { 00824 typedef char16_t __integral_type; 00825 typedef __atomic_base<char16_t> __base_type; 00826 00827 atomic() noexcept = default; 00828 ~atomic() noexcept = default; 00829 atomic(const atomic&) = delete; 00830 atomic& operator=(const atomic&) = delete; 00831 atomic& operator=(const atomic&) volatile = delete; 00832 00833 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00834 00835 using __base_type::operator __integral_type; 00836 using __base_type::operator=; 00837 00838 #if __cplusplus > 201402L 00839 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2; 00840 #endif 00841 }; 00842 00843 /// Explicit specialization for char32_t. 00844 template<> 00845 struct atomic<char32_t> : __atomic_base<char32_t> 00846 { 00847 typedef char32_t __integral_type; 00848 typedef __atomic_base<char32_t> __base_type; 00849 00850 atomic() noexcept = default; 00851 ~atomic() noexcept = default; 00852 atomic(const atomic&) = delete; 00853 atomic& operator=(const atomic&) = delete; 00854 atomic& operator=(const atomic&) volatile = delete; 00855 00856 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 00857 00858 using __base_type::operator __integral_type; 00859 using __base_type::operator=; 00860 00861 #if __cplusplus > 201402L 00862 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2; 00863 #endif 00864 }; 00865 00866 00867 /// atomic_bool 00868 typedef atomic<bool> atomic_bool; 00869 00870 /// atomic_char 00871 typedef atomic<char> atomic_char; 00872 00873 /// atomic_schar 00874 typedef atomic<signed char> atomic_schar; 00875 00876 /// atomic_uchar 00877 typedef atomic<unsigned char> atomic_uchar; 00878 00879 /// atomic_short 00880 typedef atomic<short> atomic_short; 00881 00882 /// atomic_ushort 00883 typedef atomic<unsigned short> atomic_ushort; 00884 00885 /// atomic_int 00886 typedef atomic<int> atomic_int; 00887 
00888 /// atomic_uint 00889 typedef atomic<unsigned int> atomic_uint; 00890 00891 /// atomic_long 00892 typedef atomic<long> atomic_long; 00893 00894 /// atomic_ulong 00895 typedef atomic<unsigned long> atomic_ulong; 00896 00897 /// atomic_llong 00898 typedef atomic<long long> atomic_llong; 00899 00900 /// atomic_ullong 00901 typedef atomic<unsigned long long> atomic_ullong; 00902 00903 /// atomic_wchar_t 00904 typedef atomic<wchar_t> atomic_wchar_t; 00905 00906 /// atomic_char16_t 00907 typedef atomic<char16_t> atomic_char16_t; 00908 00909 /// atomic_char32_t 00910 typedef atomic<char32_t> atomic_char32_t; 00911 00912 00913 // _GLIBCXX_RESOLVE_LIB_DEFECTS 00914 // 2441. Exact-width atomic typedefs should be provided 00915 00916 /// atomic_int8_t 00917 typedef atomic<int8_t> atomic_int8_t; 00918 00919 /// atomic_uint8_t 00920 typedef atomic<uint8_t> atomic_uint8_t; 00921 00922 /// atomic_int16_t 00923 typedef atomic<int16_t> atomic_int16_t; 00924 00925 /// atomic_uint16_t 00926 typedef atomic<uint16_t> atomic_uint16_t; 00927 00928 /// atomic_int32_t 00929 typedef atomic<int32_t> atomic_int32_t; 00930 00931 /// atomic_uint32_t 00932 typedef atomic<uint32_t> atomic_uint32_t; 00933 00934 /// atomic_int64_t 00935 typedef atomic<int64_t> atomic_int64_t; 00936 00937 /// atomic_uint64_t 00938 typedef atomic<uint64_t> atomic_uint64_t; 00939 00940 00941 /// atomic_int_least8_t 00942 typedef atomic<int_least8_t> atomic_int_least8_t; 00943 00944 /// atomic_uint_least8_t 00945 typedef atomic<uint_least8_t> atomic_uint_least8_t; 00946 00947 /// atomic_int_least16_t 00948 typedef atomic<int_least16_t> atomic_int_least16_t; 00949 00950 /// atomic_uint_least16_t 00951 typedef atomic<uint_least16_t> atomic_uint_least16_t; 00952 00953 /// atomic_int_least32_t 00954 typedef atomic<int_least32_t> atomic_int_least32_t; 00955 00956 /// atomic_uint_least32_t 00957 typedef atomic<uint_least32_t> atomic_uint_least32_t; 00958 00959 /// atomic_int_least64_t 00960 typedef 
atomic<int_least64_t> atomic_int_least64_t; 00961 00962 /// atomic_uint_least64_t 00963 typedef atomic<uint_least64_t> atomic_uint_least64_t; 00964 00965 00966 /// atomic_int_fast8_t 00967 typedef atomic<int_fast8_t> atomic_int_fast8_t; 00968 00969 /// atomic_uint_fast8_t 00970 typedef atomic<uint_fast8_t> atomic_uint_fast8_t; 00971 00972 /// atomic_int_fast16_t 00973 typedef atomic<int_fast16_t> atomic_int_fast16_t; 00974 00975 /// atomic_uint_fast16_t 00976 typedef atomic<uint_fast16_t> atomic_uint_fast16_t; 00977 00978 /// atomic_int_fast32_t 00979 typedef atomic<int_fast32_t> atomic_int_fast32_t; 00980 00981 /// atomic_uint_fast32_t 00982 typedef atomic<uint_fast32_t> atomic_uint_fast32_t; 00983 00984 /// atomic_int_fast64_t 00985 typedef atomic<int_fast64_t> atomic_int_fast64_t; 00986 00987 /// atomic_uint_fast64_t 00988 typedef atomic<uint_fast64_t> atomic_uint_fast64_t; 00989 00990 00991 /// atomic_intptr_t 00992 typedef atomic<intptr_t> atomic_intptr_t; 00993 00994 /// atomic_uintptr_t 00995 typedef atomic<uintptr_t> atomic_uintptr_t; 00996 00997 /// atomic_size_t 00998 typedef atomic<size_t> atomic_size_t; 00999 01000 /// atomic_intmax_t 01001 typedef atomic<intmax_t> atomic_intmax_t; 01002 01003 /// atomic_uintmax_t 01004 typedef atomic<uintmax_t> atomic_uintmax_t; 01005 01006 /// atomic_ptrdiff_t 01007 typedef atomic<ptrdiff_t> atomic_ptrdiff_t; 01008 01009 01010 // Function definitions, atomic_flag operations. 
01011 inline bool 01012 atomic_flag_test_and_set_explicit(atomic_flag* __a, 01013 memory_order __m) noexcept 01014 { return __a->test_and_set(__m); } 01015 01016 inline bool 01017 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, 01018 memory_order __m) noexcept 01019 { return __a->test_and_set(__m); } 01020 01021 inline void 01022 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept 01023 { __a->clear(__m); } 01024 01025 inline void 01026 atomic_flag_clear_explicit(volatile atomic_flag* __a, 01027 memory_order __m) noexcept 01028 { __a->clear(__m); } 01029 01030 inline bool 01031 atomic_flag_test_and_set(atomic_flag* __a) noexcept 01032 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } 01033 01034 inline bool 01035 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept 01036 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } 01037 01038 inline void 01039 atomic_flag_clear(atomic_flag* __a) noexcept 01040 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } 01041 01042 inline void 01043 atomic_flag_clear(volatile atomic_flag* __a) noexcept 01044 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } 01045 01046 01047 // Function templates generally applicable to atomic types. 
01048 template<typename _ITp> 01049 inline bool 01050 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept 01051 { return __a->is_lock_free(); } 01052 01053 template<typename _ITp> 01054 inline bool 01055 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept 01056 { return __a->is_lock_free(); } 01057 01058 template<typename _ITp> 01059 inline void 01060 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept 01061 { __a->store(__i, memory_order_relaxed); } 01062 01063 template<typename _ITp> 01064 inline void 01065 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept 01066 { __a->store(__i, memory_order_relaxed); } 01067 01068 template<typename _ITp> 01069 inline void 01070 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i, 01071 memory_order __m) noexcept 01072 { __a->store(__i, __m); } 01073 01074 template<typename _ITp> 01075 inline void 01076 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i, 01077 memory_order __m) noexcept 01078 { __a->store(__i, __m); } 01079 01080 template<typename _ITp> 01081 inline _ITp 01082 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept 01083 { return __a->load(__m); } 01084 01085 template<typename _ITp> 01086 inline _ITp 01087 atomic_load_explicit(const volatile atomic<_ITp>* __a, 01088 memory_order __m) noexcept 01089 { return __a->load(__m); } 01090 01091 template<typename _ITp> 01092 inline _ITp 01093 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i, 01094 memory_order __m) noexcept 01095 { return __a->exchange(__i, __m); } 01096 01097 template<typename _ITp> 01098 inline _ITp 01099 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i, 01100 memory_order __m) noexcept 01101 { return __a->exchange(__i, __m); } 01102 01103 template<typename _ITp> 01104 inline bool 01105 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a, 01106 _ITp* __i1, _ITp __i2, 01107 memory_order __m1, 01108 memory_order __m2) noexcept 01109 { return __a->compare_exchange_weak(*__i1, __i2, __m1, 
__m2); } 01110 01111 template<typename _ITp> 01112 inline bool 01113 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a, 01114 _ITp* __i1, _ITp __i2, 01115 memory_order __m1, 01116 memory_order __m2) noexcept 01117 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); } 01118 01119 template<typename _ITp> 01120 inline bool 01121 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a, 01122 _ITp* __i1, _ITp __i2, 01123 memory_order __m1, 01124 memory_order __m2) noexcept 01125 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); } 01126 01127 template<typename _ITp> 01128 inline bool 01129 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a, 01130 _ITp* __i1, _ITp __i2, 01131 memory_order __m1, 01132 memory_order __m2) noexcept 01133 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); } 01134 01135 01136 template<typename _ITp> 01137 inline void 01138 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept 01139 { atomic_store_explicit(__a, __i, memory_order_seq_cst); } 01140 01141 template<typename _ITp> 01142 inline void 01143 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept 01144 { atomic_store_explicit(__a, __i, memory_order_seq_cst); } 01145 01146 template<typename _ITp> 01147 inline _ITp 01148 atomic_load(const atomic<_ITp>* __a) noexcept 01149 { return atomic_load_explicit(__a, memory_order_seq_cst); } 01150 01151 template<typename _ITp> 01152 inline _ITp 01153 atomic_load(const volatile atomic<_ITp>* __a) noexcept 01154 { return atomic_load_explicit(__a, memory_order_seq_cst); } 01155 01156 template<typename _ITp> 01157 inline _ITp 01158 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept 01159 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); } 01160 01161 template<typename _ITp> 01162 inline _ITp 01163 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept 01164 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); } 01165 01166 template<typename 
_ITp> 01167 inline bool 01168 atomic_compare_exchange_weak(atomic<_ITp>* __a, 01169 _ITp* __i1, _ITp __i2) noexcept 01170 { 01171 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2, 01172 memory_order_seq_cst, 01173 memory_order_seq_cst); 01174 } 01175 01176 template<typename _ITp> 01177 inline bool 01178 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a, 01179 _ITp* __i1, _ITp __i2) noexcept 01180 { 01181 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2, 01182 memory_order_seq_cst, 01183 memory_order_seq_cst); 01184 } 01185 01186 template<typename _ITp> 01187 inline bool 01188 atomic_compare_exchange_strong(atomic<_ITp>* __a, 01189 _ITp* __i1, _ITp __i2) noexcept 01190 { 01191 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2, 01192 memory_order_seq_cst, 01193 memory_order_seq_cst); 01194 } 01195 01196 template<typename _ITp> 01197 inline bool 01198 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a, 01199 _ITp* __i1, _ITp __i2) noexcept 01200 { 01201 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2, 01202 memory_order_seq_cst, 01203 memory_order_seq_cst); 01204 } 01205 01206 // Function templates for atomic_integral operations only, using 01207 // __atomic_base. Template argument should be constricted to 01208 // intergral types as specified in the standard, excluding address 01209 // types. 
01210 template<typename _ITp> 01211 inline _ITp 01212 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i, 01213 memory_order __m) noexcept 01214 { return __a->fetch_add(__i, __m); } 01215 01216 template<typename _ITp> 01217 inline _ITp 01218 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, 01219 memory_order __m) noexcept 01220 { return __a->fetch_add(__i, __m); } 01221 01222 template<typename _ITp> 01223 inline _ITp 01224 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i, 01225 memory_order __m) noexcept 01226 { return __a->fetch_sub(__i, __m); } 01227 01228 template<typename _ITp> 01229 inline _ITp 01230 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, 01231 memory_order __m) noexcept 01232 { return __a->fetch_sub(__i, __m); } 01233 01234 template<typename _ITp> 01235 inline _ITp 01236 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i, 01237 memory_order __m) noexcept 01238 { return __a->fetch_and(__i, __m); } 01239 01240 template<typename _ITp> 01241 inline _ITp 01242 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, 01243 memory_order __m) noexcept 01244 { return __a->fetch_and(__i, __m); } 01245 01246 template<typename _ITp> 01247 inline _ITp 01248 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i, 01249 memory_order __m) noexcept 01250 { return __a->fetch_or(__i, __m); } 01251 01252 template<typename _ITp> 01253 inline _ITp 01254 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, 01255 memory_order __m) noexcept 01256 { return __a->fetch_or(__i, __m); } 01257 01258 template<typename _ITp> 01259 inline _ITp 01260 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i, 01261 memory_order __m) noexcept 01262 { return __a->fetch_xor(__i, __m); } 01263 01264 template<typename _ITp> 01265 inline _ITp 01266 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, 01267 memory_order __m) noexcept 01268 { return 
__a->fetch_xor(__i, __m); } 01269 01270 template<typename _ITp> 01271 inline _ITp 01272 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept 01273 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); } 01274 01275 template<typename _ITp> 01276 inline _ITp 01277 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept 01278 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); } 01279 01280 template<typename _ITp> 01281 inline _ITp 01282 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept 01283 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); } 01284 01285 template<typename _ITp> 01286 inline _ITp 01287 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept 01288 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); } 01289 01290 template<typename _ITp> 01291 inline _ITp 01292 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept 01293 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); } 01294 01295 template<typename _ITp> 01296 inline _ITp 01297 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept 01298 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); } 01299 01300 template<typename _ITp> 01301 inline _ITp 01302 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept 01303 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); } 01304 01305 template<typename _ITp> 01306 inline _ITp 01307 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept 01308 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); } 01309 01310 template<typename _ITp> 01311 inline _ITp 01312 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept 01313 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); } 01314 01315 template<typename _ITp> 01316 inline _ITp 01317 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept 01318 { return 
atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); } 01319 01320 01321 // Partial specializations for pointers. 01322 template<typename _ITp> 01323 inline _ITp* 01324 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d, 01325 memory_order __m) noexcept 01326 { return __a->fetch_add(__d, __m); } 01327 01328 template<typename _ITp> 01329 inline _ITp* 01330 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d, 01331 memory_order __m) noexcept 01332 { return __a->fetch_add(__d, __m); } 01333 01334 template<typename _ITp> 01335 inline _ITp* 01336 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept 01337 { return __a->fetch_add(__d); } 01338 01339 template<typename _ITp> 01340 inline _ITp* 01341 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept 01342 { return __a->fetch_add(__d); } 01343 01344 template<typename _ITp> 01345 inline _ITp* 01346 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, 01347 ptrdiff_t __d, memory_order __m) noexcept 01348 { return __a->fetch_sub(__d, __m); } 01349 01350 template<typename _ITp> 01351 inline _ITp* 01352 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d, 01353 memory_order __m) noexcept 01354 { return __a->fetch_sub(__d, __m); } 01355 01356 template<typename _ITp> 01357 inline _ITp* 01358 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept 01359 { return __a->fetch_sub(__d); } 01360 01361 template<typename _ITp> 01362 inline _ITp* 01363 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept 01364 { return __a->fetch_sub(__d); } 01365 // @} group atomics 01366 01367 _GLIBCXX_END_NAMESPACE_VERSION 01368 } // namespace 01369 01370 #endif // C++11 01371 01372 #endif // _GLIBCXX_ATOMIC