atomic 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654
  1. // -*- C++ -*- header.
  2. // Copyright (C) 2008-2022 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file include/atomic
  21. * This is a Standard C++ Library header.
  22. */
  23. // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
  24. // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
  25. #ifndef _GLIBCXX_ATOMIC
  26. #define _GLIBCXX_ATOMIC 1
  27. #pragma GCC system_header
  28. #if __cplusplus < 201103L
  29. # include <bits/c++0x_warning.h>
  30. #else
  31. #include <bits/atomic_base.h>
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /**
  36. * @addtogroup atomics
  37. * @{
  38. */
  39. #if __cplusplus >= 201703L
  40. # define __cpp_lib_atomic_is_always_lock_free 201603L
  41. #endif
  42. template<typename _Tp>
  43. struct atomic;
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
  struct atomic<bool>
  {
    using value_type = bool;

  private:
    // Every operation forwards to the boolean base type.
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    // Atomic objects are neither copyable nor copy-assignable.
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    // Non-atomic initialization with value @a __i.
    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    // Plain assignment performs a store (seq_cst by delegation) and
    // returns the stored value, not *this.
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    // Implicit conversion performs a load with the default (seq_cst) order.
    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    // True iff operations on this object are implemented lock-free.
    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
    // C++17: compile-time lock-freedom, from the platform macro.
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    // Atomically replace the value, returning the previous value.
    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    // Compare-and-swap: on failure, __i1 is updated with the value
    // observed.  __m1 is the success ordering, __m2 the failure ordering.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order overloads: the base derives the failure ordering.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
    // C++20 atomic waiting: block until the value is observed to differ
    // from __old.
    void
    wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    { _M_base.wait(__old, __m); }

    // TODO add const volatile overload

    void
    notify_one() noexcept
    { _M_base.notify_one(); }

    void
    notify_all() noexcept
    { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
  };
  140. #if __cplusplus <= 201703L
  141. # define _GLIBCXX20_INIT(I)
  142. #else
  143. # define _GLIBCXX20_INIT(I) = I
  144. #endif
  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // A power-of-two size no greater than 16 yields that size as the
      // minimum alignment, which lets the compiler select lock-free
      // instructions; any other size yields 0, i.e. no extra requirement.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      // The stored value.  In C++20 the default constructor
      // value-initializes it; before C++20 it is left uninitialized.
      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // C++20 additionally requires the value type to be copyable and
      // movable.
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      // Atomic objects are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Non-atomic initialization.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      // Implicit conversion performs a seq_cst load.
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      // Plain assignment performs a seq_cst store and returns the
      // assigned value.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.  The address
        // -_S_alignment is aligned to exactly _S_alignment, so the result
        // reflects the guaranteed alignment of _M_i rather than the
        // actual address of this particular object.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // Second argument 0 means "assume typical alignment for the size".
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // Load into a suitably aligned raw buffer so no default-constructed
        // _Tp is needed, then return a copy.
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      // Atomically replace the value, returning the previous value.
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      // Weak CAS (may fail spuriously): __s is the success ordering, __f
      // the failure ordering.  On failure, __e receives the observed value.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        // The failure ordering may not be release or acq_rel.
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      // Single-order overloads derive a valid failure ordering from __m.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      // Strong CAS: fails only if the stored value differs from __e.
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      // C++20 atomic waiting: block until a load with order __m observes a
      // value different from __old.
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
    };
  328. #undef _GLIBCXX20_INIT
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;

      // Unlike the integral specializations, this one contains (rather
      // than inherits from) the base type.
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      // Atomic objects are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Non-atomic initialization.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      // Implicit conversion performs a seq_cst load.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      // Plain assignment performs a seq_cst store, returning the stored
      // pointer.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Post-increment: atomically advances the pointer, returning the
      // old value.  Pointer arithmetic requires a pointer to object type
      // (checked here since C++17).
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      // Pre-increment/decrement return the new value.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      // Atomically replace the pointer, returning the previous value.
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // CAS: on failure, __p1 is updated with the observed value.  __m1 is
      // the success ordering, __m2 the failure ordering.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      // C++20 atomic waiting: block until the pointer differs from __old.
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      // Atomic pointer arithmetic, scaled by sizeof(_Tp) in the base;
      // returns the previous value.  Requires a pointer to object type.
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
  577. /// Explicit specialization for char.
  578. template<>
  579. struct atomic<char> : __atomic_base<char>
  580. {
  581. typedef char __integral_type;
  582. typedef __atomic_base<char> __base_type;
  583. atomic() noexcept = default;
  584. ~atomic() noexcept = default;
  585. atomic(const atomic&) = delete;
  586. atomic& operator=(const atomic&) = delete;
  587. atomic& operator=(const atomic&) volatile = delete;
  588. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  589. using __base_type::operator __integral_type;
  590. using __base_type::operator=;
  591. #if __cplusplus >= 201703L
  592. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  593. #endif
  594. };
  595. /// Explicit specialization for signed char.
  596. template<>
  597. struct atomic<signed char> : __atomic_base<signed char>
  598. {
  599. typedef signed char __integral_type;
  600. typedef __atomic_base<signed char> __base_type;
  601. atomic() noexcept= default;
  602. ~atomic() noexcept = default;
  603. atomic(const atomic&) = delete;
  604. atomic& operator=(const atomic&) = delete;
  605. atomic& operator=(const atomic&) volatile = delete;
  606. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  607. using __base_type::operator __integral_type;
  608. using __base_type::operator=;
  609. #if __cplusplus >= 201703L
  610. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  611. #endif
  612. };
  613. /// Explicit specialization for unsigned char.
  614. template<>
  615. struct atomic<unsigned char> : __atomic_base<unsigned char>
  616. {
  617. typedef unsigned char __integral_type;
  618. typedef __atomic_base<unsigned char> __base_type;
  619. atomic() noexcept= default;
  620. ~atomic() noexcept = default;
  621. atomic(const atomic&) = delete;
  622. atomic& operator=(const atomic&) = delete;
  623. atomic& operator=(const atomic&) volatile = delete;
  624. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  625. using __base_type::operator __integral_type;
  626. using __base_type::operator=;
  627. #if __cplusplus >= 201703L
  628. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  629. #endif
  630. };
  631. /// Explicit specialization for short.
  632. template<>
  633. struct atomic<short> : __atomic_base<short>
  634. {
  635. typedef short __integral_type;
  636. typedef __atomic_base<short> __base_type;
  637. atomic() noexcept = default;
  638. ~atomic() noexcept = default;
  639. atomic(const atomic&) = delete;
  640. atomic& operator=(const atomic&) = delete;
  641. atomic& operator=(const atomic&) volatile = delete;
  642. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  643. using __base_type::operator __integral_type;
  644. using __base_type::operator=;
  645. #if __cplusplus >= 201703L
  646. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  647. #endif
  648. };
  649. /// Explicit specialization for unsigned short.
  650. template<>
  651. struct atomic<unsigned short> : __atomic_base<unsigned short>
  652. {
  653. typedef unsigned short __integral_type;
  654. typedef __atomic_base<unsigned short> __base_type;
  655. atomic() noexcept = default;
  656. ~atomic() noexcept = default;
  657. atomic(const atomic&) = delete;
  658. atomic& operator=(const atomic&) = delete;
  659. atomic& operator=(const atomic&) volatile = delete;
  660. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  661. using __base_type::operator __integral_type;
  662. using __base_type::operator=;
  663. #if __cplusplus >= 201703L
  664. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  665. #endif
  666. };
  667. /// Explicit specialization for int.
  668. template<>
  669. struct atomic<int> : __atomic_base<int>
  670. {
  671. typedef int __integral_type;
  672. typedef __atomic_base<int> __base_type;
  673. atomic() noexcept = default;
  674. ~atomic() noexcept = default;
  675. atomic(const atomic&) = delete;
  676. atomic& operator=(const atomic&) = delete;
  677. atomic& operator=(const atomic&) volatile = delete;
  678. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  679. using __base_type::operator __integral_type;
  680. using __base_type::operator=;
  681. #if __cplusplus >= 201703L
  682. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  683. #endif
  684. };
  685. /// Explicit specialization for unsigned int.
  686. template<>
  687. struct atomic<unsigned int> : __atomic_base<unsigned int>
  688. {
  689. typedef unsigned int __integral_type;
  690. typedef __atomic_base<unsigned int> __base_type;
  691. atomic() noexcept = default;
  692. ~atomic() noexcept = default;
  693. atomic(const atomic&) = delete;
  694. atomic& operator=(const atomic&) = delete;
  695. atomic& operator=(const atomic&) volatile = delete;
  696. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  697. using __base_type::operator __integral_type;
  698. using __base_type::operator=;
  699. #if __cplusplus >= 201703L
  700. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  701. #endif
  702. };
  703. /// Explicit specialization for long.
  704. template<>
  705. struct atomic<long> : __atomic_base<long>
  706. {
  707. typedef long __integral_type;
  708. typedef __atomic_base<long> __base_type;
  709. atomic() noexcept = default;
  710. ~atomic() noexcept = default;
  711. atomic(const atomic&) = delete;
  712. atomic& operator=(const atomic&) = delete;
  713. atomic& operator=(const atomic&) volatile = delete;
  714. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  715. using __base_type::operator __integral_type;
  716. using __base_type::operator=;
  717. #if __cplusplus >= 201703L
  718. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  719. #endif
  720. };
  721. /// Explicit specialization for unsigned long.
  722. template<>
  723. struct atomic<unsigned long> : __atomic_base<unsigned long>
  724. {
  725. typedef unsigned long __integral_type;
  726. typedef __atomic_base<unsigned long> __base_type;
  727. atomic() noexcept = default;
  728. ~atomic() noexcept = default;
  729. atomic(const atomic&) = delete;
  730. atomic& operator=(const atomic&) = delete;
  731. atomic& operator=(const atomic&) volatile = delete;
  732. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  733. using __base_type::operator __integral_type;
  734. using __base_type::operator=;
  735. #if __cplusplus >= 201703L
  736. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  737. #endif
  738. };
  739. /// Explicit specialization for long long.
  740. template<>
  741. struct atomic<long long> : __atomic_base<long long>
  742. {
  743. typedef long long __integral_type;
  744. typedef __atomic_base<long long> __base_type;
  745. atomic() noexcept = default;
  746. ~atomic() noexcept = default;
  747. atomic(const atomic&) = delete;
  748. atomic& operator=(const atomic&) = delete;
  749. atomic& operator=(const atomic&) volatile = delete;
  750. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  751. using __base_type::operator __integral_type;
  752. using __base_type::operator=;
  753. #if __cplusplus >= 201703L
  754. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  755. #endif
  756. };
  757. /// Explicit specialization for unsigned long long.
  758. template<>
  759. struct atomic<unsigned long long> : __atomic_base<unsigned long long>
  760. {
  761. typedef unsigned long long __integral_type;
  762. typedef __atomic_base<unsigned long long> __base_type;
  763. atomic() noexcept = default;
  764. ~atomic() noexcept = default;
  765. atomic(const atomic&) = delete;
  766. atomic& operator=(const atomic&) = delete;
  767. atomic& operator=(const atomic&) volatile = delete;
  768. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  769. using __base_type::operator __integral_type;
  770. using __base_type::operator=;
  771. #if __cplusplus >= 201703L
  772. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  773. #endif
  774. };
  775. /// Explicit specialization for wchar_t.
  776. template<>
  777. struct atomic<wchar_t> : __atomic_base<wchar_t>
  778. {
  779. typedef wchar_t __integral_type;
  780. typedef __atomic_base<wchar_t> __base_type;
  781. atomic() noexcept = default;
  782. ~atomic() noexcept = default;
  783. atomic(const atomic&) = delete;
  784. atomic& operator=(const atomic&) = delete;
  785. atomic& operator=(const atomic&) volatile = delete;
  786. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  787. using __base_type::operator __integral_type;
  788. using __base_type::operator=;
  789. #if __cplusplus >= 201703L
  790. static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
  791. #endif
  792. };
  793. #ifdef _GLIBCXX_USE_CHAR8_T
  794. /// Explicit specialization for char8_t.
  795. template<>
  796. struct atomic<char8_t> : __atomic_base<char8_t>
  797. {
  798. typedef char8_t __integral_type;
  799. typedef __atomic_base<char8_t> __base_type;
  800. atomic() noexcept = default;
  801. ~atomic() noexcept = default;
  802. atomic(const atomic&) = delete;
  803. atomic& operator=(const atomic&) = delete;
  804. atomic& operator=(const atomic&) volatile = delete;
  805. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  806. using __base_type::operator __integral_type;
  807. using __base_type::operator=;
  808. #if __cplusplus > 201402L
  809. static constexpr bool is_always_lock_free
  810. = ATOMIC_CHAR8_T_LOCK_FREE == 2;
  811. #endif
  812. };
  813. #endif
  814. /// Explicit specialization for char16_t.
  815. template<>
  816. struct atomic<char16_t> : __atomic_base<char16_t>
  817. {
  818. typedef char16_t __integral_type;
  819. typedef __atomic_base<char16_t> __base_type;
  820. atomic() noexcept = default;
  821. ~atomic() noexcept = default;
  822. atomic(const atomic&) = delete;
  823. atomic& operator=(const atomic&) = delete;
  824. atomic& operator=(const atomic&) volatile = delete;
  825. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  826. using __base_type::operator __integral_type;
  827. using __base_type::operator=;
  828. #if __cplusplus >= 201703L
  829. static constexpr bool is_always_lock_free
  830. = ATOMIC_CHAR16_T_LOCK_FREE == 2;
  831. #endif
  832. };
  833. /// Explicit specialization for char32_t.
  834. template<>
  835. struct atomic<char32_t> : __atomic_base<char32_t>
  836. {
  837. typedef char32_t __integral_type;
  838. typedef __atomic_base<char32_t> __base_type;
  839. atomic() noexcept = default;
  840. ~atomic() noexcept = default;
  841. atomic(const atomic&) = delete;
  842. atomic& operator=(const atomic&) = delete;
  843. atomic& operator=(const atomic&) volatile = delete;
  844. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  845. using __base_type::operator __integral_type;
  846. using __base_type::operator=;
  847. #if __cplusplus >= 201703L
  848. static constexpr bool is_always_lock_free
  849. = ATOMIC_CHAR32_T_LOCK_FREE == 2;
  850. #endif
  851. };
  852. /// atomic_bool
  853. typedef atomic<bool> atomic_bool;
  854. /// atomic_char
  855. typedef atomic<char> atomic_char;
  856. /// atomic_schar
  857. typedef atomic<signed char> atomic_schar;
  858. /// atomic_uchar
  859. typedef atomic<unsigned char> atomic_uchar;
  860. /// atomic_short
  861. typedef atomic<short> atomic_short;
  862. /// atomic_ushort
  863. typedef atomic<unsigned short> atomic_ushort;
  864. /// atomic_int
  865. typedef atomic<int> atomic_int;
  866. /// atomic_uint
  867. typedef atomic<unsigned int> atomic_uint;
  868. /// atomic_long
  869. typedef atomic<long> atomic_long;
  870. /// atomic_ulong
  871. typedef atomic<unsigned long> atomic_ulong;
  872. /// atomic_llong
  873. typedef atomic<long long> atomic_llong;
  874. /// atomic_ullong
  875. typedef atomic<unsigned long long> atomic_ullong;
  876. /// atomic_wchar_t
  877. typedef atomic<wchar_t> atomic_wchar_t;
  878. #ifdef _GLIBCXX_USE_CHAR8_T
  879. /// atomic_char8_t
  880. typedef atomic<char8_t> atomic_char8_t;
  881. #endif
  882. /// atomic_char16_t
  883. typedef atomic<char16_t> atomic_char16_t;
  884. /// atomic_char32_t
  885. typedef atomic<char32_t> atomic_char32_t;
  886. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  887. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  888. // 2441. Exact-width atomic typedefs should be provided
  889. /// atomic_int8_t
  890. typedef atomic<int8_t> atomic_int8_t;
  891. /// atomic_uint8_t
  892. typedef atomic<uint8_t> atomic_uint8_t;
  893. /// atomic_int16_t
  894. typedef atomic<int16_t> atomic_int16_t;
  895. /// atomic_uint16_t
  896. typedef atomic<uint16_t> atomic_uint16_t;
  897. /// atomic_int32_t
  898. typedef atomic<int32_t> atomic_int32_t;
  899. /// atomic_uint32_t
  900. typedef atomic<uint32_t> atomic_uint32_t;
  901. /// atomic_int64_t
  902. typedef atomic<int64_t> atomic_int64_t;
  903. /// atomic_uint64_t
  904. typedef atomic<uint64_t> atomic_uint64_t;
  905. /// atomic_int_least8_t
  906. typedef atomic<int_least8_t> atomic_int_least8_t;
  907. /// atomic_uint_least8_t
  908. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  909. /// atomic_int_least16_t
  910. typedef atomic<int_least16_t> atomic_int_least16_t;
  911. /// atomic_uint_least16_t
  912. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  913. /// atomic_int_least32_t
  914. typedef atomic<int_least32_t> atomic_int_least32_t;
  915. /// atomic_uint_least32_t
  916. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  917. /// atomic_int_least64_t
  918. typedef atomic<int_least64_t> atomic_int_least64_t;
  919. /// atomic_uint_least64_t
  920. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  921. /// atomic_int_fast8_t
  922. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  923. /// atomic_uint_fast8_t
  924. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  925. /// atomic_int_fast16_t
  926. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  927. /// atomic_uint_fast16_t
  928. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  929. /// atomic_int_fast32_t
  930. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  931. /// atomic_uint_fast32_t
  932. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  933. /// atomic_int_fast64_t
  934. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  935. /// atomic_uint_fast64_t
  936. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  937. #endif
  938. /// atomic_intptr_t
  939. typedef atomic<intptr_t> atomic_intptr_t;
  940. /// atomic_uintptr_t
  941. typedef atomic<uintptr_t> atomic_uintptr_t;
  942. /// atomic_size_t
  943. typedef atomic<size_t> atomic_size_t;
  944. /// atomic_ptrdiff_t
  945. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  946. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  947. /// atomic_intmax_t
  948. typedef atomic<intmax_t> atomic_intmax_t;
  949. /// atomic_uintmax_t
  950. typedef atomic<uintmax_t> atomic_uintmax_t;
  951. #endif
  952. // Function definitions, atomic_flag operations.
  953. inline bool
  954. atomic_flag_test_and_set_explicit(atomic_flag* __a,
  955. memory_order __m) noexcept
  956. { return __a->test_and_set(__m); }
  957. inline bool
  958. atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
  959. memory_order __m) noexcept
  960. { return __a->test_and_set(__m); }
  961. inline void
  962. atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  963. { __a->clear(__m); }
  964. inline void
  965. atomic_flag_clear_explicit(volatile atomic_flag* __a,
  966. memory_order __m) noexcept
  967. { __a->clear(__m); }
  968. inline bool
  969. atomic_flag_test_and_set(atomic_flag* __a) noexcept
  970. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  971. inline bool
  972. atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  973. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  974. inline void
  975. atomic_flag_clear(atomic_flag* __a) noexcept
  976. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
  977. inline void
  978. atomic_flag_clear(volatile atomic_flag* __a) noexcept
  979. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// _GLIBCXX_RESOLVE_LIB_DEFECTS
// 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
// Identity alias: used for value parameters of the non-member functions
// below so that only the atomic<_Tp>* argument participates in template
// argument deduction.
template<typename _Tp>
using __atomic_val_t = __type_identity_t<_Tp>;
// Difference type of the corresponding atomic specialization (also
// non-deduced, for the fetch_add/fetch_sub arguments).
template<typename _Tp>
using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  986. // [atomics.nonmembers] Non-member functions.
  987. // Function templates generally applicable to atomic types.
  988. template<typename _ITp>
  989. inline bool
  990. atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
  991. { return __a->is_lock_free(); }
  992. template<typename _ITp>
  993. inline bool
  994. atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
  995. { return __a->is_lock_free(); }
  996. template<typename _ITp>
  997. inline void
  998. atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  999. { __a->store(__i, memory_order_relaxed); }
  1000. template<typename _ITp>
  1001. inline void
  1002. atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1003. { __a->store(__i, memory_order_relaxed); }
  1004. template<typename _ITp>
  1005. inline void
  1006. atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  1007. memory_order __m) noexcept
  1008. { __a->store(__i, __m); }
  1009. template<typename _ITp>
  1010. inline void
  1011. atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  1012. memory_order __m) noexcept
  1013. { __a->store(__i, __m); }
  1014. template<typename _ITp>
  1015. inline _ITp
  1016. atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
  1017. { return __a->load(__m); }
  1018. template<typename _ITp>
  1019. inline _ITp
  1020. atomic_load_explicit(const volatile atomic<_ITp>* __a,
  1021. memory_order __m) noexcept
  1022. { return __a->load(__m); }
  1023. template<typename _ITp>
  1024. inline _ITp
  1025. atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  1026. memory_order __m) noexcept
  1027. { return __a->exchange(__i, __m); }
  1028. template<typename _ITp>
  1029. inline _ITp
  1030. atomic_exchange_explicit(volatile atomic<_ITp>* __a,
  1031. __atomic_val_t<_ITp> __i,
  1032. memory_order __m) noexcept
  1033. { return __a->exchange(__i, __m); }
  1034. template<typename _ITp>
  1035. inline bool
  1036. atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
  1037. __atomic_val_t<_ITp>* __i1,
  1038. __atomic_val_t<_ITp> __i2,
  1039. memory_order __m1,
  1040. memory_order __m2) noexcept
  1041. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  1042. template<typename _ITp>
  1043. inline bool
  1044. atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
  1045. __atomic_val_t<_ITp>* __i1,
  1046. __atomic_val_t<_ITp> __i2,
  1047. memory_order __m1,
  1048. memory_order __m2) noexcept
  1049. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  1050. template<typename _ITp>
  1051. inline bool
  1052. atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
  1053. __atomic_val_t<_ITp>* __i1,
  1054. __atomic_val_t<_ITp> __i2,
  1055. memory_order __m1,
  1056. memory_order __m2) noexcept
  1057. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1058. template<typename _ITp>
  1059. inline bool
  1060. atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
  1061. __atomic_val_t<_ITp>* __i1,
  1062. __atomic_val_t<_ITp> __i2,
  1063. memory_order __m1,
  1064. memory_order __m2) noexcept
  1065. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1066. template<typename _ITp>
  1067. inline void
  1068. atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1069. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1070. template<typename _ITp>
  1071. inline void
  1072. atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1073. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1074. template<typename _ITp>
  1075. inline _ITp
  1076. atomic_load(const atomic<_ITp>* __a) noexcept
  1077. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1078. template<typename _ITp>
  1079. inline _ITp
  1080. atomic_load(const volatile atomic<_ITp>* __a) noexcept
  1081. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1082. template<typename _ITp>
  1083. inline _ITp
  1084. atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1085. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1086. template<typename _ITp>
  1087. inline _ITp
  1088. atomic_exchange(volatile atomic<_ITp>* __a,
  1089. __atomic_val_t<_ITp> __i) noexcept
  1090. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1091. template<typename _ITp>
  1092. inline bool
  1093. atomic_compare_exchange_weak(atomic<_ITp>* __a,
  1094. __atomic_val_t<_ITp>* __i1,
  1095. __atomic_val_t<_ITp> __i2) noexcept
  1096. {
  1097. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1098. memory_order_seq_cst,
  1099. memory_order_seq_cst);
  1100. }
  1101. template<typename _ITp>
  1102. inline bool
  1103. atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
  1104. __atomic_val_t<_ITp>* __i1,
  1105. __atomic_val_t<_ITp> __i2) noexcept
  1106. {
  1107. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1108. memory_order_seq_cst,
  1109. memory_order_seq_cst);
  1110. }
  1111. template<typename _ITp>
  1112. inline bool
  1113. atomic_compare_exchange_strong(atomic<_ITp>* __a,
  1114. __atomic_val_t<_ITp>* __i1,
  1115. __atomic_val_t<_ITp> __i2) noexcept
  1116. {
  1117. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1118. memory_order_seq_cst,
  1119. memory_order_seq_cst);
  1120. }
  1121. template<typename _ITp>
  1122. inline bool
  1123. atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
  1124. __atomic_val_t<_ITp>* __i1,
  1125. __atomic_val_t<_ITp> __i2) noexcept
  1126. {
  1127. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1128. memory_order_seq_cst,
  1129. memory_order_seq_cst);
  1130. }
  1131. #if __cpp_lib_atomic_wait
  1132. template<typename _Tp>
  1133. inline void
  1134. atomic_wait(const atomic<_Tp>* __a,
  1135. typename std::atomic<_Tp>::value_type __old) noexcept
  1136. { __a->wait(__old); }
  1137. template<typename _Tp>
  1138. inline void
  1139. atomic_wait_explicit(const atomic<_Tp>* __a,
  1140. typename std::atomic<_Tp>::value_type __old,
  1141. std::memory_order __m) noexcept
  1142. { __a->wait(__old, __m); }
  1143. template<typename _Tp>
  1144. inline void
  1145. atomic_notify_one(atomic<_Tp>* __a) noexcept
  1146. { __a->notify_one(); }
  1147. template<typename _Tp>
  1148. inline void
  1149. atomic_notify_all(atomic<_Tp>* __a) noexcept
  1150. { __a->notify_all(); }
  1151. #endif // __cpp_lib_atomic_wait
  1152. // Function templates for atomic_integral and atomic_pointer operations only.
  1153. // Some operations (and, or, xor) are only available for atomic integrals,
  1154. // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
  1155. template<typename _ITp>
  1156. inline _ITp
  1157. atomic_fetch_add_explicit(atomic<_ITp>* __a,
  1158. __atomic_diff_t<_ITp> __i,
  1159. memory_order __m) noexcept
  1160. { return __a->fetch_add(__i, __m); }
  1161. template<typename _ITp>
  1162. inline _ITp
  1163. atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
  1164. __atomic_diff_t<_ITp> __i,
  1165. memory_order __m) noexcept
  1166. { return __a->fetch_add(__i, __m); }
  1167. template<typename _ITp>
  1168. inline _ITp
  1169. atomic_fetch_sub_explicit(atomic<_ITp>* __a,
  1170. __atomic_diff_t<_ITp> __i,
  1171. memory_order __m) noexcept
  1172. { return __a->fetch_sub(__i, __m); }
  1173. template<typename _ITp>
  1174. inline _ITp
  1175. atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
  1176. __atomic_diff_t<_ITp> __i,
  1177. memory_order __m) noexcept
  1178. { return __a->fetch_sub(__i, __m); }
  1179. template<typename _ITp>
  1180. inline _ITp
  1181. atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
  1182. __atomic_val_t<_ITp> __i,
  1183. memory_order __m) noexcept
  1184. { return __a->fetch_and(__i, __m); }
  1185. template<typename _ITp>
  1186. inline _ITp
  1187. atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
  1188. __atomic_val_t<_ITp> __i,
  1189. memory_order __m) noexcept
  1190. { return __a->fetch_and(__i, __m); }
  1191. template<typename _ITp>
  1192. inline _ITp
  1193. atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
  1194. __atomic_val_t<_ITp> __i,
  1195. memory_order __m) noexcept
  1196. { return __a->fetch_or(__i, __m); }
  1197. template<typename _ITp>
  1198. inline _ITp
  1199. atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
  1200. __atomic_val_t<_ITp> __i,
  1201. memory_order __m) noexcept
  1202. { return __a->fetch_or(__i, __m); }
  1203. template<typename _ITp>
  1204. inline _ITp
  1205. atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
  1206. __atomic_val_t<_ITp> __i,
  1207. memory_order __m) noexcept
  1208. { return __a->fetch_xor(__i, __m); }
  1209. template<typename _ITp>
  1210. inline _ITp
  1211. atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
  1212. __atomic_val_t<_ITp> __i,
  1213. memory_order __m) noexcept
  1214. { return __a->fetch_xor(__i, __m); }
  1215. template<typename _ITp>
  1216. inline _ITp
  1217. atomic_fetch_add(atomic<_ITp>* __a,
  1218. __atomic_diff_t<_ITp> __i) noexcept
  1219. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1220. template<typename _ITp>
  1221. inline _ITp
  1222. atomic_fetch_add(volatile atomic<_ITp>* __a,
  1223. __atomic_diff_t<_ITp> __i) noexcept
  1224. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1225. template<typename _ITp>
  1226. inline _ITp
  1227. atomic_fetch_sub(atomic<_ITp>* __a,
  1228. __atomic_diff_t<_ITp> __i) noexcept
  1229. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1230. template<typename _ITp>
  1231. inline _ITp
  1232. atomic_fetch_sub(volatile atomic<_ITp>* __a,
  1233. __atomic_diff_t<_ITp> __i) noexcept
  1234. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1235. template<typename _ITp>
  1236. inline _ITp
  1237. atomic_fetch_and(__atomic_base<_ITp>* __a,
  1238. __atomic_val_t<_ITp> __i) noexcept
  1239. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1240. template<typename _ITp>
  1241. inline _ITp
  1242. atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
  1243. __atomic_val_t<_ITp> __i) noexcept
  1244. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1245. template<typename _ITp>
  1246. inline _ITp
  1247. atomic_fetch_or(__atomic_base<_ITp>* __a,
  1248. __atomic_val_t<_ITp> __i) noexcept
  1249. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1250. template<typename _ITp>
  1251. inline _ITp
  1252. atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
  1253. __atomic_val_t<_ITp> __i) noexcept
  1254. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1255. template<typename _ITp>
  1256. inline _ITp
  1257. atomic_fetch_xor(__atomic_base<_ITp>* __a,
  1258. __atomic_val_t<_ITp> __i) noexcept
  1259. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1260. template<typename _ITp>
  1261. inline _ITp
  1262. atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
  1263. __atomic_val_t<_ITp> __i) noexcept
  1264. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1265. #if __cplusplus > 201703L
  1266. #define __cpp_lib_atomic_float 201711L
  1267. template<>
  1268. struct atomic<float> : __atomic_float<float>
  1269. {
  1270. atomic() noexcept = default;
  1271. constexpr
  1272. atomic(float __fp) noexcept : __atomic_float<float>(__fp)
  1273. { }
  1274. atomic& operator=(const atomic&) volatile = delete;
  1275. atomic& operator=(const atomic&) = delete;
  1276. using __atomic_float<float>::operator=;
  1277. };
  1278. template<>
  1279. struct atomic<double> : __atomic_float<double>
  1280. {
  1281. atomic() noexcept = default;
  1282. constexpr
  1283. atomic(double __fp) noexcept : __atomic_float<double>(__fp)
  1284. { }
  1285. atomic& operator=(const atomic&) volatile = delete;
  1286. atomic& operator=(const atomic&) = delete;
  1287. using __atomic_float<double>::operator=;
  1288. };
  1289. template<>
  1290. struct atomic<long double> : __atomic_float<long double>
  1291. {
  1292. atomic() noexcept = default;
  1293. constexpr
  1294. atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
  1295. { }
  1296. atomic& operator=(const atomic&) volatile = delete;
  1297. atomic& operator=(const atomic&) = delete;
  1298. using __atomic_float<long double>::operator=;
  1299. };
  1300. #define __cpp_lib_atomic_ref 201806L
  1301. /// Class template to provide atomic operations on a non-atomic variable.
  1302. template<typename _Tp>
  1303. struct atomic_ref : __atomic_ref<_Tp>
  1304. {
  1305. explicit
  1306. atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
  1307. { }
  1308. atomic_ref& operator=(const atomic_ref&) = delete;
  1309. atomic_ref(const atomic_ref&) = default;
  1310. using __atomic_ref<_Tp>::operator=;
  1311. };
  1312. #endif // C++2a
  1313. /// @} group atomics
  1314. _GLIBCXX_END_NAMESPACE_VERSION
  1315. } // namespace
  1316. #endif // C++11
  1317. #endif // _GLIBCXX_ATOMIC