// -*- C++ -*- header.

// Copyright (C) 2008-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>
#include <bits/move.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif
  /// Forward declaration of the primary template, defined below after
  /// the atomic<bool> specialization.
  template<typename _Tp>
    struct atomic;
  45. /// atomic<bool>
  46. // NB: No operators or fetch-operations for this type.
  47. template<>
  48. struct atomic<bool>
  49. {
  50. using value_type = bool;
  51. private:
  52. __atomic_base<bool> _M_base;
  53. public:
  54. atomic() noexcept = default;
  55. ~atomic() noexcept = default;
  56. atomic(const atomic&) = delete;
  57. atomic& operator=(const atomic&) = delete;
  58. atomic& operator=(const atomic&) volatile = delete;
  59. constexpr atomic(bool __i) noexcept : _M_base(__i) { }
  60. bool
  61. operator=(bool __i) noexcept
  62. { return _M_base.operator=(__i); }
  63. bool
  64. operator=(bool __i) volatile noexcept
  65. { return _M_base.operator=(__i); }
  66. operator bool() const noexcept
  67. { return _M_base.load(); }
  68. operator bool() const volatile noexcept
  69. { return _M_base.load(); }
  70. bool
  71. is_lock_free() const noexcept { return _M_base.is_lock_free(); }
  72. bool
  73. is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
  74. #if __cplusplus >= 201703L
  75. static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
  76. #endif
  77. void
  78. store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  79. { _M_base.store(__i, __m); }
  80. void
  81. store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  82. { _M_base.store(__i, __m); }
  83. bool
  84. load(memory_order __m = memory_order_seq_cst) const noexcept
  85. { return _M_base.load(__m); }
  86. bool
  87. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  88. { return _M_base.load(__m); }
  89. bool
  90. exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  91. { return _M_base.exchange(__i, __m); }
  92. bool
  93. exchange(bool __i,
  94. memory_order __m = memory_order_seq_cst) volatile noexcept
  95. { return _M_base.exchange(__i, __m); }
  96. bool
  97. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  98. memory_order __m2) noexcept
  99. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  100. bool
  101. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  102. memory_order __m2) volatile noexcept
  103. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  104. bool
  105. compare_exchange_weak(bool& __i1, bool __i2,
  106. memory_order __m = memory_order_seq_cst) noexcept
  107. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  108. bool
  109. compare_exchange_weak(bool& __i1, bool __i2,
  110. memory_order __m = memory_order_seq_cst) volatile noexcept
  111. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  112. bool
  113. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  114. memory_order __m2) noexcept
  115. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  116. bool
  117. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  118. memory_order __m2) volatile noexcept
  119. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  120. bool
  121. compare_exchange_strong(bool& __i1, bool __i2,
  122. memory_order __m = memory_order_seq_cst) noexcept
  123. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  124. bool
  125. compare_exchange_strong(bool& __i1, bool __i2,
  126. memory_order __m = memory_order_seq_cst) volatile noexcept
  127. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  128. };
  129. /**
  130. * @brief Generic atomic type, primary class template.
  131. *
  132. * @tparam _Tp Type to be made atomic, must be trivally copyable.
  133. */
  134. template<typename _Tp>
  135. struct atomic
  136. {
  137. using value_type = _Tp;
  138. private:
  139. // Align 1/2/4/8/16-byte types to at least their size.
  140. static constexpr int _S_min_alignment
  141. = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
  142. ? 0 : sizeof(_Tp);
  143. static constexpr int _S_alignment
  144. = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
  145. alignas(_S_alignment) _Tp _M_i;
  146. static_assert(__is_trivially_copyable(_Tp),
  147. "std::atomic requires a trivially copyable type");
  148. static_assert(sizeof(_Tp) > 0,
  149. "Incomplete or zero-sized types are not supported");
  150. public:
  151. atomic() noexcept = default;
  152. ~atomic() noexcept = default;
  153. atomic(const atomic&) = delete;
  154. atomic& operator=(const atomic&) = delete;
  155. atomic& operator=(const atomic&) volatile = delete;
  156. constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
  157. operator _Tp() const noexcept
  158. { return load(); }
  159. operator _Tp() const volatile noexcept
  160. { return load(); }
  161. _Tp
  162. operator=(_Tp __i) noexcept
  163. { store(__i); return __i; }
  164. _Tp
  165. operator=(_Tp __i) volatile noexcept
  166. { store(__i); return __i; }
  167. bool
  168. is_lock_free() const noexcept
  169. {
  170. // Produce a fake, minimally aligned pointer.
  171. return __atomic_is_lock_free(sizeof(_M_i),
  172. reinterpret_cast<void *>(-_S_alignment));
  173. }
  174. bool
  175. is_lock_free() const volatile noexcept
  176. {
  177. // Produce a fake, minimally aligned pointer.
  178. return __atomic_is_lock_free(sizeof(_M_i),
  179. reinterpret_cast<void *>(-_S_alignment));
  180. }
  181. #if __cplusplus >= 201703L
  182. static constexpr bool is_always_lock_free
  183. = __atomic_always_lock_free(sizeof(_M_i), 0);
  184. #endif
  185. void
  186. store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  187. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  188. void
  189. store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  190. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  191. _Tp
  192. load(memory_order __m = memory_order_seq_cst) const noexcept
  193. {
  194. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  195. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  196. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  197. return *__ptr;
  198. }
  199. _Tp
  200. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  201. {
  202. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  203. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  204. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  205. return *__ptr;
  206. }
  207. _Tp
  208. exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  209. {
  210. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  211. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  212. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  213. __ptr, int(__m));
  214. return *__ptr;
  215. }
  216. _Tp
  217. exchange(_Tp __i,
  218. memory_order __m = memory_order_seq_cst) volatile noexcept
  219. {
  220. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  221. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  222. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  223. __ptr, int(__m));
  224. return *__ptr;
  225. }
  226. bool
  227. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  228. memory_order __f) noexcept
  229. {
  230. return __atomic_compare_exchange(std::__addressof(_M_i),
  231. std::__addressof(__e),
  232. std::__addressof(__i),
  233. true, int(__s), int(__f));
  234. }
  235. bool
  236. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  237. memory_order __f) volatile noexcept
  238. {
  239. return __atomic_compare_exchange(std::__addressof(_M_i),
  240. std::__addressof(__e),
  241. std::__addressof(__i),
  242. true, int(__s), int(__f));
  243. }
  244. bool
  245. compare_exchange_weak(_Tp& __e, _Tp __i,
  246. memory_order __m = memory_order_seq_cst) noexcept
  247. { return compare_exchange_weak(__e, __i, __m,
  248. __cmpexch_failure_order(__m)); }
  249. bool
  250. compare_exchange_weak(_Tp& __e, _Tp __i,
  251. memory_order __m = memory_order_seq_cst) volatile noexcept
  252. { return compare_exchange_weak(__e, __i, __m,
  253. __cmpexch_failure_order(__m)); }
  254. bool
  255. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  256. memory_order __f) noexcept
  257. {
  258. return __atomic_compare_exchange(std::__addressof(_M_i),
  259. std::__addressof(__e),
  260. std::__addressof(__i),
  261. false, int(__s), int(__f));
  262. }
  263. bool
  264. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  265. memory_order __f) volatile noexcept
  266. {
  267. return __atomic_compare_exchange(std::__addressof(_M_i),
  268. std::__addressof(__e),
  269. std::__addressof(__i),
  270. false, int(__s), int(__f));
  271. }
  272. bool
  273. compare_exchange_strong(_Tp& __e, _Tp __i,
  274. memory_order __m = memory_order_seq_cst) noexcept
  275. { return compare_exchange_strong(__e, __i, __m,
  276. __cmpexch_failure_order(__m)); }
  277. bool
  278. compare_exchange_strong(_Tp& __e, _Tp __i,
  279. memory_order __m = memory_order_seq_cst) volatile noexcept
  280. { return compare_exchange_strong(__e, __i, __m,
  281. __cmpexch_failure_order(__m)); }
  282. };
  283. /// Partial specialization for pointer types.
  284. template<typename _Tp>
  285. struct atomic<_Tp*>
  286. {
  287. using value_type = _Tp*;
  288. using difference_type = ptrdiff_t;
  289. typedef _Tp* __pointer_type;
  290. typedef __atomic_base<_Tp*> __base_type;
  291. __base_type _M_b;
  292. atomic() noexcept = default;
  293. ~atomic() noexcept = default;
  294. atomic(const atomic&) = delete;
  295. atomic& operator=(const atomic&) = delete;
  296. atomic& operator=(const atomic&) volatile = delete;
  297. constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
  298. operator __pointer_type() const noexcept
  299. { return __pointer_type(_M_b); }
  300. operator __pointer_type() const volatile noexcept
  301. { return __pointer_type(_M_b); }
  302. __pointer_type
  303. operator=(__pointer_type __p) noexcept
  304. { return _M_b.operator=(__p); }
  305. __pointer_type
  306. operator=(__pointer_type __p) volatile noexcept
  307. { return _M_b.operator=(__p); }
  308. __pointer_type
  309. operator++(int) noexcept
  310. {
  311. #if __cplusplus >= 201703L
  312. static_assert( is_object<_Tp>::value, "pointer to object type" );
  313. #endif
  314. return _M_b++;
  315. }
  316. __pointer_type
  317. operator++(int) volatile noexcept
  318. {
  319. #if __cplusplus >= 201703L
  320. static_assert( is_object<_Tp>::value, "pointer to object type" );
  321. #endif
  322. return _M_b++;
  323. }
  324. __pointer_type
  325. operator--(int) noexcept
  326. {
  327. #if __cplusplus >= 201703L
  328. static_assert( is_object<_Tp>::value, "pointer to object type" );
  329. #endif
  330. return _M_b--;
  331. }
  332. __pointer_type
  333. operator--(int) volatile noexcept
  334. {
  335. #if __cplusplus >= 201703L
  336. static_assert( is_object<_Tp>::value, "pointer to object type" );
  337. #endif
  338. return _M_b--;
  339. }
  340. __pointer_type
  341. operator++() noexcept
  342. {
  343. #if __cplusplus >= 201703L
  344. static_assert( is_object<_Tp>::value, "pointer to object type" );
  345. #endif
  346. return ++_M_b;
  347. }
  348. __pointer_type
  349. operator++() volatile noexcept
  350. {
  351. #if __cplusplus >= 201703L
  352. static_assert( is_object<_Tp>::value, "pointer to object type" );
  353. #endif
  354. return ++_M_b;
  355. }
  356. __pointer_type
  357. operator--() noexcept
  358. {
  359. #if __cplusplus >= 201703L
  360. static_assert( is_object<_Tp>::value, "pointer to object type" );
  361. #endif
  362. return --_M_b;
  363. }
  364. __pointer_type
  365. operator--() volatile noexcept
  366. {
  367. #if __cplusplus >= 201703L
  368. static_assert( is_object<_Tp>::value, "pointer to object type" );
  369. #endif
  370. return --_M_b;
  371. }
  372. __pointer_type
  373. operator+=(ptrdiff_t __d) noexcept
  374. {
  375. #if __cplusplus >= 201703L
  376. static_assert( is_object<_Tp>::value, "pointer to object type" );
  377. #endif
  378. return _M_b.operator+=(__d);
  379. }
  380. __pointer_type
  381. operator+=(ptrdiff_t __d) volatile noexcept
  382. {
  383. #if __cplusplus >= 201703L
  384. static_assert( is_object<_Tp>::value, "pointer to object type" );
  385. #endif
  386. return _M_b.operator+=(__d);
  387. }
  388. __pointer_type
  389. operator-=(ptrdiff_t __d) noexcept
  390. {
  391. #if __cplusplus >= 201703L
  392. static_assert( is_object<_Tp>::value, "pointer to object type" );
  393. #endif
  394. return _M_b.operator-=(__d);
  395. }
  396. __pointer_type
  397. operator-=(ptrdiff_t __d) volatile noexcept
  398. {
  399. #if __cplusplus >= 201703L
  400. static_assert( is_object<_Tp>::value, "pointer to object type" );
  401. #endif
  402. return _M_b.operator-=(__d);
  403. }
  404. bool
  405. is_lock_free() const noexcept
  406. { return _M_b.is_lock_free(); }
  407. bool
  408. is_lock_free() const volatile noexcept
  409. { return _M_b.is_lock_free(); }
  410. #if __cplusplus >= 201703L
  411. static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
  412. #endif
  413. void
  414. store(__pointer_type __p,
  415. memory_order __m = memory_order_seq_cst) noexcept
  416. { return _M_b.store(__p, __m); }
  417. void
  418. store(__pointer_type __p,
  419. memory_order __m = memory_order_seq_cst) volatile noexcept
  420. { return _M_b.store(__p, __m); }
  421. __pointer_type
  422. load(memory_order __m = memory_order_seq_cst) const noexcept
  423. { return _M_b.load(__m); }
  424. __pointer_type
  425. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  426. { return _M_b.load(__m); }
  427. __pointer_type
  428. exchange(__pointer_type __p,
  429. memory_order __m = memory_order_seq_cst) noexcept
  430. { return _M_b.exchange(__p, __m); }
  431. __pointer_type
  432. exchange(__pointer_type __p,
  433. memory_order __m = memory_order_seq_cst) volatile noexcept
  434. { return _M_b.exchange(__p, __m); }
  435. bool
  436. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  437. memory_order __m1, memory_order __m2) noexcept
  438. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  439. bool
  440. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  441. memory_order __m1,
  442. memory_order __m2) volatile noexcept
  443. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  444. bool
  445. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  446. memory_order __m = memory_order_seq_cst) noexcept
  447. {
  448. return compare_exchange_weak(__p1, __p2, __m,
  449. __cmpexch_failure_order(__m));
  450. }
  451. bool
  452. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  453. memory_order __m = memory_order_seq_cst) volatile noexcept
  454. {
  455. return compare_exchange_weak(__p1, __p2, __m,
  456. __cmpexch_failure_order(__m));
  457. }
  458. bool
  459. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  460. memory_order __m1, memory_order __m2) noexcept
  461. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  462. bool
  463. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  464. memory_order __m1,
  465. memory_order __m2) volatile noexcept
  466. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  467. bool
  468. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  469. memory_order __m = memory_order_seq_cst) noexcept
  470. {
  471. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  472. __cmpexch_failure_order(__m));
  473. }
  474. bool
  475. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  476. memory_order __m = memory_order_seq_cst) volatile noexcept
  477. {
  478. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  479. __cmpexch_failure_order(__m));
  480. }
  481. __pointer_type
  482. fetch_add(ptrdiff_t __d,
  483. memory_order __m = memory_order_seq_cst) noexcept
  484. {
  485. #if __cplusplus >= 201703L
  486. static_assert( is_object<_Tp>::value, "pointer to object type" );
  487. #endif
  488. return _M_b.fetch_add(__d, __m);
  489. }
  490. __pointer_type
  491. fetch_add(ptrdiff_t __d,
  492. memory_order __m = memory_order_seq_cst) volatile noexcept
  493. {
  494. #if __cplusplus >= 201703L
  495. static_assert( is_object<_Tp>::value, "pointer to object type" );
  496. #endif
  497. return _M_b.fetch_add(__d, __m);
  498. }
  499. __pointer_type
  500. fetch_sub(ptrdiff_t __d,
  501. memory_order __m = memory_order_seq_cst) noexcept
  502. {
  503. #if __cplusplus >= 201703L
  504. static_assert( is_object<_Tp>::value, "pointer to object type" );
  505. #endif
  506. return _M_b.fetch_sub(__d, __m);
  507. }
  508. __pointer_type
  509. fetch_sub(ptrdiff_t __d,
  510. memory_order __m = memory_order_seq_cst) volatile noexcept
  511. {
  512. #if __cplusplus >= 201703L
  513. static_assert( is_object<_Tp>::value, "pointer to object type" );
  514. #endif
  515. return _M_b.fetch_sub(__d, __m);
  516. }
  517. };
  518. /// Explicit specialization for char.
  519. template<>
  520. struct atomic<char> : __atomic_base<char>
  521. {
  522. typedef char __integral_type;
  523. typedef __atomic_base<char> __base_type;
  524. atomic() noexcept = default;
  525. ~atomic() noexcept = default;
  526. atomic(const atomic&) = delete;
  527. atomic& operator=(const atomic&) = delete;
  528. atomic& operator=(const atomic&) volatile = delete;
  529. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  530. using __base_type::operator __integral_type;
  531. using __base_type::operator=;
  532. #if __cplusplus >= 201703L
  533. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  534. #endif
  535. };
  536. /// Explicit specialization for signed char.
  537. template<>
  538. struct atomic<signed char> : __atomic_base<signed char>
  539. {
  540. typedef signed char __integral_type;
  541. typedef __atomic_base<signed char> __base_type;
  542. atomic() noexcept= default;
  543. ~atomic() noexcept = default;
  544. atomic(const atomic&) = delete;
  545. atomic& operator=(const atomic&) = delete;
  546. atomic& operator=(const atomic&) volatile = delete;
  547. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  548. using __base_type::operator __integral_type;
  549. using __base_type::operator=;
  550. #if __cplusplus >= 201703L
  551. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  552. #endif
  553. };
  554. /// Explicit specialization for unsigned char.
  555. template<>
  556. struct atomic<unsigned char> : __atomic_base<unsigned char>
  557. {
  558. typedef unsigned char __integral_type;
  559. typedef __atomic_base<unsigned char> __base_type;
  560. atomic() noexcept= default;
  561. ~atomic() noexcept = default;
  562. atomic(const atomic&) = delete;
  563. atomic& operator=(const atomic&) = delete;
  564. atomic& operator=(const atomic&) volatile = delete;
  565. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  566. using __base_type::operator __integral_type;
  567. using __base_type::operator=;
  568. #if __cplusplus >= 201703L
  569. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  570. #endif
  571. };
  572. /// Explicit specialization for short.
  573. template<>
  574. struct atomic<short> : __atomic_base<short>
  575. {
  576. typedef short __integral_type;
  577. typedef __atomic_base<short> __base_type;
  578. atomic() noexcept = default;
  579. ~atomic() noexcept = default;
  580. atomic(const atomic&) = delete;
  581. atomic& operator=(const atomic&) = delete;
  582. atomic& operator=(const atomic&) volatile = delete;
  583. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  584. using __base_type::operator __integral_type;
  585. using __base_type::operator=;
  586. #if __cplusplus >= 201703L
  587. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  588. #endif
  589. };
  590. /// Explicit specialization for unsigned short.
  591. template<>
  592. struct atomic<unsigned short> : __atomic_base<unsigned short>
  593. {
  594. typedef unsigned short __integral_type;
  595. typedef __atomic_base<unsigned short> __base_type;
  596. atomic() noexcept = default;
  597. ~atomic() noexcept = default;
  598. atomic(const atomic&) = delete;
  599. atomic& operator=(const atomic&) = delete;
  600. atomic& operator=(const atomic&) volatile = delete;
  601. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  602. using __base_type::operator __integral_type;
  603. using __base_type::operator=;
  604. #if __cplusplus >= 201703L
  605. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  606. #endif
  607. };
  608. /// Explicit specialization for int.
  609. template<>
  610. struct atomic<int> : __atomic_base<int>
  611. {
  612. typedef int __integral_type;
  613. typedef __atomic_base<int> __base_type;
  614. atomic() noexcept = default;
  615. ~atomic() noexcept = default;
  616. atomic(const atomic&) = delete;
  617. atomic& operator=(const atomic&) = delete;
  618. atomic& operator=(const atomic&) volatile = delete;
  619. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  620. using __base_type::operator __integral_type;
  621. using __base_type::operator=;
  622. #if __cplusplus >= 201703L
  623. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  624. #endif
  625. };
  626. /// Explicit specialization for unsigned int.
  627. template<>
  628. struct atomic<unsigned int> : __atomic_base<unsigned int>
  629. {
  630. typedef unsigned int __integral_type;
  631. typedef __atomic_base<unsigned int> __base_type;
  632. atomic() noexcept = default;
  633. ~atomic() noexcept = default;
  634. atomic(const atomic&) = delete;
  635. atomic& operator=(const atomic&) = delete;
  636. atomic& operator=(const atomic&) volatile = delete;
  637. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  638. using __base_type::operator __integral_type;
  639. using __base_type::operator=;
  640. #if __cplusplus >= 201703L
  641. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  642. #endif
  643. };
  644. /// Explicit specialization for long.
  645. template<>
  646. struct atomic<long> : __atomic_base<long>
  647. {
  648. typedef long __integral_type;
  649. typedef __atomic_base<long> __base_type;
  650. atomic() noexcept = default;
  651. ~atomic() noexcept = default;
  652. atomic(const atomic&) = delete;
  653. atomic& operator=(const atomic&) = delete;
  654. atomic& operator=(const atomic&) volatile = delete;
  655. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  656. using __base_type::operator __integral_type;
  657. using __base_type::operator=;
  658. #if __cplusplus >= 201703L
  659. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  660. #endif
  661. };
  662. /// Explicit specialization for unsigned long.
  663. template<>
  664. struct atomic<unsigned long> : __atomic_base<unsigned long>
  665. {
  666. typedef unsigned long __integral_type;
  667. typedef __atomic_base<unsigned long> __base_type;
  668. atomic() noexcept = default;
  669. ~atomic() noexcept = default;
  670. atomic(const atomic&) = delete;
  671. atomic& operator=(const atomic&) = delete;
  672. atomic& operator=(const atomic&) volatile = delete;
  673. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  674. using __base_type::operator __integral_type;
  675. using __base_type::operator=;
  676. #if __cplusplus >= 201703L
  677. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  678. #endif
  679. };
  680. /// Explicit specialization for long long.
  681. template<>
  682. struct atomic<long long> : __atomic_base<long long>
  683. {
  684. typedef long long __integral_type;
  685. typedef __atomic_base<long long> __base_type;
  686. atomic() noexcept = default;
  687. ~atomic() noexcept = default;
  688. atomic(const atomic&) = delete;
  689. atomic& operator=(const atomic&) = delete;
  690. atomic& operator=(const atomic&) volatile = delete;
  691. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  692. using __base_type::operator __integral_type;
  693. using __base_type::operator=;
  694. #if __cplusplus >= 201703L
  695. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  696. #endif
  697. };
  698. /// Explicit specialization for unsigned long long.
  699. template<>
  700. struct atomic<unsigned long long> : __atomic_base<unsigned long long>
  701. {
  702. typedef unsigned long long __integral_type;
  703. typedef __atomic_base<unsigned long long> __base_type;
  704. atomic() noexcept = default;
  705. ~atomic() noexcept = default;
  706. atomic(const atomic&) = delete;
  707. atomic& operator=(const atomic&) = delete;
  708. atomic& operator=(const atomic&) volatile = delete;
  709. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  710. using __base_type::operator __integral_type;
  711. using __base_type::operator=;
  712. #if __cplusplus >= 201703L
  713. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  714. #endif
  715. };
  716. /// Explicit specialization for wchar_t.
  717. template<>
  718. struct atomic<wchar_t> : __atomic_base<wchar_t>
  719. {
  720. typedef wchar_t __integral_type;
  721. typedef __atomic_base<wchar_t> __base_type;
  722. atomic() noexcept = default;
  723. ~atomic() noexcept = default;
  724. atomic(const atomic&) = delete;
  725. atomic& operator=(const atomic&) = delete;
  726. atomic& operator=(const atomic&) volatile = delete;
  727. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  728. using __base_type::operator __integral_type;
  729. using __base_type::operator=;
  730. #if __cplusplus >= 201703L
  731. static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
  732. #endif
  733. };
  734. #ifdef _GLIBCXX_USE_CHAR8_T
  735. /// Explicit specialization for char8_t.
  736. template<>
  737. struct atomic<char8_t> : __atomic_base<char8_t>
  738. {
  739. typedef char8_t __integral_type;
  740. typedef __atomic_base<char8_t> __base_type;
  741. atomic() noexcept = default;
  742. ~atomic() noexcept = default;
  743. atomic(const atomic&) = delete;
  744. atomic& operator=(const atomic&) = delete;
  745. atomic& operator=(const atomic&) volatile = delete;
  746. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  747. using __base_type::operator __integral_type;
  748. using __base_type::operator=;
  749. #if __cplusplus > 201402L
  750. static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
  751. #endif
  752. };
  753. #endif
  754. /// Explicit specialization for char16_t.
  755. template<>
  756. struct atomic<char16_t> : __atomic_base<char16_t>
  757. {
  758. typedef char16_t __integral_type;
  759. typedef __atomic_base<char16_t> __base_type;
  760. atomic() noexcept = default;
  761. ~atomic() noexcept = default;
  762. atomic(const atomic&) = delete;
  763. atomic& operator=(const atomic&) = delete;
  764. atomic& operator=(const atomic&) volatile = delete;
  765. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  766. using __base_type::operator __integral_type;
  767. using __base_type::operator=;
  768. #if __cplusplus >= 201703L
  769. static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
  770. #endif
  771. };
  772. /// Explicit specialization for char32_t.
  773. template<>
  774. struct atomic<char32_t> : __atomic_base<char32_t>
  775. {
  776. typedef char32_t __integral_type;
  777. typedef __atomic_base<char32_t> __base_type;
  778. atomic() noexcept = default;
  779. ~atomic() noexcept = default;
  780. atomic(const atomic&) = delete;
  781. atomic& operator=(const atomic&) = delete;
  782. atomic& operator=(const atomic&) volatile = delete;
  783. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  784. using __base_type::operator __integral_type;
  785. using __base_type::operator=;
  786. #if __cplusplus >= 201703L
  787. static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
  788. #endif
  789. };
  790. /// atomic_bool
  791. typedef atomic<bool> atomic_bool;
  792. /// atomic_char
  793. typedef atomic<char> atomic_char;
  794. /// atomic_schar
  795. typedef atomic<signed char> atomic_schar;
  796. /// atomic_uchar
  797. typedef atomic<unsigned char> atomic_uchar;
  798. /// atomic_short
  799. typedef atomic<short> atomic_short;
  800. /// atomic_ushort
  801. typedef atomic<unsigned short> atomic_ushort;
  802. /// atomic_int
  803. typedef atomic<int> atomic_int;
  804. /// atomic_uint
  805. typedef atomic<unsigned int> atomic_uint;
  806. /// atomic_long
  807. typedef atomic<long> atomic_long;
  808. /// atomic_ulong
  809. typedef atomic<unsigned long> atomic_ulong;
  810. /// atomic_llong
  811. typedef atomic<long long> atomic_llong;
  812. /// atomic_ullong
  813. typedef atomic<unsigned long long> atomic_ullong;
  814. /// atomic_wchar_t
  815. typedef atomic<wchar_t> atomic_wchar_t;
  816. #ifdef _GLIBCXX_USE_CHAR8_T
  817. /// atomic_char8_t
  818. typedef atomic<char8_t> atomic_char8_t;
  819. #endif
  820. /// atomic_char16_t
  821. typedef atomic<char16_t> atomic_char16_t;
  822. /// atomic_char32_t
  823. typedef atomic<char32_t> atomic_char32_t;
  824. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  825. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  826. // 2441. Exact-width atomic typedefs should be provided
  827. /// atomic_int8_t
  828. typedef atomic<int8_t> atomic_int8_t;
  829. /// atomic_uint8_t
  830. typedef atomic<uint8_t> atomic_uint8_t;
  831. /// atomic_int16_t
  832. typedef atomic<int16_t> atomic_int16_t;
  833. /// atomic_uint16_t
  834. typedef atomic<uint16_t> atomic_uint16_t;
  835. /// atomic_int32_t
  836. typedef atomic<int32_t> atomic_int32_t;
  837. /// atomic_uint32_t
  838. typedef atomic<uint32_t> atomic_uint32_t;
  839. /// atomic_int64_t
  840. typedef atomic<int64_t> atomic_int64_t;
  841. /// atomic_uint64_t
  842. typedef atomic<uint64_t> atomic_uint64_t;
  843. /// atomic_int_least8_t
  844. typedef atomic<int_least8_t> atomic_int_least8_t;
  845. /// atomic_uint_least8_t
  846. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  847. /// atomic_int_least16_t
  848. typedef atomic<int_least16_t> atomic_int_least16_t;
  849. /// atomic_uint_least16_t
  850. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  851. /// atomic_int_least32_t
  852. typedef atomic<int_least32_t> atomic_int_least32_t;
  853. /// atomic_uint_least32_t
  854. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  855. /// atomic_int_least64_t
  856. typedef atomic<int_least64_t> atomic_int_least64_t;
  857. /// atomic_uint_least64_t
  858. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  859. /// atomic_int_fast8_t
  860. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  861. /// atomic_uint_fast8_t
  862. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  863. /// atomic_int_fast16_t
  864. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  865. /// atomic_uint_fast16_t
  866. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  867. /// atomic_int_fast32_t
  868. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  869. /// atomic_uint_fast32_t
  870. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  871. /// atomic_int_fast64_t
  872. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  873. /// atomic_uint_fast64_t
  874. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  875. #endif
  876. /// atomic_intptr_t
  877. typedef atomic<intptr_t> atomic_intptr_t;
  878. /// atomic_uintptr_t
  879. typedef atomic<uintptr_t> atomic_uintptr_t;
  880. /// atomic_size_t
  881. typedef atomic<size_t> atomic_size_t;
  882. /// atomic_ptrdiff_t
  883. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  884. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  885. /// atomic_intmax_t
  886. typedef atomic<intmax_t> atomic_intmax_t;
  887. /// atomic_uintmax_t
  888. typedef atomic<uintmax_t> atomic_uintmax_t;
  889. #endif
  890. // Function definitions, atomic_flag operations.
  891. inline bool
  892. atomic_flag_test_and_set_explicit(atomic_flag* __a,
  893. memory_order __m) noexcept
  894. { return __a->test_and_set(__m); }
  895. inline bool
  896. atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
  897. memory_order __m) noexcept
  898. { return __a->test_and_set(__m); }
  899. inline void
  900. atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  901. { __a->clear(__m); }
  902. inline void
  903. atomic_flag_clear_explicit(volatile atomic_flag* __a,
  904. memory_order __m) noexcept
  905. { __a->clear(__m); }
  906. inline bool
  907. atomic_flag_test_and_set(atomic_flag* __a) noexcept
  908. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  909. inline bool
  910. atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  911. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  912. inline void
  913. atomic_flag_clear(atomic_flag* __a) noexcept
  914. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
  915. inline void
  916. atomic_flag_clear(volatile atomic_flag* __a) noexcept
  917. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
  918. template<typename _Tp>
  919. using __atomic_val_t = typename atomic<_Tp>::value_type;
  920. template<typename _Tp>
  921. using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  922. // [atomics.nonmembers] Non-member functions.
  923. // Function templates generally applicable to atomic types.
  924. template<typename _ITp>
  925. inline bool
  926. atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
  927. { return __a->is_lock_free(); }
  928. template<typename _ITp>
  929. inline bool
  930. atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
  931. { return __a->is_lock_free(); }
  932. template<typename _ITp>
  933. inline void
  934. atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  935. { __a->store(__i, memory_order_relaxed); }
  936. template<typename _ITp>
  937. inline void
  938. atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  939. { __a->store(__i, memory_order_relaxed); }
  940. template<typename _ITp>
  941. inline void
  942. atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  943. memory_order __m) noexcept
  944. { __a->store(__i, __m); }
  945. template<typename _ITp>
  946. inline void
  947. atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  948. memory_order __m) noexcept
  949. { __a->store(__i, __m); }
  950. template<typename _ITp>
  951. inline _ITp
  952. atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
  953. { return __a->load(__m); }
  954. template<typename _ITp>
  955. inline _ITp
  956. atomic_load_explicit(const volatile atomic<_ITp>* __a,
  957. memory_order __m) noexcept
  958. { return __a->load(__m); }
  959. template<typename _ITp>
  960. inline _ITp
  961. atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  962. memory_order __m) noexcept
  963. { return __a->exchange(__i, __m); }
  964. template<typename _ITp>
  965. inline _ITp
  966. atomic_exchange_explicit(volatile atomic<_ITp>* __a,
  967. __atomic_val_t<_ITp> __i,
  968. memory_order __m) noexcept
  969. { return __a->exchange(__i, __m); }
  970. template<typename _ITp>
  971. inline bool
  972. atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
  973. __atomic_val_t<_ITp>* __i1,
  974. __atomic_val_t<_ITp> __i2,
  975. memory_order __m1,
  976. memory_order __m2) noexcept
  977. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  978. template<typename _ITp>
  979. inline bool
  980. atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
  981. __atomic_val_t<_ITp>* __i1,
  982. __atomic_val_t<_ITp> __i2,
  983. memory_order __m1,
  984. memory_order __m2) noexcept
  985. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  986. template<typename _ITp>
  987. inline bool
  988. atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
  989. __atomic_val_t<_ITp>* __i1,
  990. __atomic_val_t<_ITp> __i2,
  991. memory_order __m1,
  992. memory_order __m2) noexcept
  993. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  994. template<typename _ITp>
  995. inline bool
  996. atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
  997. __atomic_val_t<_ITp>* __i1,
  998. __atomic_val_t<_ITp> __i2,
  999. memory_order __m1,
  1000. memory_order __m2) noexcept
  1001. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1002. template<typename _ITp>
  1003. inline void
  1004. atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1005. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1006. template<typename _ITp>
  1007. inline void
  1008. atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1009. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1010. template<typename _ITp>
  1011. inline _ITp
  1012. atomic_load(const atomic<_ITp>* __a) noexcept
  1013. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1014. template<typename _ITp>
  1015. inline _ITp
  1016. atomic_load(const volatile atomic<_ITp>* __a) noexcept
  1017. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1018. template<typename _ITp>
  1019. inline _ITp
  1020. atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1021. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1022. template<typename _ITp>
  1023. inline _ITp
  1024. atomic_exchange(volatile atomic<_ITp>* __a,
  1025. __atomic_val_t<_ITp> __i) noexcept
  1026. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1027. template<typename _ITp>
  1028. inline bool
  1029. atomic_compare_exchange_weak(atomic<_ITp>* __a,
  1030. __atomic_val_t<_ITp>* __i1,
  1031. __atomic_val_t<_ITp> __i2) noexcept
  1032. {
  1033. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1034. memory_order_seq_cst,
  1035. memory_order_seq_cst);
  1036. }
  1037. template<typename _ITp>
  1038. inline bool
  1039. atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
  1040. __atomic_val_t<_ITp>* __i1,
  1041. __atomic_val_t<_ITp> __i2) noexcept
  1042. {
  1043. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1044. memory_order_seq_cst,
  1045. memory_order_seq_cst);
  1046. }
  1047. template<typename _ITp>
  1048. inline bool
  1049. atomic_compare_exchange_strong(atomic<_ITp>* __a,
  1050. __atomic_val_t<_ITp>* __i1,
  1051. __atomic_val_t<_ITp> __i2) noexcept
  1052. {
  1053. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1054. memory_order_seq_cst,
  1055. memory_order_seq_cst);
  1056. }
  1057. template<typename _ITp>
  1058. inline bool
  1059. atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
  1060. __atomic_val_t<_ITp>* __i1,
  1061. __atomic_val_t<_ITp> __i2) noexcept
  1062. {
  1063. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1064. memory_order_seq_cst,
  1065. memory_order_seq_cst);
  1066. }
  1067. // Function templates for atomic_integral and atomic_pointer operations only.
  1068. // Some operations (and, or, xor) are only available for atomic integrals,
  1069. // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
  1070. template<typename _ITp>
  1071. inline _ITp
  1072. atomic_fetch_add_explicit(atomic<_ITp>* __a,
  1073. __atomic_diff_t<_ITp> __i,
  1074. memory_order __m) noexcept
  1075. { return __a->fetch_add(__i, __m); }
  1076. template<typename _ITp>
  1077. inline _ITp
  1078. atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
  1079. __atomic_diff_t<_ITp> __i,
  1080. memory_order __m) noexcept
  1081. { return __a->fetch_add(__i, __m); }
  1082. template<typename _ITp>
  1083. inline _ITp
  1084. atomic_fetch_sub_explicit(atomic<_ITp>* __a,
  1085. __atomic_diff_t<_ITp> __i,
  1086. memory_order __m) noexcept
  1087. { return __a->fetch_sub(__i, __m); }
  1088. template<typename _ITp>
  1089. inline _ITp
  1090. atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
  1091. __atomic_diff_t<_ITp> __i,
  1092. memory_order __m) noexcept
  1093. { return __a->fetch_sub(__i, __m); }
  1094. template<typename _ITp>
  1095. inline _ITp
  1096. atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
  1097. __atomic_val_t<_ITp> __i,
  1098. memory_order __m) noexcept
  1099. { return __a->fetch_and(__i, __m); }
  1100. template<typename _ITp>
  1101. inline _ITp
  1102. atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
  1103. __atomic_val_t<_ITp> __i,
  1104. memory_order __m) noexcept
  1105. { return __a->fetch_and(__i, __m); }
  1106. template<typename _ITp>
  1107. inline _ITp
  1108. atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
  1109. __atomic_val_t<_ITp> __i,
  1110. memory_order __m) noexcept
  1111. { return __a->fetch_or(__i, __m); }
  1112. template<typename _ITp>
  1113. inline _ITp
  1114. atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
  1115. __atomic_val_t<_ITp> __i,
  1116. memory_order __m) noexcept
  1117. { return __a->fetch_or(__i, __m); }
  1118. template<typename _ITp>
  1119. inline _ITp
  1120. atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
  1121. __atomic_val_t<_ITp> __i,
  1122. memory_order __m) noexcept
  1123. { return __a->fetch_xor(__i, __m); }
  1124. template<typename _ITp>
  1125. inline _ITp
  1126. atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
  1127. __atomic_val_t<_ITp> __i,
  1128. memory_order __m) noexcept
  1129. { return __a->fetch_xor(__i, __m); }
  1130. template<typename _ITp>
  1131. inline _ITp
  1132. atomic_fetch_add(atomic<_ITp>* __a,
  1133. __atomic_diff_t<_ITp> __i) noexcept
  1134. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1135. template<typename _ITp>
  1136. inline _ITp
  1137. atomic_fetch_add(volatile atomic<_ITp>* __a,
  1138. __atomic_diff_t<_ITp> __i) noexcept
  1139. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1140. template<typename _ITp>
  1141. inline _ITp
  1142. atomic_fetch_sub(atomic<_ITp>* __a,
  1143. __atomic_diff_t<_ITp> __i) noexcept
  1144. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1145. template<typename _ITp>
  1146. inline _ITp
  1147. atomic_fetch_sub(volatile atomic<_ITp>* __a,
  1148. __atomic_diff_t<_ITp> __i) noexcept
  1149. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1150. template<typename _ITp>
  1151. inline _ITp
  1152. atomic_fetch_and(__atomic_base<_ITp>* __a,
  1153. __atomic_val_t<_ITp> __i) noexcept
  1154. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1155. template<typename _ITp>
  1156. inline _ITp
  1157. atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
  1158. __atomic_val_t<_ITp> __i) noexcept
  1159. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1160. template<typename _ITp>
  1161. inline _ITp
  1162. atomic_fetch_or(__atomic_base<_ITp>* __a,
  1163. __atomic_val_t<_ITp> __i) noexcept
  1164. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1165. template<typename _ITp>
  1166. inline _ITp
  1167. atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
  1168. __atomic_val_t<_ITp> __i) noexcept
  1169. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1170. template<typename _ITp>
  1171. inline _ITp
  1172. atomic_fetch_xor(__atomic_base<_ITp>* __a,
  1173. __atomic_val_t<_ITp> __i) noexcept
  1174. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1175. template<typename _ITp>
  1176. inline _ITp
  1177. atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
  1178. __atomic_val_t<_ITp> __i) noexcept
  1179. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
// @} group atomics
  1181. _GLIBCXX_END_NAMESPACE_VERSION
  1182. } // namespace
  1183. #endif // C++11
  1184. #endif // _GLIBCXX_ATOMIC