// -*- C++ -*- header.

// Copyright (C) 2008-2020 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
  25. #ifndef _GLIBCXX_ATOMIC
  26. #define _GLIBCXX_ATOMIC 1
  27. #pragma GCC system_header
  28. #if __cplusplus < 201103L
  29. # include <bits/c++0x_warning.h>
  30. #else
  31. #include <bits/atomic_base.h>
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /**
  36. * @addtogroup atomics
  37. * @{
  38. */
  39. #if __cplusplus >= 201703L
  40. # define __cpp_lib_atomic_is_always_lock_free 201603
  41. #endif
  42. template<typename _Tp>
  43. struct atomic;
  44. /// atomic<bool>
  45. // NB: No operators or fetch-operations for this type.
  46. template<>
  47. struct atomic<bool>
  48. {
  49. using value_type = bool;
  50. private:
  51. __atomic_base<bool> _M_base;
  52. public:
  53. atomic() noexcept = default;
  54. ~atomic() noexcept = default;
  55. atomic(const atomic&) = delete;
  56. atomic& operator=(const atomic&) = delete;
  57. atomic& operator=(const atomic&) volatile = delete;
  58. constexpr atomic(bool __i) noexcept : _M_base(__i) { }
  59. bool
  60. operator=(bool __i) noexcept
  61. { return _M_base.operator=(__i); }
  62. bool
  63. operator=(bool __i) volatile noexcept
  64. { return _M_base.operator=(__i); }
  65. operator bool() const noexcept
  66. { return _M_base.load(); }
  67. operator bool() const volatile noexcept
  68. { return _M_base.load(); }
  69. bool
  70. is_lock_free() const noexcept { return _M_base.is_lock_free(); }
  71. bool
  72. is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
  73. #if __cplusplus >= 201703L
  74. static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
  75. #endif
  76. void
  77. store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  78. { _M_base.store(__i, __m); }
  79. void
  80. store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  81. { _M_base.store(__i, __m); }
  82. bool
  83. load(memory_order __m = memory_order_seq_cst) const noexcept
  84. { return _M_base.load(__m); }
  85. bool
  86. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  87. { return _M_base.load(__m); }
  88. bool
  89. exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  90. { return _M_base.exchange(__i, __m); }
  91. bool
  92. exchange(bool __i,
  93. memory_order __m = memory_order_seq_cst) volatile noexcept
  94. { return _M_base.exchange(__i, __m); }
  95. bool
  96. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  97. memory_order __m2) noexcept
  98. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  99. bool
  100. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  101. memory_order __m2) volatile noexcept
  102. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  103. bool
  104. compare_exchange_weak(bool& __i1, bool __i2,
  105. memory_order __m = memory_order_seq_cst) noexcept
  106. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  107. bool
  108. compare_exchange_weak(bool& __i1, bool __i2,
  109. memory_order __m = memory_order_seq_cst) volatile noexcept
  110. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  111. bool
  112. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  113. memory_order __m2) noexcept
  114. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  115. bool
  116. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  117. memory_order __m2) volatile noexcept
  118. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  119. bool
  120. compare_exchange_strong(bool& __i1, bool __i2,
  121. memory_order __m = memory_order_seq_cst) noexcept
  122. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  123. bool
  124. compare_exchange_strong(bool& __i1, bool __i2,
  125. memory_order __m = memory_order_seq_cst) volatile noexcept
  126. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  127. };
  128. #if __cplusplus <= 201703L
  129. # define _GLIBCXX20_INIT(I)
  130. #else
  131. # define _GLIBCXX20_INIT(I) = I
  132. #endif
  133. /**
  134. * @brief Generic atomic type, primary class template.
  135. *
  136. * @tparam _Tp Type to be made atomic, must be trivially copyable.
  137. */
  138. template<typename _Tp>
  139. struct atomic
  140. {
  141. using value_type = _Tp;
  142. private:
  143. // Align 1/2/4/8/16-byte types to at least their size.
  144. static constexpr int _S_min_alignment
  145. = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
  146. ? 0 : sizeof(_Tp);
  147. static constexpr int _S_alignment
  148. = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
  149. alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
  150. static_assert(__is_trivially_copyable(_Tp),
  151. "std::atomic requires a trivially copyable type");
  152. static_assert(sizeof(_Tp) > 0,
  153. "Incomplete or zero-sized types are not supported");
  154. #if __cplusplus > 201703L
  155. static_assert(is_copy_constructible_v<_Tp>);
  156. static_assert(is_move_constructible_v<_Tp>);
  157. static_assert(is_copy_assignable_v<_Tp>);
  158. static_assert(is_move_assignable_v<_Tp>);
  159. #endif
  160. public:
  161. atomic() = default;
  162. ~atomic() noexcept = default;
  163. atomic(const atomic&) = delete;
  164. atomic& operator=(const atomic&) = delete;
  165. atomic& operator=(const atomic&) volatile = delete;
  166. constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
  167. operator _Tp() const noexcept
  168. { return load(); }
  169. operator _Tp() const volatile noexcept
  170. { return load(); }
  171. _Tp
  172. operator=(_Tp __i) noexcept
  173. { store(__i); return __i; }
  174. _Tp
  175. operator=(_Tp __i) volatile noexcept
  176. { store(__i); return __i; }
  177. bool
  178. is_lock_free() const noexcept
  179. {
  180. // Produce a fake, minimally aligned pointer.
  181. return __atomic_is_lock_free(sizeof(_M_i),
  182. reinterpret_cast<void *>(-_S_alignment));
  183. }
  184. bool
  185. is_lock_free() const volatile noexcept
  186. {
  187. // Produce a fake, minimally aligned pointer.
  188. return __atomic_is_lock_free(sizeof(_M_i),
  189. reinterpret_cast<void *>(-_S_alignment));
  190. }
  191. #if __cplusplus >= 201703L
  192. static constexpr bool is_always_lock_free
  193. = __atomic_always_lock_free(sizeof(_M_i), 0);
  194. #endif
  195. void
  196. store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  197. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  198. void
  199. store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  200. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  201. _Tp
  202. load(memory_order __m = memory_order_seq_cst) const noexcept
  203. {
  204. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  205. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  206. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  207. return *__ptr;
  208. }
  209. _Tp
  210. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  211. {
  212. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  213. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  214. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  215. return *__ptr;
  216. }
  217. _Tp
  218. exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  219. {
  220. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  221. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  222. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  223. __ptr, int(__m));
  224. return *__ptr;
  225. }
  226. _Tp
  227. exchange(_Tp __i,
  228. memory_order __m = memory_order_seq_cst) volatile noexcept
  229. {
  230. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  231. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  232. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  233. __ptr, int(__m));
  234. return *__ptr;
  235. }
  236. bool
  237. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  238. memory_order __f) noexcept
  239. {
  240. return __atomic_compare_exchange(std::__addressof(_M_i),
  241. std::__addressof(__e),
  242. std::__addressof(__i),
  243. true, int(__s), int(__f));
  244. }
  245. bool
  246. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  247. memory_order __f) volatile noexcept
  248. {
  249. return __atomic_compare_exchange(std::__addressof(_M_i),
  250. std::__addressof(__e),
  251. std::__addressof(__i),
  252. true, int(__s), int(__f));
  253. }
  254. bool
  255. compare_exchange_weak(_Tp& __e, _Tp __i,
  256. memory_order __m = memory_order_seq_cst) noexcept
  257. { return compare_exchange_weak(__e, __i, __m,
  258. __cmpexch_failure_order(__m)); }
  259. bool
  260. compare_exchange_weak(_Tp& __e, _Tp __i,
  261. memory_order __m = memory_order_seq_cst) volatile noexcept
  262. { return compare_exchange_weak(__e, __i, __m,
  263. __cmpexch_failure_order(__m)); }
  264. bool
  265. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  266. memory_order __f) noexcept
  267. {
  268. return __atomic_compare_exchange(std::__addressof(_M_i),
  269. std::__addressof(__e),
  270. std::__addressof(__i),
  271. false, int(__s), int(__f));
  272. }
  273. bool
  274. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  275. memory_order __f) volatile noexcept
  276. {
  277. return __atomic_compare_exchange(std::__addressof(_M_i),
  278. std::__addressof(__e),
  279. std::__addressof(__i),
  280. false, int(__s), int(__f));
  281. }
  282. bool
  283. compare_exchange_strong(_Tp& __e, _Tp __i,
  284. memory_order __m = memory_order_seq_cst) noexcept
  285. { return compare_exchange_strong(__e, __i, __m,
  286. __cmpexch_failure_order(__m)); }
  287. bool
  288. compare_exchange_strong(_Tp& __e, _Tp __i,
  289. memory_order __m = memory_order_seq_cst) volatile noexcept
  290. { return compare_exchange_strong(__e, __i, __m,
  291. __cmpexch_failure_order(__m)); }
  292. };
  293. #undef _GLIBCXX20_INIT
  294. /// Partial specialization for pointer types.
  295. template<typename _Tp>
  296. struct atomic<_Tp*>
  297. {
  298. using value_type = _Tp*;
  299. using difference_type = ptrdiff_t;
  300. typedef _Tp* __pointer_type;
  301. typedef __atomic_base<_Tp*> __base_type;
  302. __base_type _M_b;
  303. atomic() noexcept = default;
  304. ~atomic() noexcept = default;
  305. atomic(const atomic&) = delete;
  306. atomic& operator=(const atomic&) = delete;
  307. atomic& operator=(const atomic&) volatile = delete;
  308. constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
  309. operator __pointer_type() const noexcept
  310. { return __pointer_type(_M_b); }
  311. operator __pointer_type() const volatile noexcept
  312. { return __pointer_type(_M_b); }
  313. __pointer_type
  314. operator=(__pointer_type __p) noexcept
  315. { return _M_b.operator=(__p); }
  316. __pointer_type
  317. operator=(__pointer_type __p) volatile noexcept
  318. { return _M_b.operator=(__p); }
  319. __pointer_type
  320. operator++(int) noexcept
  321. {
  322. #if __cplusplus >= 201703L
  323. static_assert( is_object<_Tp>::value, "pointer to object type" );
  324. #endif
  325. return _M_b++;
  326. }
  327. __pointer_type
  328. operator++(int) volatile noexcept
  329. {
  330. #if __cplusplus >= 201703L
  331. static_assert( is_object<_Tp>::value, "pointer to object type" );
  332. #endif
  333. return _M_b++;
  334. }
  335. __pointer_type
  336. operator--(int) noexcept
  337. {
  338. #if __cplusplus >= 201703L
  339. static_assert( is_object<_Tp>::value, "pointer to object type" );
  340. #endif
  341. return _M_b--;
  342. }
  343. __pointer_type
  344. operator--(int) volatile noexcept
  345. {
  346. #if __cplusplus >= 201703L
  347. static_assert( is_object<_Tp>::value, "pointer to object type" );
  348. #endif
  349. return _M_b--;
  350. }
  351. __pointer_type
  352. operator++() noexcept
  353. {
  354. #if __cplusplus >= 201703L
  355. static_assert( is_object<_Tp>::value, "pointer to object type" );
  356. #endif
  357. return ++_M_b;
  358. }
  359. __pointer_type
  360. operator++() volatile noexcept
  361. {
  362. #if __cplusplus >= 201703L
  363. static_assert( is_object<_Tp>::value, "pointer to object type" );
  364. #endif
  365. return ++_M_b;
  366. }
  367. __pointer_type
  368. operator--() noexcept
  369. {
  370. #if __cplusplus >= 201703L
  371. static_assert( is_object<_Tp>::value, "pointer to object type" );
  372. #endif
  373. return --_M_b;
  374. }
  375. __pointer_type
  376. operator--() volatile noexcept
  377. {
  378. #if __cplusplus >= 201703L
  379. static_assert( is_object<_Tp>::value, "pointer to object type" );
  380. #endif
  381. return --_M_b;
  382. }
  383. __pointer_type
  384. operator+=(ptrdiff_t __d) noexcept
  385. {
  386. #if __cplusplus >= 201703L
  387. static_assert( is_object<_Tp>::value, "pointer to object type" );
  388. #endif
  389. return _M_b.operator+=(__d);
  390. }
  391. __pointer_type
  392. operator+=(ptrdiff_t __d) volatile noexcept
  393. {
  394. #if __cplusplus >= 201703L
  395. static_assert( is_object<_Tp>::value, "pointer to object type" );
  396. #endif
  397. return _M_b.operator+=(__d);
  398. }
  399. __pointer_type
  400. operator-=(ptrdiff_t __d) noexcept
  401. {
  402. #if __cplusplus >= 201703L
  403. static_assert( is_object<_Tp>::value, "pointer to object type" );
  404. #endif
  405. return _M_b.operator-=(__d);
  406. }
  407. __pointer_type
  408. operator-=(ptrdiff_t __d) volatile noexcept
  409. {
  410. #if __cplusplus >= 201703L
  411. static_assert( is_object<_Tp>::value, "pointer to object type" );
  412. #endif
  413. return _M_b.operator-=(__d);
  414. }
  415. bool
  416. is_lock_free() const noexcept
  417. { return _M_b.is_lock_free(); }
  418. bool
  419. is_lock_free() const volatile noexcept
  420. { return _M_b.is_lock_free(); }
  421. #if __cplusplus >= 201703L
  422. static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
  423. #endif
  424. void
  425. store(__pointer_type __p,
  426. memory_order __m = memory_order_seq_cst) noexcept
  427. { return _M_b.store(__p, __m); }
  428. void
  429. store(__pointer_type __p,
  430. memory_order __m = memory_order_seq_cst) volatile noexcept
  431. { return _M_b.store(__p, __m); }
  432. __pointer_type
  433. load(memory_order __m = memory_order_seq_cst) const noexcept
  434. { return _M_b.load(__m); }
  435. __pointer_type
  436. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  437. { return _M_b.load(__m); }
  438. __pointer_type
  439. exchange(__pointer_type __p,
  440. memory_order __m = memory_order_seq_cst) noexcept
  441. { return _M_b.exchange(__p, __m); }
  442. __pointer_type
  443. exchange(__pointer_type __p,
  444. memory_order __m = memory_order_seq_cst) volatile noexcept
  445. { return _M_b.exchange(__p, __m); }
  446. bool
  447. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  448. memory_order __m1, memory_order __m2) noexcept
  449. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  450. bool
  451. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  452. memory_order __m1,
  453. memory_order __m2) volatile noexcept
  454. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  455. bool
  456. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  457. memory_order __m = memory_order_seq_cst) noexcept
  458. {
  459. return compare_exchange_weak(__p1, __p2, __m,
  460. __cmpexch_failure_order(__m));
  461. }
  462. bool
  463. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  464. memory_order __m = memory_order_seq_cst) volatile noexcept
  465. {
  466. return compare_exchange_weak(__p1, __p2, __m,
  467. __cmpexch_failure_order(__m));
  468. }
  469. bool
  470. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  471. memory_order __m1, memory_order __m2) noexcept
  472. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  473. bool
  474. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  475. memory_order __m1,
  476. memory_order __m2) volatile noexcept
  477. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  478. bool
  479. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  480. memory_order __m = memory_order_seq_cst) noexcept
  481. {
  482. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  483. __cmpexch_failure_order(__m));
  484. }
  485. bool
  486. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  487. memory_order __m = memory_order_seq_cst) volatile noexcept
  488. {
  489. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  490. __cmpexch_failure_order(__m));
  491. }
  492. __pointer_type
  493. fetch_add(ptrdiff_t __d,
  494. memory_order __m = memory_order_seq_cst) noexcept
  495. {
  496. #if __cplusplus >= 201703L
  497. static_assert( is_object<_Tp>::value, "pointer to object type" );
  498. #endif
  499. return _M_b.fetch_add(__d, __m);
  500. }
  501. __pointer_type
  502. fetch_add(ptrdiff_t __d,
  503. memory_order __m = memory_order_seq_cst) volatile noexcept
  504. {
  505. #if __cplusplus >= 201703L
  506. static_assert( is_object<_Tp>::value, "pointer to object type" );
  507. #endif
  508. return _M_b.fetch_add(__d, __m);
  509. }
  510. __pointer_type
  511. fetch_sub(ptrdiff_t __d,
  512. memory_order __m = memory_order_seq_cst) noexcept
  513. {
  514. #if __cplusplus >= 201703L
  515. static_assert( is_object<_Tp>::value, "pointer to object type" );
  516. #endif
  517. return _M_b.fetch_sub(__d, __m);
  518. }
  519. __pointer_type
  520. fetch_sub(ptrdiff_t __d,
  521. memory_order __m = memory_order_seq_cst) volatile noexcept
  522. {
  523. #if __cplusplus >= 201703L
  524. static_assert( is_object<_Tp>::value, "pointer to object type" );
  525. #endif
  526. return _M_b.fetch_sub(__d, __m);
  527. }
  528. };
  529. /// Explicit specialization for char.
  530. template<>
  531. struct atomic<char> : __atomic_base<char>
  532. {
  533. typedef char __integral_type;
  534. typedef __atomic_base<char> __base_type;
  535. atomic() noexcept = default;
  536. ~atomic() noexcept = default;
  537. atomic(const atomic&) = delete;
  538. atomic& operator=(const atomic&) = delete;
  539. atomic& operator=(const atomic&) volatile = delete;
  540. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  541. using __base_type::operator __integral_type;
  542. using __base_type::operator=;
  543. #if __cplusplus >= 201703L
  544. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  545. #endif
  546. };
  547. /// Explicit specialization for signed char.
  548. template<>
  549. struct atomic<signed char> : __atomic_base<signed char>
  550. {
  551. typedef signed char __integral_type;
  552. typedef __atomic_base<signed char> __base_type;
  553. atomic() noexcept= default;
  554. ~atomic() noexcept = default;
  555. atomic(const atomic&) = delete;
  556. atomic& operator=(const atomic&) = delete;
  557. atomic& operator=(const atomic&) volatile = delete;
  558. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  559. using __base_type::operator __integral_type;
  560. using __base_type::operator=;
  561. #if __cplusplus >= 201703L
  562. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  563. #endif
  564. };
  565. /// Explicit specialization for unsigned char.
  566. template<>
  567. struct atomic<unsigned char> : __atomic_base<unsigned char>
  568. {
  569. typedef unsigned char __integral_type;
  570. typedef __atomic_base<unsigned char> __base_type;
  571. atomic() noexcept= default;
  572. ~atomic() noexcept = default;
  573. atomic(const atomic&) = delete;
  574. atomic& operator=(const atomic&) = delete;
  575. atomic& operator=(const atomic&) volatile = delete;
  576. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  577. using __base_type::operator __integral_type;
  578. using __base_type::operator=;
  579. #if __cplusplus >= 201703L
  580. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  581. #endif
  582. };
  583. /// Explicit specialization for short.
  584. template<>
  585. struct atomic<short> : __atomic_base<short>
  586. {
  587. typedef short __integral_type;
  588. typedef __atomic_base<short> __base_type;
  589. atomic() noexcept = default;
  590. ~atomic() noexcept = default;
  591. atomic(const atomic&) = delete;
  592. atomic& operator=(const atomic&) = delete;
  593. atomic& operator=(const atomic&) volatile = delete;
  594. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  595. using __base_type::operator __integral_type;
  596. using __base_type::operator=;
  597. #if __cplusplus >= 201703L
  598. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  599. #endif
  600. };
  601. /// Explicit specialization for unsigned short.
  602. template<>
  603. struct atomic<unsigned short> : __atomic_base<unsigned short>
  604. {
  605. typedef unsigned short __integral_type;
  606. typedef __atomic_base<unsigned short> __base_type;
  607. atomic() noexcept = default;
  608. ~atomic() noexcept = default;
  609. atomic(const atomic&) = delete;
  610. atomic& operator=(const atomic&) = delete;
  611. atomic& operator=(const atomic&) volatile = delete;
  612. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  613. using __base_type::operator __integral_type;
  614. using __base_type::operator=;
  615. #if __cplusplus >= 201703L
  616. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  617. #endif
  618. };
  619. /// Explicit specialization for int.
  620. template<>
  621. struct atomic<int> : __atomic_base<int>
  622. {
  623. typedef int __integral_type;
  624. typedef __atomic_base<int> __base_type;
  625. atomic() noexcept = default;
  626. ~atomic() noexcept = default;
  627. atomic(const atomic&) = delete;
  628. atomic& operator=(const atomic&) = delete;
  629. atomic& operator=(const atomic&) volatile = delete;
  630. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  631. using __base_type::operator __integral_type;
  632. using __base_type::operator=;
  633. #if __cplusplus >= 201703L
  634. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  635. #endif
  636. };
  637. /// Explicit specialization for unsigned int.
  638. template<>
  639. struct atomic<unsigned int> : __atomic_base<unsigned int>
  640. {
  641. typedef unsigned int __integral_type;
  642. typedef __atomic_base<unsigned int> __base_type;
  643. atomic() noexcept = default;
  644. ~atomic() noexcept = default;
  645. atomic(const atomic&) = delete;
  646. atomic& operator=(const atomic&) = delete;
  647. atomic& operator=(const atomic&) volatile = delete;
  648. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  649. using __base_type::operator __integral_type;
  650. using __base_type::operator=;
  651. #if __cplusplus >= 201703L
  652. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  653. #endif
  654. };
  655. /// Explicit specialization for long.
  656. template<>
  657. struct atomic<long> : __atomic_base<long>
  658. {
  659. typedef long __integral_type;
  660. typedef __atomic_base<long> __base_type;
  661. atomic() noexcept = default;
  662. ~atomic() noexcept = default;
  663. atomic(const atomic&) = delete;
  664. atomic& operator=(const atomic&) = delete;
  665. atomic& operator=(const atomic&) volatile = delete;
  666. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  667. using __base_type::operator __integral_type;
  668. using __base_type::operator=;
  669. #if __cplusplus >= 201703L
  670. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  671. #endif
  672. };
  673. /// Explicit specialization for unsigned long.
  674. template<>
  675. struct atomic<unsigned long> : __atomic_base<unsigned long>
  676. {
  677. typedef unsigned long __integral_type;
  678. typedef __atomic_base<unsigned long> __base_type;
  679. atomic() noexcept = default;
  680. ~atomic() noexcept = default;
  681. atomic(const atomic&) = delete;
  682. atomic& operator=(const atomic&) = delete;
  683. atomic& operator=(const atomic&) volatile = delete;
  684. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  685. using __base_type::operator __integral_type;
  686. using __base_type::operator=;
  687. #if __cplusplus >= 201703L
  688. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  689. #endif
  690. };
  691. /// Explicit specialization for long long.
  692. template<>
  693. struct atomic<long long> : __atomic_base<long long>
  694. {
  695. typedef long long __integral_type;
  696. typedef __atomic_base<long long> __base_type;
  697. atomic() noexcept = default;
  698. ~atomic() noexcept = default;
  699. atomic(const atomic&) = delete;
  700. atomic& operator=(const atomic&) = delete;
  701. atomic& operator=(const atomic&) volatile = delete;
  702. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  703. using __base_type::operator __integral_type;
  704. using __base_type::operator=;
  705. #if __cplusplus >= 201703L
  706. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  707. #endif
  708. };
  709. /// Explicit specialization for unsigned long long.
  710. template<>
  711. struct atomic<unsigned long long> : __atomic_base<unsigned long long>
  712. {
  713. typedef unsigned long long __integral_type;
  714. typedef __atomic_base<unsigned long long> __base_type;
  715. atomic() noexcept = default;
  716. ~atomic() noexcept = default;
  717. atomic(const atomic&) = delete;
  718. atomic& operator=(const atomic&) = delete;
  719. atomic& operator=(const atomic&) volatile = delete;
  720. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  721. using __base_type::operator __integral_type;
  722. using __base_type::operator=;
  723. #if __cplusplus >= 201703L
  724. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  725. #endif
  726. };
  727. /// Explicit specialization for wchar_t.
  728. template<>
  729. struct atomic<wchar_t> : __atomic_base<wchar_t>
  730. {
  731. typedef wchar_t __integral_type;
  732. typedef __atomic_base<wchar_t> __base_type;
  733. atomic() noexcept = default;
  734. ~atomic() noexcept = default;
  735. atomic(const atomic&) = delete;
  736. atomic& operator=(const atomic&) = delete;
  737. atomic& operator=(const atomic&) volatile = delete;
  738. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  739. using __base_type::operator __integral_type;
  740. using __base_type::operator=;
  741. #if __cplusplus >= 201703L
  742. static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
  743. #endif
  744. };
  745. #ifdef _GLIBCXX_USE_CHAR8_T
  746. /// Explicit specialization for char8_t.
  747. template<>
  748. struct atomic<char8_t> : __atomic_base<char8_t>
  749. {
  750. typedef char8_t __integral_type;
  751. typedef __atomic_base<char8_t> __base_type;
  752. atomic() noexcept = default;
  753. ~atomic() noexcept = default;
  754. atomic(const atomic&) = delete;
  755. atomic& operator=(const atomic&) = delete;
  756. atomic& operator=(const atomic&) volatile = delete;
  757. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  758. using __base_type::operator __integral_type;
  759. using __base_type::operator=;
  760. #if __cplusplus > 201402L
  761. static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
  762. #endif
  763. };
  764. #endif
  765. /// Explicit specialization for char16_t.
  766. template<>
  767. struct atomic<char16_t> : __atomic_base<char16_t>
  768. {
  769. typedef char16_t __integral_type;
  770. typedef __atomic_base<char16_t> __base_type;
  771. atomic() noexcept = default;
  772. ~atomic() noexcept = default;
  773. atomic(const atomic&) = delete;
  774. atomic& operator=(const atomic&) = delete;
  775. atomic& operator=(const atomic&) volatile = delete;
  776. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  777. using __base_type::operator __integral_type;
  778. using __base_type::operator=;
  779. #if __cplusplus >= 201703L
  780. static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
  781. #endif
  782. };
  783. /// Explicit specialization for char32_t.
  784. template<>
  785. struct atomic<char32_t> : __atomic_base<char32_t>
  786. {
  787. typedef char32_t __integral_type;
  788. typedef __atomic_base<char32_t> __base_type;
  789. atomic() noexcept = default;
  790. ~atomic() noexcept = default;
  791. atomic(const atomic&) = delete;
  792. atomic& operator=(const atomic&) = delete;
  793. atomic& operator=(const atomic&) volatile = delete;
  794. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  795. using __base_type::operator __integral_type;
  796. using __base_type::operator=;
  797. #if __cplusplus >= 201703L
  798. static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
  799. #endif
  800. };
  801. /// atomic_bool
  802. typedef atomic<bool> atomic_bool;
  803. /// atomic_char
  804. typedef atomic<char> atomic_char;
  805. /// atomic_schar
  806. typedef atomic<signed char> atomic_schar;
  807. /// atomic_uchar
  808. typedef atomic<unsigned char> atomic_uchar;
  809. /// atomic_short
  810. typedef atomic<short> atomic_short;
  811. /// atomic_ushort
  812. typedef atomic<unsigned short> atomic_ushort;
  813. /// atomic_int
  814. typedef atomic<int> atomic_int;
  815. /// atomic_uint
  816. typedef atomic<unsigned int> atomic_uint;
  817. /// atomic_long
  818. typedef atomic<long> atomic_long;
  819. /// atomic_ulong
  820. typedef atomic<unsigned long> atomic_ulong;
  821. /// atomic_llong
  822. typedef atomic<long long> atomic_llong;
  823. /// atomic_ullong
  824. typedef atomic<unsigned long long> atomic_ullong;
  825. /// atomic_wchar_t
  826. typedef atomic<wchar_t> atomic_wchar_t;
  827. #ifdef _GLIBCXX_USE_CHAR8_T
  828. /// atomic_char8_t
  829. typedef atomic<char8_t> atomic_char8_t;
  830. #endif
  831. /// atomic_char16_t
  832. typedef atomic<char16_t> atomic_char16_t;
  833. /// atomic_char32_t
  834. typedef atomic<char32_t> atomic_char32_t;
  835. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  836. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  837. // 2441. Exact-width atomic typedefs should be provided
  838. /// atomic_int8_t
  839. typedef atomic<int8_t> atomic_int8_t;
  840. /// atomic_uint8_t
  841. typedef atomic<uint8_t> atomic_uint8_t;
  842. /// atomic_int16_t
  843. typedef atomic<int16_t> atomic_int16_t;
  844. /// atomic_uint16_t
  845. typedef atomic<uint16_t> atomic_uint16_t;
  846. /// atomic_int32_t
  847. typedef atomic<int32_t> atomic_int32_t;
  848. /// atomic_uint32_t
  849. typedef atomic<uint32_t> atomic_uint32_t;
  850. /// atomic_int64_t
  851. typedef atomic<int64_t> atomic_int64_t;
  852. /// atomic_uint64_t
  853. typedef atomic<uint64_t> atomic_uint64_t;
  854. /// atomic_int_least8_t
  855. typedef atomic<int_least8_t> atomic_int_least8_t;
  856. /// atomic_uint_least8_t
  857. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  858. /// atomic_int_least16_t
  859. typedef atomic<int_least16_t> atomic_int_least16_t;
  860. /// atomic_uint_least16_t
  861. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  862. /// atomic_int_least32_t
  863. typedef atomic<int_least32_t> atomic_int_least32_t;
  864. /// atomic_uint_least32_t
  865. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  866. /// atomic_int_least64_t
  867. typedef atomic<int_least64_t> atomic_int_least64_t;
  868. /// atomic_uint_least64_t
  869. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  870. /// atomic_int_fast8_t
  871. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  872. /// atomic_uint_fast8_t
  873. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  874. /// atomic_int_fast16_t
  875. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  876. /// atomic_uint_fast16_t
  877. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  878. /// atomic_int_fast32_t
  879. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  880. /// atomic_uint_fast32_t
  881. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  882. /// atomic_int_fast64_t
  883. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  884. /// atomic_uint_fast64_t
  885. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  886. #endif
  887. /// atomic_intptr_t
  888. typedef atomic<intptr_t> atomic_intptr_t;
  889. /// atomic_uintptr_t
  890. typedef atomic<uintptr_t> atomic_uintptr_t;
  891. /// atomic_size_t
  892. typedef atomic<size_t> atomic_size_t;
  893. /// atomic_ptrdiff_t
  894. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  895. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  896. /// atomic_intmax_t
  897. typedef atomic<intmax_t> atomic_intmax_t;
  898. /// atomic_uintmax_t
  899. typedef atomic<uintmax_t> atomic_uintmax_t;
  900. #endif
  901. // Function definitions, atomic_flag operations.
  902. inline bool
  903. atomic_flag_test_and_set_explicit(atomic_flag* __a,
  904. memory_order __m) noexcept
  905. { return __a->test_and_set(__m); }
  906. inline bool
  907. atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
  908. memory_order __m) noexcept
  909. { return __a->test_and_set(__m); }
  910. inline void
  911. atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  912. { __a->clear(__m); }
  913. inline void
  914. atomic_flag_clear_explicit(volatile atomic_flag* __a,
  915. memory_order __m) noexcept
  916. { __a->clear(__m); }
  917. inline bool
  918. atomic_flag_test_and_set(atomic_flag* __a) noexcept
  919. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  920. inline bool
  921. atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  922. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  923. inline void
  924. atomic_flag_clear(atomic_flag* __a) noexcept
  925. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
  926. inline void
  927. atomic_flag_clear(volatile atomic_flag* __a) noexcept
  928. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// Alias for the value_type member of atomic<_Tp>.
template<typename _Tp>
using __atomic_val_t = typename atomic<_Tp>::value_type;
// Alias for the difference_type member of atomic<_Tp>
// (used by the fetch_add/fetch_sub non-member overloads below).
template<typename _Tp>
using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  933. // [atomics.nonmembers] Non-member functions.
  934. // Function templates generally applicable to atomic types.
  935. template<typename _ITp>
  936. inline bool
  937. atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
  938. { return __a->is_lock_free(); }
  939. template<typename _ITp>
  940. inline bool
  941. atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
  942. { return __a->is_lock_free(); }
  943. template<typename _ITp>
  944. inline void
  945. atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  946. { __a->store(__i, memory_order_relaxed); }
  947. template<typename _ITp>
  948. inline void
  949. atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  950. { __a->store(__i, memory_order_relaxed); }
  951. template<typename _ITp>
  952. inline void
  953. atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  954. memory_order __m) noexcept
  955. { __a->store(__i, __m); }
  956. template<typename _ITp>
  957. inline void
  958. atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  959. memory_order __m) noexcept
  960. { __a->store(__i, __m); }
  961. template<typename _ITp>
  962. inline _ITp
  963. atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
  964. { return __a->load(__m); }
  965. template<typename _ITp>
  966. inline _ITp
  967. atomic_load_explicit(const volatile atomic<_ITp>* __a,
  968. memory_order __m) noexcept
  969. { return __a->load(__m); }
  970. template<typename _ITp>
  971. inline _ITp
  972. atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  973. memory_order __m) noexcept
  974. { return __a->exchange(__i, __m); }
  975. template<typename _ITp>
  976. inline _ITp
  977. atomic_exchange_explicit(volatile atomic<_ITp>* __a,
  978. __atomic_val_t<_ITp> __i,
  979. memory_order __m) noexcept
  980. { return __a->exchange(__i, __m); }
  981. template<typename _ITp>
  982. inline bool
  983. atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
  984. __atomic_val_t<_ITp>* __i1,
  985. __atomic_val_t<_ITp> __i2,
  986. memory_order __m1,
  987. memory_order __m2) noexcept
  988. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  989. template<typename _ITp>
  990. inline bool
  991. atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
  992. __atomic_val_t<_ITp>* __i1,
  993. __atomic_val_t<_ITp> __i2,
  994. memory_order __m1,
  995. memory_order __m2) noexcept
  996. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  997. template<typename _ITp>
  998. inline bool
  999. atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
  1000. __atomic_val_t<_ITp>* __i1,
  1001. __atomic_val_t<_ITp> __i2,
  1002. memory_order __m1,
  1003. memory_order __m2) noexcept
  1004. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1005. template<typename _ITp>
  1006. inline bool
  1007. atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
  1008. __atomic_val_t<_ITp>* __i1,
  1009. __atomic_val_t<_ITp> __i2,
  1010. memory_order __m1,
  1011. memory_order __m2) noexcept
  1012. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1013. template<typename _ITp>
  1014. inline void
  1015. atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1016. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1017. template<typename _ITp>
  1018. inline void
  1019. atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1020. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1021. template<typename _ITp>
  1022. inline _ITp
  1023. atomic_load(const atomic<_ITp>* __a) noexcept
  1024. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1025. template<typename _ITp>
  1026. inline _ITp
  1027. atomic_load(const volatile atomic<_ITp>* __a) noexcept
  1028. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1029. template<typename _ITp>
  1030. inline _ITp
  1031. atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1032. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1033. template<typename _ITp>
  1034. inline _ITp
  1035. atomic_exchange(volatile atomic<_ITp>* __a,
  1036. __atomic_val_t<_ITp> __i) noexcept
  1037. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1038. template<typename _ITp>
  1039. inline bool
  1040. atomic_compare_exchange_weak(atomic<_ITp>* __a,
  1041. __atomic_val_t<_ITp>* __i1,
  1042. __atomic_val_t<_ITp> __i2) noexcept
  1043. {
  1044. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1045. memory_order_seq_cst,
  1046. memory_order_seq_cst);
  1047. }
  1048. template<typename _ITp>
  1049. inline bool
  1050. atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
  1051. __atomic_val_t<_ITp>* __i1,
  1052. __atomic_val_t<_ITp> __i2) noexcept
  1053. {
  1054. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1055. memory_order_seq_cst,
  1056. memory_order_seq_cst);
  1057. }
  1058. template<typename _ITp>
  1059. inline bool
  1060. atomic_compare_exchange_strong(atomic<_ITp>* __a,
  1061. __atomic_val_t<_ITp>* __i1,
  1062. __atomic_val_t<_ITp> __i2) noexcept
  1063. {
  1064. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1065. memory_order_seq_cst,
  1066. memory_order_seq_cst);
  1067. }
  1068. template<typename _ITp>
  1069. inline bool
  1070. atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
  1071. __atomic_val_t<_ITp>* __i1,
  1072. __atomic_val_t<_ITp> __i2) noexcept
  1073. {
  1074. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1075. memory_order_seq_cst,
  1076. memory_order_seq_cst);
  1077. }
  1078. // Function templates for atomic_integral and atomic_pointer operations only.
  1079. // Some operations (and, or, xor) are only available for atomic integrals,
  1080. // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
  1081. template<typename _ITp>
  1082. inline _ITp
  1083. atomic_fetch_add_explicit(atomic<_ITp>* __a,
  1084. __atomic_diff_t<_ITp> __i,
  1085. memory_order __m) noexcept
  1086. { return __a->fetch_add(__i, __m); }
  1087. template<typename _ITp>
  1088. inline _ITp
  1089. atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
  1090. __atomic_diff_t<_ITp> __i,
  1091. memory_order __m) noexcept
  1092. { return __a->fetch_add(__i, __m); }
  1093. template<typename _ITp>
  1094. inline _ITp
  1095. atomic_fetch_sub_explicit(atomic<_ITp>* __a,
  1096. __atomic_diff_t<_ITp> __i,
  1097. memory_order __m) noexcept
  1098. { return __a->fetch_sub(__i, __m); }
  1099. template<typename _ITp>
  1100. inline _ITp
  1101. atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
  1102. __atomic_diff_t<_ITp> __i,
  1103. memory_order __m) noexcept
  1104. { return __a->fetch_sub(__i, __m); }
  1105. template<typename _ITp>
  1106. inline _ITp
  1107. atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
  1108. __atomic_val_t<_ITp> __i,
  1109. memory_order __m) noexcept
  1110. { return __a->fetch_and(__i, __m); }
  1111. template<typename _ITp>
  1112. inline _ITp
  1113. atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
  1114. __atomic_val_t<_ITp> __i,
  1115. memory_order __m) noexcept
  1116. { return __a->fetch_and(__i, __m); }
  1117. template<typename _ITp>
  1118. inline _ITp
  1119. atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
  1120. __atomic_val_t<_ITp> __i,
  1121. memory_order __m) noexcept
  1122. { return __a->fetch_or(__i, __m); }
  1123. template<typename _ITp>
  1124. inline _ITp
  1125. atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
  1126. __atomic_val_t<_ITp> __i,
  1127. memory_order __m) noexcept
  1128. { return __a->fetch_or(__i, __m); }
  1129. template<typename _ITp>
  1130. inline _ITp
  1131. atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
  1132. __atomic_val_t<_ITp> __i,
  1133. memory_order __m) noexcept
  1134. { return __a->fetch_xor(__i, __m); }
  1135. template<typename _ITp>
  1136. inline _ITp
  1137. atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
  1138. __atomic_val_t<_ITp> __i,
  1139. memory_order __m) noexcept
  1140. { return __a->fetch_xor(__i, __m); }
  1141. template<typename _ITp>
  1142. inline _ITp
  1143. atomic_fetch_add(atomic<_ITp>* __a,
  1144. __atomic_diff_t<_ITp> __i) noexcept
  1145. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1146. template<typename _ITp>
  1147. inline _ITp
  1148. atomic_fetch_add(volatile atomic<_ITp>* __a,
  1149. __atomic_diff_t<_ITp> __i) noexcept
  1150. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1151. template<typename _ITp>
  1152. inline _ITp
  1153. atomic_fetch_sub(atomic<_ITp>* __a,
  1154. __atomic_diff_t<_ITp> __i) noexcept
  1155. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1156. template<typename _ITp>
  1157. inline _ITp
  1158. atomic_fetch_sub(volatile atomic<_ITp>* __a,
  1159. __atomic_diff_t<_ITp> __i) noexcept
  1160. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1161. template<typename _ITp>
  1162. inline _ITp
  1163. atomic_fetch_and(__atomic_base<_ITp>* __a,
  1164. __atomic_val_t<_ITp> __i) noexcept
  1165. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1166. template<typename _ITp>
  1167. inline _ITp
  1168. atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
  1169. __atomic_val_t<_ITp> __i) noexcept
  1170. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1171. template<typename _ITp>
  1172. inline _ITp
  1173. atomic_fetch_or(__atomic_base<_ITp>* __a,
  1174. __atomic_val_t<_ITp> __i) noexcept
  1175. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1176. template<typename _ITp>
  1177. inline _ITp
  1178. atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
  1179. __atomic_val_t<_ITp> __i) noexcept
  1180. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1181. template<typename _ITp>
  1182. inline _ITp
  1183. atomic_fetch_xor(__atomic_base<_ITp>* __a,
  1184. __atomic_val_t<_ITp> __i) noexcept
  1185. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1186. template<typename _ITp>
  1187. inline _ITp
  1188. atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
  1189. __atomic_val_t<_ITp> __i) noexcept
  1190. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1191. #if __cplusplus > 201703L
  1192. #define __cpp_lib_atomic_float 201711L
  1193. template<>
  1194. struct atomic<float> : __atomic_float<float>
  1195. {
  1196. atomic() noexcept = default;
  1197. constexpr
  1198. atomic(float __fp) noexcept : __atomic_float<float>(__fp)
  1199. { }
  1200. atomic& operator=(const atomic&) volatile = delete;
  1201. atomic& operator=(const atomic&) = delete;
  1202. using __atomic_float<float>::operator=;
  1203. };
  1204. template<>
  1205. struct atomic<double> : __atomic_float<double>
  1206. {
  1207. atomic() noexcept = default;
  1208. constexpr
  1209. atomic(double __fp) noexcept : __atomic_float<double>(__fp)
  1210. { }
  1211. atomic& operator=(const atomic&) volatile = delete;
  1212. atomic& operator=(const atomic&) = delete;
  1213. using __atomic_float<double>::operator=;
  1214. };
  1215. template<>
  1216. struct atomic<long double> : __atomic_float<long double>
  1217. {
  1218. atomic() noexcept = default;
  1219. constexpr
  1220. atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
  1221. { }
  1222. atomic& operator=(const atomic&) volatile = delete;
  1223. atomic& operator=(const atomic&) = delete;
  1224. using __atomic_float<long double>::operator=;
  1225. };
  1226. #define __cpp_lib_atomic_ref 201806L
  1227. /// Class template to provide atomic operations on a non-atomic variable.
  1228. template<typename _Tp>
  1229. struct atomic_ref : __atomic_ref<_Tp>
  1230. {
  1231. explicit
  1232. atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
  1233. { }
  1234. atomic_ref& operator=(const atomic_ref&) = delete;
  1235. atomic_ref(const atomic_ref&) = default;
  1236. using __atomic_ref<_Tp>::operator=;
  1237. };
  1238. #endif // C++2a
  1239. /// @} group atomics
  1240. _GLIBCXX_END_NAMESPACE_VERSION
  1241. } // namespace
  1242. #endif // C++11
  1243. #endif // _GLIBCXX_ATOMIC