  1. // Internal policy header for unordered_set and unordered_map -*- C++ -*-
  2. // Copyright (C) 2010-2022 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/hashtable_policy.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly.
  23. * @headername{unordered_map,unordered_set}
  24. */
  25. #ifndef _HASHTABLE_POLICY_H
  26. #define _HASHTABLE_POLICY_H 1
  27. #include <tuple> // for std::tuple, std::forward_as_tuple
  28. #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
  29. #include <ext/aligned_buffer.h> // for __gnu_cxx::__aligned_buffer
  30. #include <ext/alloc_traits.h> // for std::__alloc_rebind
  31. #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /// @cond undocumented
  36. template<typename _Key, typename _Value, typename _Alloc,
  37. typename _ExtractKey, typename _Equal,
  38. typename _Hash, typename _RangeHash, typename _Unused,
  39. typename _RehashPolicy, typename _Traits>
  40. class _Hashtable;
  41. namespace __detail
  42. {
  43. /**
  44. * @defgroup hashtable-detail Base and Implementation Classes
  45. * @ingroup unordered_associative_containers
  46. * @{
  47. */
  48. template<typename _Key, typename _Value, typename _ExtractKey,
  49. typename _Equal, typename _Hash, typename _RangeHash,
  50. typename _Unused, typename _Traits>
  51. struct _Hashtable_base;
  52. // Helper function: return distance(first, last) for forward
  53. // iterators, or 0/1 for input iterators.
  54. template<typename _Iterator>
  55. inline typename std::iterator_traits<_Iterator>::difference_type
  56. __distance_fw(_Iterator __first, _Iterator __last,
  57. std::input_iterator_tag)
  58. { return __first != __last ? 1 : 0; }
  59. template<typename _Iterator>
  60. inline typename std::iterator_traits<_Iterator>::difference_type
  61. __distance_fw(_Iterator __first, _Iterator __last,
  62. std::forward_iterator_tag)
  63. { return std::distance(__first, __last); }
  64. template<typename _Iterator>
  65. inline typename std::iterator_traits<_Iterator>::difference_type
  66. __distance_fw(_Iterator __first, _Iterator __last)
  67. { return __distance_fw(__first, __last,
  68. std::__iterator_category(__first)); }
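// The same tag dispatch, as a standalone sketch (illustrative only, using
// standard names rather than the internal helpers): forward iterators can be
// measured cheaply, while single-pass input iterators only report "empty or
// not", since walking them would consume the range.
//
//   #include <iterator>
//   #include <type_traits>
//
//   template<typename _It>
//   auto
//   __range_size_hint(_It __first, _It __last)
//   {
//     using _Cat = typename std::iterator_traits<_It>::iterator_category;
//     if constexpr (std::is_base_of_v<std::forward_iterator_tag, _Cat>)
//       return std::distance(__first, __last); // cheap, non-destructive
//     else
//       return __first != __last ? 1 : 0;      // just "empty or not"
//   }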
  69. struct _Identity
  70. {
  71. template<typename _Tp>
  72. _Tp&&
  73. operator()(_Tp&& __x) const noexcept
  74. { return std::forward<_Tp>(__x); }
  75. };
  76. struct _Select1st
  77. {
  78. template<typename _Pair>
  79. struct __1st_type;
  80. template<typename _Tp, typename _Up>
  81. struct __1st_type<pair<_Tp, _Up>>
  82. { using type = _Tp; };
  83. template<typename _Tp, typename _Up>
  84. struct __1st_type<const pair<_Tp, _Up>>
  85. { using type = const _Tp; };
  86. template<typename _Pair>
  87. struct __1st_type<_Pair&>
  88. { using type = typename __1st_type<_Pair>::type&; };
  89. template<typename _Tp>
  90. typename __1st_type<_Tp>::type&&
  91. operator()(_Tp&& __x) const noexcept
  92. { return std::forward<_Tp>(__x).first; }
  93. };
  94. template<typename _ExKey>
  95. struct _NodeBuilder;
  96. template<>
  97. struct _NodeBuilder<_Select1st>
  98. {
  99. template<typename _Kt, typename _Arg, typename _NodeGenerator>
  100. static auto
  101. _S_build(_Kt&& __k, _Arg&& __arg, const _NodeGenerator& __node_gen)
  102. -> typename _NodeGenerator::__node_type*
  103. {
  104. return __node_gen(std::forward<_Kt>(__k),
  105. std::forward<_Arg>(__arg).second);
  106. }
  107. };
  108. template<>
  109. struct _NodeBuilder<_Identity>
  110. {
  111. template<typename _Kt, typename _Arg, typename _NodeGenerator>
  112. static auto
  113. _S_build(_Kt&& __k, _Arg&&, const _NodeGenerator& __node_gen)
  114. -> typename _NodeGenerator::__node_type*
  115. { return __node_gen(std::forward<_Kt>(__k)); }
  116. };
  117. template<typename _NodeAlloc>
  118. struct _Hashtable_alloc;
119. // Functor that recycles a pool of nodes, falling back to allocation once
120. // the pool is empty.
  121. template<typename _NodeAlloc>
  122. struct _ReuseOrAllocNode
  123. {
  124. private:
  125. using __node_alloc_type = _NodeAlloc;
  126. using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
  127. using __node_alloc_traits =
  128. typename __hashtable_alloc::__node_alloc_traits;
  129. public:
  130. using __node_type = typename __hashtable_alloc::__node_type;
  131. _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
  132. : _M_nodes(__nodes), _M_h(__h) { }
  133. _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;
  134. ~_ReuseOrAllocNode()
  135. { _M_h._M_deallocate_nodes(_M_nodes); }
  136. template<typename... _Args>
  137. __node_type*
  138. operator()(_Args&&... __args) const
  139. {
  140. if (_M_nodes)
  141. {
  142. __node_type* __node = _M_nodes;
  143. _M_nodes = _M_nodes->_M_next();
  144. __node->_M_nxt = nullptr;
  145. auto& __a = _M_h._M_node_allocator();
  146. __node_alloc_traits::destroy(__a, __node->_M_valptr());
  147. __try
  148. {
  149. __node_alloc_traits::construct(__a, __node->_M_valptr(),
  150. std::forward<_Args>(__args)...);
  151. }
  152. __catch(...)
  153. {
  154. _M_h._M_deallocate_node_ptr(__node);
  155. __throw_exception_again;
  156. }
  157. return __node;
  158. }
  159. return _M_h._M_allocate_node(std::forward<_Args>(__args)...);
  160. }
  161. private:
  162. mutable __node_type* _M_nodes;
  163. __hashtable_alloc& _M_h;
  164. };
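// The recycle-or-allocate idea above, sketched with a plain allocator
// (illustrative; reuse_or_allocate is a hypothetical helper, and the real
// functor additionally deallocates the node if construct throws):
//
//   #include <memory>
//
//   template<typename _Tp, typename _Alloc>
//   _Tp*
//   reuse_or_allocate(_Tp* __pooled, _Alloc& __a, const _Tp& __value)
//   {
//     using _Traits = std::allocator_traits<_Alloc>;
//     if (__pooled)
//       {
//         _Traits::destroy(__a, __pooled);            // recycle in place
//         _Traits::construct(__a, __pooled, __value);
//         return __pooled;
//       }
//     _Tp* __fresh = _Traits::allocate(__a, 1);        // pool exhausted
//     _Traits::construct(__a, __fresh, __value);
//     return __fresh;
//   }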
  165. // Functor similar to the previous one but without any pool of nodes to
  166. // recycle.
  167. template<typename _NodeAlloc>
  168. struct _AllocNode
  169. {
  170. private:
  171. using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
  172. public:
  173. using __node_type = typename __hashtable_alloc::__node_type;
  174. _AllocNode(__hashtable_alloc& __h)
  175. : _M_h(__h) { }
  176. template<typename... _Args>
  177. __node_type*
  178. operator()(_Args&&... __args) const
  179. { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }
  180. private:
  181. __hashtable_alloc& _M_h;
  182. };
  183. // Auxiliary types used for all instantiations of _Hashtable nodes
  184. // and iterators.
  185. /**
  186. * struct _Hashtable_traits
  187. *
  188. * Important traits for hash tables.
  189. *
  190. * @tparam _Cache_hash_code Boolean value. True if the value of
  191. * the hash function is stored along with the value. This is a
  192. * time-space tradeoff. Storing it may improve lookup speed by
  193. * reducing the number of times we need to call the _Hash or _Equal
  194. * functors.
  195. *
  196. * @tparam _Constant_iterators Boolean value. True if iterator and
  197. * const_iterator are both constant iterator types. This is true
  198. * for unordered_set and unordered_multiset, false for
  199. * unordered_map and unordered_multimap.
  200. *
  201. * @tparam _Unique_keys Boolean value. True if the return value
  202. * of _Hashtable::count(k) is always at most one, false if it may
  203. * be an arbitrary number. This is true for unordered_set and
  204. * unordered_map, false for unordered_multiset and
  205. * unordered_multimap.
  206. */
  207. template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
  208. struct _Hashtable_traits
  209. {
  210. using __hash_cached = __bool_constant<_Cache_hash_code>;
  211. using __constant_iterators = __bool_constant<_Constant_iterators>;
  212. using __unique_keys = __bool_constant<_Unique_keys>;
  213. };
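// For reference, the unordered containers pick these traits roughly as
// follows (a sketch; the exact aliases and the hash-code cache default live
// in the container headers):
//
//   unordered_set<T>         -> _Hashtable_traits<cache, true,  true>
//   unordered_multiset<T>    -> _Hashtable_traits<cache, true,  false>
//   unordered_map<K, T>      -> _Hashtable_traits<cache, false, true>
//   unordered_multimap<K, T> -> _Hashtable_traits<cache, false, false>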
  214. /**
  215. * struct _Hashtable_hash_traits
  216. *
  217. * Important traits for hash tables depending on associated hasher.
  218. *
  219. */
  220. template<typename _Hash>
  221. struct _Hashtable_hash_traits
  222. {
  223. static constexpr std::size_t
  224. __small_size_threshold() noexcept
  225. { return std::__is_fast_hash<_Hash>::value ? 0 : 20; }
  226. };
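// The threshold above enables a linear, key-comparison-only scan for small
// containers when hashing is comparatively expensive. A sketch of the
// resulting values (std::hash<int> satisfies __is_fast_hash,
// std::hash<std::string> does not):
//
//   _Hashtable_hash_traits<std::hash<int>>::__small_size_threshold()          // 0
//   _Hashtable_hash_traits<std::hash<std::string>>::__small_size_threshold()  // 20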
  227. /**
  228. * struct _Hash_node_base
  229. *
  230. * Nodes, used to wrap elements stored in the hash table. A policy
  231. * template parameter of class template _Hashtable controls whether
  232. * nodes also store a hash code. In some cases (e.g. strings) this
  233. * may be a performance win.
  234. */
  235. struct _Hash_node_base
  236. {
  237. _Hash_node_base* _M_nxt;
  238. _Hash_node_base() noexcept : _M_nxt() { }
  239. _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
  240. };
  241. /**
  242. * struct _Hash_node_value_base
  243. *
  244. * Node type with the value to store.
  245. */
  246. template<typename _Value>
  247. struct _Hash_node_value_base
  248. {
  249. typedef _Value value_type;
  250. __gnu_cxx::__aligned_buffer<_Value> _M_storage;
  251. _Value*
  252. _M_valptr() noexcept
  253. { return _M_storage._M_ptr(); }
  254. const _Value*
  255. _M_valptr() const noexcept
  256. { return _M_storage._M_ptr(); }
  257. _Value&
  258. _M_v() noexcept
  259. { return *_M_valptr(); }
  260. const _Value&
  261. _M_v() const noexcept
  262. { return *_M_valptr(); }
  263. };
  264. /**
  265. * Primary template struct _Hash_node_code_cache.
  266. */
  267. template<bool _Cache_hash_code>
  268. struct _Hash_node_code_cache
  269. { };
  270. /**
  271. * Specialization for node with cache, struct _Hash_node_code_cache.
  272. */
  273. template<>
  274. struct _Hash_node_code_cache<true>
  275. { std::size_t _M_hash_code; };
  276. template<typename _Value, bool _Cache_hash_code>
  277. struct _Hash_node_value
  278. : _Hash_node_value_base<_Value>
  279. , _Hash_node_code_cache<_Cache_hash_code>
  280. { };
  281. /**
  282. * Primary template struct _Hash_node.
  283. */
  284. template<typename _Value, bool _Cache_hash_code>
  285. struct _Hash_node
  286. : _Hash_node_base
  287. , _Hash_node_value<_Value, _Cache_hash_code>
  288. {
  289. _Hash_node*
  290. _M_next() const noexcept
  291. { return static_cast<_Hash_node*>(this->_M_nxt); }
  292. };
  293. /// Base class for node iterators.
  294. template<typename _Value, bool _Cache_hash_code>
  295. struct _Node_iterator_base
  296. {
  297. using __node_type = _Hash_node<_Value, _Cache_hash_code>;
  298. __node_type* _M_cur;
  299. _Node_iterator_base() : _M_cur(nullptr) { }
  300. _Node_iterator_base(__node_type* __p) noexcept
  301. : _M_cur(__p) { }
  302. void
  303. _M_incr() noexcept
  304. { _M_cur = _M_cur->_M_next(); }
  305. friend bool
  306. operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
  307. noexcept
  308. { return __x._M_cur == __y._M_cur; }
  309. #if __cpp_impl_three_way_comparison < 201907L
  310. friend bool
  311. operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
  312. noexcept
  313. { return __x._M_cur != __y._M_cur; }
  314. #endif
  315. };
316. /// Node iterators, used to iterate through the whole hashtable.
  317. template<typename _Value, bool __constant_iterators, bool __cache>
  318. struct _Node_iterator
  319. : public _Node_iterator_base<_Value, __cache>
  320. {
  321. private:
  322. using __base_type = _Node_iterator_base<_Value, __cache>;
  323. using __node_type = typename __base_type::__node_type;
  324. public:
  325. using value_type = _Value;
  326. using difference_type = std::ptrdiff_t;
  327. using iterator_category = std::forward_iterator_tag;
  328. using pointer = __conditional_t<__constant_iterators,
  329. const value_type*, value_type*>;
  330. using reference = __conditional_t<__constant_iterators,
  331. const value_type&, value_type&>;
  332. _Node_iterator() = default;
  333. explicit
  334. _Node_iterator(__node_type* __p) noexcept
  335. : __base_type(__p) { }
  336. reference
  337. operator*() const noexcept
  338. { return this->_M_cur->_M_v(); }
  339. pointer
  340. operator->() const noexcept
  341. { return this->_M_cur->_M_valptr(); }
  342. _Node_iterator&
  343. operator++() noexcept
  344. {
  345. this->_M_incr();
  346. return *this;
  347. }
  348. _Node_iterator
  349. operator++(int) noexcept
  350. {
  351. _Node_iterator __tmp(*this);
  352. this->_M_incr();
  353. return __tmp;
  354. }
  355. };
356. /// Node const_iterators, used to iterate through the whole hashtable.
  357. template<typename _Value, bool __constant_iterators, bool __cache>
  358. struct _Node_const_iterator
  359. : public _Node_iterator_base<_Value, __cache>
  360. {
  361. private:
  362. using __base_type = _Node_iterator_base<_Value, __cache>;
  363. using __node_type = typename __base_type::__node_type;
  364. public:
  365. typedef _Value value_type;
  366. typedef std::ptrdiff_t difference_type;
  367. typedef std::forward_iterator_tag iterator_category;
  368. typedef const value_type* pointer;
  369. typedef const value_type& reference;
  370. _Node_const_iterator() = default;
  371. explicit
  372. _Node_const_iterator(__node_type* __p) noexcept
  373. : __base_type(__p) { }
  374. _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
  375. __cache>& __x) noexcept
  376. : __base_type(__x._M_cur) { }
  377. reference
  378. operator*() const noexcept
  379. { return this->_M_cur->_M_v(); }
  380. pointer
  381. operator->() const noexcept
  382. { return this->_M_cur->_M_valptr(); }
  383. _Node_const_iterator&
  384. operator++() noexcept
  385. {
  386. this->_M_incr();
  387. return *this;
  388. }
  389. _Node_const_iterator
  390. operator++(int) noexcept
  391. {
  392. _Node_const_iterator __tmp(*this);
  393. this->_M_incr();
  394. return __tmp;
  395. }
  396. };
  397. // Many of class template _Hashtable's template parameters are policy
  398. // classes. These are defaults for the policies.
  399. /// Default range hashing function: use division to fold a large number
  400. /// into the range [0, N).
  401. struct _Mod_range_hashing
  402. {
  403. typedef std::size_t first_argument_type;
  404. typedef std::size_t second_argument_type;
  405. typedef std::size_t result_type;
  406. result_type
  407. operator()(first_argument_type __num,
  408. second_argument_type __den) const noexcept
  409. { return __num % __den; }
  410. };
  411. /// Default ranged hash function H. In principle it should be a
  412. /// function object composed from objects of type H1 and H2 such that
  413. /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  414. /// h1 and h2. So instead we'll just use a tag to tell class template
  415. /// hashtable to do that composition.
  416. struct _Default_ranged_hash { };
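// With the default policies the composition h(k, N) = h2(h1(k), N) described
// above is simply "hash, then fold into [0, N)". A minimal sketch:
//
//   std::size_t __code = std::hash<int>{}(42);              // h1: _Hash
//   std::size_t __bkt  = _Mod_range_hashing{}(__code, 13);  // h2: __code % 13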
  417. /// Default value for rehash policy. Bucket size is (usually) the
  418. /// smallest prime that keeps the load factor small enough.
  419. struct _Prime_rehash_policy
  420. {
  421. using __has_load_factor = true_type;
  422. _Prime_rehash_policy(float __z = 1.0) noexcept
  423. : _M_max_load_factor(__z), _M_next_resize(0) { }
  424. float
  425. max_load_factor() const noexcept
  426. { return _M_max_load_factor; }
  427. // Return a bucket size no smaller than n.
  428. std::size_t
  429. _M_next_bkt(std::size_t __n) const;
  430. // Return a bucket count appropriate for n elements
  431. std::size_t
  432. _M_bkt_for_elements(std::size_t __n) const
  433. { return __builtin_ceil(__n / (double)_M_max_load_factor); }
  434. // __n_bkt is current bucket count, __n_elt is current element count,
  435. // and __n_ins is number of elements to be inserted. Do we need to
  436. // increase bucket count? If so, return make_pair(true, n), where n
  437. // is the new bucket count. If not, return make_pair(false, 0).
  438. std::pair<bool, std::size_t>
  439. _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
  440. std::size_t __n_ins) const;
  441. typedef std::size_t _State;
  442. _State
  443. _M_state() const
  444. { return _M_next_resize; }
  445. void
  446. _M_reset() noexcept
  447. { _M_next_resize = 0; }
  448. void
  449. _M_reset(_State __state)
  450. { _M_next_resize = __state; }
  451. static const std::size_t _S_growth_factor = 2;
  452. float _M_max_load_factor;
  453. mutable std::size_t _M_next_resize;
  454. };
  455. /// Range hashing function assuming that second arg is a power of 2.
  456. struct _Mask_range_hashing
  457. {
  458. typedef std::size_t first_argument_type;
  459. typedef std::size_t second_argument_type;
  460. typedef std::size_t result_type;
  461. result_type
  462. operator()(first_argument_type __num,
  463. second_argument_type __den) const noexcept
  464. { return __num & (__den - 1); }
  465. };
466. /// Compute the smallest power of 2 not less than __n.
  467. inline std::size_t
  468. __clp2(std::size_t __n) noexcept
  469. {
  470. using __gnu_cxx::__int_traits;
  471. // Equivalent to return __n ? std::bit_ceil(__n) : 0;
  472. if (__n < 2)
  473. return __n;
  474. const unsigned __lz = sizeof(size_t) > sizeof(long)
  475. ? __builtin_clzll(__n - 1ull)
  476. : __builtin_clzl(__n - 1ul);
  477. // Doing two shifts avoids undefined behaviour when __lz == 0.
  478. return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
  479. }
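// A few sample values of __clp2, assuming a 64-bit size_t:
//
//   __clp2(0)  == 0    __clp2(1)  == 1    __clp2(3)  == 4
//   __clp2(64) == 64   __clp2(65) == 128
//
// When __n exceeds the largest representable power of 2 the final shift
// wraps around to 0, which _Power2_rehash_policy::_M_next_bkt below checks for.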
  480. /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
  481. /// operations.
  482. struct _Power2_rehash_policy
  483. {
  484. using __has_load_factor = true_type;
  485. _Power2_rehash_policy(float __z = 1.0) noexcept
  486. : _M_max_load_factor(__z), _M_next_resize(0) { }
  487. float
  488. max_load_factor() const noexcept
  489. { return _M_max_load_factor; }
  490. // Return a bucket size no smaller than n (as long as n is not above the
  491. // highest power of 2).
  492. std::size_t
  493. _M_next_bkt(std::size_t __n) noexcept
  494. {
  495. if (__n == 0)
496. // Special case for the container's first initialization with a 0 bucket
497. // count hint. We keep _M_next_resize at 0 to make sure that the next time
498. // an element is added an allocation will take place.
  499. return 1;
  500. const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
  501. const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
  502. std::size_t __res = __clp2(__n);
  503. if (__res == 0)
  504. __res = __max_bkt;
  505. else if (__res == 1)
  506. // If __res is 1 we force it to 2 to make sure there will be an
507. // allocation so that nothing needs to be stored in the initial
508. // single bucket.
  509. __res = 2;
  510. if (__res == __max_bkt)
511. // Set next resize to the max value so that we never try to rehash again,
512. // as we have already reached the largest possible bucket count.
513. // Note that this might result in max_load_factor not being respected.
  514. _M_next_resize = size_t(-1);
  515. else
  516. _M_next_resize
  517. = __builtin_floor(__res * (double)_M_max_load_factor);
  518. return __res;
  519. }
  520. // Return a bucket count appropriate for n elements
  521. std::size_t
  522. _M_bkt_for_elements(std::size_t __n) const noexcept
  523. { return __builtin_ceil(__n / (double)_M_max_load_factor); }
  524. // __n_bkt is current bucket count, __n_elt is current element count,
  525. // and __n_ins is number of elements to be inserted. Do we need to
  526. // increase bucket count? If so, return make_pair(true, n), where n
  527. // is the new bucket count. If not, return make_pair(false, 0).
  528. std::pair<bool, std::size_t>
  529. _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
  530. std::size_t __n_ins) noexcept
  531. {
  532. if (__n_elt + __n_ins > _M_next_resize)
  533. {
  534. // If _M_next_resize is 0 it means that we have nothing allocated so
535. // far and that we are starting to insert elements. In this case we start
536. // with an initial bucket count of 11.
  537. double __min_bkts
  538. = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
  539. / (double)_M_max_load_factor;
  540. if (__min_bkts >= __n_bkt)
  541. return { true,
  542. _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
  543. __n_bkt * _S_growth_factor)) };
  544. _M_next_resize
  545. = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
  546. return { false, 0 };
  547. }
  548. else
  549. return { false, 0 };
  550. }
  551. typedef std::size_t _State;
  552. _State
  553. _M_state() const noexcept
  554. { return _M_next_resize; }
  555. void
  556. _M_reset() noexcept
  557. { _M_next_resize = 0; }
  558. void
  559. _M_reset(_State __state) noexcept
  560. { _M_next_resize = __state; }
  561. static const std::size_t _S_growth_factor = 2;
  562. float _M_max_load_factor;
  563. std::size_t _M_next_resize;
  564. };
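// An illustrative trace of the policy above, assuming the default
// max_load_factor of 1.0:
//
//   _Power2_rehash_policy __p;
//   __p._M_next_bkt(0);            // -> 1, _M_next_resize stays 0
//   __p._M_need_rehash(1, 0, 1);   // -> {true, 16}: first insertion grows to
//                                  //    __clp2 of the minimum of 11 buckets
//   __p._M_need_rehash(16, 10, 1); // -> {false, 0}: 11 elements still fit
//                                  //    within _M_next_resize (16)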
  565. // Base classes for std::_Hashtable. We define these base classes
  566. // because in some cases we want to do different things depending on
  567. // the value of a policy class. In some cases the policy class
  568. // affects which member functions and nested typedefs are defined;
  569. // we handle that by specializing base class templates. Several of
  570. // the base class templates need to access other members of class
  571. // template _Hashtable, so we use a variant of the "Curiously
  572. // Recurring Template Pattern" (CRTP) technique.
  573. /**
  574. * Primary class template _Map_base.
  575. *
  576. * If the hashtable has a value type of the form pair<const T1, T2> and
  577. * a key extraction policy (_ExtractKey) that returns the first part
  578. * of the pair, the hashtable gets a mapped_type typedef. If it
  579. * satisfies those criteria and also has unique keys, then it also
  580. * gets an operator[].
  581. */
  582. template<typename _Key, typename _Value, typename _Alloc,
  583. typename _ExtractKey, typename _Equal,
  584. typename _Hash, typename _RangeHash, typename _Unused,
  585. typename _RehashPolicy, typename _Traits,
  586. bool _Unique_keys = _Traits::__unique_keys::value>
  587. struct _Map_base { };
  588. /// Partial specialization, __unique_keys set to false, std::pair value type.
  589. template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
  590. typename _Hash, typename _RangeHash, typename _Unused,
  591. typename _RehashPolicy, typename _Traits>
  592. struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
  593. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
  594. {
  595. using mapped_type = _Val;
  596. };
  597. /// Partial specialization, __unique_keys set to true.
  598. template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
  599. typename _Hash, typename _RangeHash, typename _Unused,
  600. typename _RehashPolicy, typename _Traits>
  601. struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
  602. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
  603. {
  604. private:
  605. using __hashtable_base = _Hashtable_base<_Key, pair<const _Key, _Val>,
  606. _Select1st, _Equal, _Hash,
  607. _RangeHash, _Unused,
  608. _Traits>;
  609. using __hashtable = _Hashtable<_Key, pair<const _Key, _Val>, _Alloc,
  610. _Select1st, _Equal, _Hash, _RangeHash,
  611. _Unused, _RehashPolicy, _Traits>;
  612. using __hash_code = typename __hashtable_base::__hash_code;
  613. public:
  614. using key_type = typename __hashtable_base::key_type;
  615. using mapped_type = _Val;
  616. mapped_type&
  617. operator[](const key_type& __k);
  618. mapped_type&
  619. operator[](key_type&& __k);
  620. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  621. // DR 761. unordered_map needs an at() member function.
  622. mapped_type&
  623. at(const key_type& __k)
  624. {
  625. auto __ite = static_cast<__hashtable*>(this)->find(__k);
  626. if (!__ite._M_cur)
  627. __throw_out_of_range(__N("unordered_map::at"));
  628. return __ite->second;
  629. }
  630. const mapped_type&
  631. at(const key_type& __k) const
  632. {
  633. auto __ite = static_cast<const __hashtable*>(this)->find(__k);
  634. if (!__ite._M_cur)
  635. __throw_out_of_range(__N("unordered_map::at"));
  636. return __ite->second;
  637. }
  638. };
  639. template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
  640. typename _Hash, typename _RangeHash, typename _Unused,
  641. typename _RehashPolicy, typename _Traits>
  642. auto
  643. _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
  644. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
  645. operator[](const key_type& __k)
  646. -> mapped_type&
  647. {
  648. __hashtable* __h = static_cast<__hashtable*>(this);
  649. __hash_code __code = __h->_M_hash_code(__k);
  650. std::size_t __bkt = __h->_M_bucket_index(__code);
  651. if (auto __node = __h->_M_find_node(__bkt, __k, __code))
  652. return __node->_M_v().second;
  653. typename __hashtable::_Scoped_node __node {
  654. __h,
  655. std::piecewise_construct,
  656. std::tuple<const key_type&>(__k),
  657. std::tuple<>()
  658. };
  659. auto __pos
  660. = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
  661. __node._M_node = nullptr;
  662. return __pos->second;
  663. }
  664. template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
  665. typename _Hash, typename _RangeHash, typename _Unused,
  666. typename _RehashPolicy, typename _Traits>
  667. auto
  668. _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
  669. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
  670. operator[](key_type&& __k)
  671. -> mapped_type&
  672. {
  673. __hashtable* __h = static_cast<__hashtable*>(this);
  674. __hash_code __code = __h->_M_hash_code(__k);
  675. std::size_t __bkt = __h->_M_bucket_index(__code);
  676. if (auto __node = __h->_M_find_node(__bkt, __k, __code))
  677. return __node->_M_v().second;
  678. typename __hashtable::_Scoped_node __node {
  679. __h,
  680. std::piecewise_construct,
  681. std::forward_as_tuple(std::move(__k)),
  682. std::tuple<>()
  683. };
  684. auto __pos
  685. = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
  686. __node._M_node = nullptr;
  687. return __pos->second;
  688. }
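// The user-visible behaviour implemented by the two operator[] overloads
// above: find first, and only insert a value-initialized mapped_type (via the
// empty tuple) when the key is absent. For example:
//
//   std::unordered_map<std::string, int> __m;
//   __m["hits"];        // absent: inserts {"hits", 0}
//   __m["hits"] += 1;   // present: no new node, returns the existing slot
//   __m.at("misses");   // at() (defined above) throws std::out_of_range
//                       // instead of inserting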
  689. // Partial specialization for unordered_map<const T, U>, see PR 104174.
  690. template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
  691. typename _Hash, typename _RangeHash, typename _Unused,
  692. typename _RehashPolicy, typename _Traits, bool __uniq>
  693. struct _Map_base<const _Key, pair<const _Key, _Val>,
  694. _Alloc, _Select1st, _Equal, _Hash,
  695. _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
  696. : _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal, _Hash,
  697. _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
  698. { };
  699. /**
  700. * Primary class template _Insert_base.
  701. *
  702. * Defines @c insert member functions appropriate to all _Hashtables.
  703. */
  704. template<typename _Key, typename _Value, typename _Alloc,
  705. typename _ExtractKey, typename _Equal,
  706. typename _Hash, typename _RangeHash, typename _Unused,
  707. typename _RehashPolicy, typename _Traits>
  708. struct _Insert_base
  709. {
  710. protected:
  711. using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
  712. _Equal, _Hash, _RangeHash,
  713. _Unused, _Traits>;
  714. using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  715. _Hash, _RangeHash,
  716. _Unused, _RehashPolicy, _Traits>;
  717. using __hash_cached = typename _Traits::__hash_cached;
  718. using __constant_iterators = typename _Traits::__constant_iterators;
  719. using __hashtable_alloc = _Hashtable_alloc<
  720. __alloc_rebind<_Alloc, _Hash_node<_Value,
  721. __hash_cached::value>>>;
  722. using value_type = typename __hashtable_base::value_type;
  723. using size_type = typename __hashtable_base::size_type;
  724. using __unique_keys = typename _Traits::__unique_keys;
  725. using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
  726. using __node_gen_type = _AllocNode<__node_alloc_type>;
  727. __hashtable&
  728. _M_conjure_hashtable()
  729. { return *(static_cast<__hashtable*>(this)); }
  730. template<typename _InputIterator, typename _NodeGetter>
  731. void
  732. _M_insert_range(_InputIterator __first, _InputIterator __last,
  733. const _NodeGetter&, true_type __uks);
  734. template<typename _InputIterator, typename _NodeGetter>
  735. void
  736. _M_insert_range(_InputIterator __first, _InputIterator __last,
  737. const _NodeGetter&, false_type __uks);
  738. public:
  739. using iterator = _Node_iterator<_Value, __constant_iterators::value,
  740. __hash_cached::value>;
  741. using const_iterator = _Node_const_iterator<_Value,
  742. __constant_iterators::value,
  743. __hash_cached::value>;
  744. using __ireturn_type = __conditional_t<__unique_keys::value,
  745. std::pair<iterator, bool>,
  746. iterator>;
  747. __ireturn_type
  748. insert(const value_type& __v)
  749. {
  750. __hashtable& __h = _M_conjure_hashtable();
  751. __node_gen_type __node_gen(__h);
  752. return __h._M_insert(__v, __node_gen, __unique_keys{});
  753. }
  754. iterator
  755. insert(const_iterator __hint, const value_type& __v)
  756. {
  757. __hashtable& __h = _M_conjure_hashtable();
  758. __node_gen_type __node_gen(__h);
  759. return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
  760. }
  761. template<typename _KType, typename... _Args>
  762. std::pair<iterator, bool>
  763. try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
  764. {
  765. __hashtable& __h = _M_conjure_hashtable();
  766. auto __code = __h._M_hash_code(__k);
  767. std::size_t __bkt = __h._M_bucket_index(__code);
  768. if (auto __node = __h._M_find_node(__bkt, __k, __code))
  769. return { iterator(__node), false };
  770. typename __hashtable::_Scoped_node __node {
  771. &__h,
  772. std::piecewise_construct,
  773. std::forward_as_tuple(std::forward<_KType>(__k)),
  774. std::forward_as_tuple(std::forward<_Args>(__args)...)
  775. };
  776. auto __it
  777. = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
  778. __node._M_node = nullptr;
  779. return { __it, true };
  780. }
  781. void
  782. insert(initializer_list<value_type> __l)
  783. { this->insert(__l.begin(), __l.end()); }
  784. template<typename _InputIterator>
  785. void
  786. insert(_InputIterator __first, _InputIterator __last)
  787. {
  788. __hashtable& __h = _M_conjure_hashtable();
  789. __node_gen_type __node_gen(__h);
  790. return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
  791. }
  792. };
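// __ireturn_type above is what makes insert() return pair<iterator, bool> for
// unique-key containers but a plain iterator for multi containers:
//
//   std::unordered_set<int> __s;
//   auto [__it, __inserted] = __s.insert(1);  // pair<iterator, bool>
//
//   std::unordered_multiset<int> __ms;
//   auto __it2 = __ms.insert(1);              // iterator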
  793. template<typename _Key, typename _Value, typename _Alloc,
  794. typename _ExtractKey, typename _Equal,
  795. typename _Hash, typename _RangeHash, typename _Unused,
  796. typename _RehashPolicy, typename _Traits>
  797. template<typename _InputIterator, typename _NodeGetter>
  798. void
  799. _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  800. _Hash, _RangeHash, _Unused,
  801. _RehashPolicy, _Traits>::
  802. _M_insert_range(_InputIterator __first, _InputIterator __last,
  803. const _NodeGetter& __node_gen, true_type __uks)
  804. {
  805. __hashtable& __h = _M_conjure_hashtable();
  806. for (; __first != __last; ++__first)
  807. __h._M_insert(*__first, __node_gen, __uks);
  808. }
  809. template<typename _Key, typename _Value, typename _Alloc,
  810. typename _ExtractKey, typename _Equal,
  811. typename _Hash, typename _RangeHash, typename _Unused,
  812. typename _RehashPolicy, typename _Traits>
  813. template<typename _InputIterator, typename _NodeGetter>
  814. void
  815. _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  816. _Hash, _RangeHash, _Unused,
  817. _RehashPolicy, _Traits>::
  818. _M_insert_range(_InputIterator __first, _InputIterator __last,
  819. const _NodeGetter& __node_gen, false_type __uks)
  820. {
  821. using __rehash_type = typename __hashtable::__rehash_type;
  822. using __rehash_state = typename __hashtable::__rehash_state;
  823. using pair_type = std::pair<bool, std::size_t>;
  824. size_type __n_elt = __detail::__distance_fw(__first, __last);
  825. if (__n_elt == 0)
  826. return;
  827. __hashtable& __h = _M_conjure_hashtable();
  828. __rehash_type& __rehash = __h._M_rehash_policy;
  829. const __rehash_state& __saved_state = __rehash._M_state();
  830. pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
  831. __h._M_element_count,
  832. __n_elt);
  833. if (__do_rehash.first)
  834. __h._M_rehash(__do_rehash.second, __saved_state);
  835. for (; __first != __last; ++__first)
  836. __h._M_insert(*__first, __node_gen, __uks);
  837. }
  838. /**
  839. * Primary class template _Insert.
  840. *
  841. * Defines @c insert member functions that depend on _Hashtable policies,
  842. * via partial specializations.
  843. */
  844. template<typename _Key, typename _Value, typename _Alloc,
  845. typename _ExtractKey, typename _Equal,
  846. typename _Hash, typename _RangeHash, typename _Unused,
  847. typename _RehashPolicy, typename _Traits,
  848. bool _Constant_iterators = _Traits::__constant_iterators::value>
  849. struct _Insert;
  850. /// Specialization.
  851. template<typename _Key, typename _Value, typename _Alloc,
  852. typename _ExtractKey, typename _Equal,
  853. typename _Hash, typename _RangeHash, typename _Unused,
  854. typename _RehashPolicy, typename _Traits>
  855. struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  856. _Hash, _RangeHash, _Unused,
  857. _RehashPolicy, _Traits, true>
  858. : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  859. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  860. {
  861. using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
  862. _Equal, _Hash, _RangeHash, _Unused,
  863. _RehashPolicy, _Traits>;
  864. using value_type = typename __base_type::value_type;
  865. using iterator = typename __base_type::iterator;
  866. using const_iterator = typename __base_type::const_iterator;
  867. using __ireturn_type = typename __base_type::__ireturn_type;
  868. using __unique_keys = typename __base_type::__unique_keys;
  869. using __hashtable = typename __base_type::__hashtable;
  870. using __node_gen_type = typename __base_type::__node_gen_type;
  871. using __base_type::insert;
  872. __ireturn_type
  873. insert(value_type&& __v)
  874. {
  875. __hashtable& __h = this->_M_conjure_hashtable();
  876. __node_gen_type __node_gen(__h);
  877. return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
  878. }
  879. iterator
  880. insert(const_iterator __hint, value_type&& __v)
  881. {
  882. __hashtable& __h = this->_M_conjure_hashtable();
  883. __node_gen_type __node_gen(__h);
  884. return __h._M_insert(__hint, std::move(__v), __node_gen,
  885. __unique_keys{});
  886. }
  887. };
  888. /// Specialization.
  889. template<typename _Key, typename _Value, typename _Alloc,
  890. typename _ExtractKey, typename _Equal,
  891. typename _Hash, typename _RangeHash, typename _Unused,
  892. typename _RehashPolicy, typename _Traits>
  893. struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  894. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
  895. : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  896. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  897. {
  898. using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
  899. _Equal, _Hash, _RangeHash, _Unused,
  900. _RehashPolicy, _Traits>;
  901. using value_type = typename __base_type::value_type;
  902. using iterator = typename __base_type::iterator;
  903. using const_iterator = typename __base_type::const_iterator;
  904. using __unique_keys = typename __base_type::__unique_keys;
  905. using __hashtable = typename __base_type::__hashtable;
  906. using __ireturn_type = typename __base_type::__ireturn_type;
  907. using __base_type::insert;
  908. template<typename _Pair>
  909. using __is_cons = std::is_constructible<value_type, _Pair&&>;
  910. template<typename _Pair>
  911. using _IFcons = std::enable_if<__is_cons<_Pair>::value>;
  912. template<typename _Pair>
  913. using _IFconsp = typename _IFcons<_Pair>::type;
  914. template<typename _Pair, typename = _IFconsp<_Pair>>
  915. __ireturn_type
  916. insert(_Pair&& __v)
  917. {
  918. __hashtable& __h = this->_M_conjure_hashtable();
  919. return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
  920. }
  921. template<typename _Pair, typename = _IFconsp<_Pair>>
  922. iterator
  923. insert(const_iterator __hint, _Pair&& __v)
  924. {
  925. __hashtable& __h = this->_M_conjure_hashtable();
  926. return __h._M_emplace(__hint, __unique_keys{},
  927. std::forward<_Pair>(__v));
  928. }
  929. };
  930. template<typename _Policy>
  931. using __has_load_factor = typename _Policy::__has_load_factor;
  932. /**
  933. * Primary class template _Rehash_base.
  934. *
935. * Gives the hashtable the max_load_factor functions and reserve() iff the
936. * rehash policy supports them.
  937. */
  938. template<typename _Key, typename _Value, typename _Alloc,
  939. typename _ExtractKey, typename _Equal,
  940. typename _Hash, typename _RangeHash, typename _Unused,
  941. typename _RehashPolicy, typename _Traits,
  942. typename =
  943. __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
  944. struct _Rehash_base;
945. /// Specialization when the rehash policy doesn't provide load factor management.
  946. template<typename _Key, typename _Value, typename _Alloc,
  947. typename _ExtractKey, typename _Equal,
  948. typename _Hash, typename _RangeHash, typename _Unused,
  949. typename _RehashPolicy, typename _Traits>
  950. struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  951. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
  952. false_type /* Has load factor */>
  953. {
  954. };
955. /// Specialization when the rehash policy provides load factor management.
  956. template<typename _Key, typename _Value, typename _Alloc,
  957. typename _ExtractKey, typename _Equal,
  958. typename _Hash, typename _RangeHash, typename _Unused,
  959. typename _RehashPolicy, typename _Traits>
  960. struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  961. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
  962. true_type /* Has load factor */>
  963. {
  964. private:
  965. using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
  966. _Equal, _Hash, _RangeHash, _Unused,
  967. _RehashPolicy, _Traits>;
  968. public:
  969. float
  970. max_load_factor() const noexcept
  971. {
  972. const __hashtable* __this = static_cast<const __hashtable*>(this);
  973. return __this->__rehash_policy().max_load_factor();
  974. }
  975. void
  976. max_load_factor(float __z)
  977. {
  978. __hashtable* __this = static_cast<__hashtable*>(this);
  979. __this->__rehash_policy(_RehashPolicy(__z));
  980. }
  981. void
  982. reserve(std::size_t __n)
  983. {
  984. __hashtable* __this = static_cast<__hashtable*>(this);
  985. __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
  986. }
  987. };
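// What the load-factor specialization above provides to users, as a sketch
// (prime rehash policy, so bucket counts are rounded up to a prime):
//
//   std::unordered_map<int, int> __m;
//   __m.max_load_factor(0.5f);   // forwarded to the rehash policy
//   __m.reserve(1000);           // rehash(_M_bkt_for_elements(1000)), i.e.
//                                // at least 2000 buckets at load factor 0.5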
  988. /**
  989. * Primary class template _Hashtable_ebo_helper.
  990. *
  991. * Helper class using EBO when it is not forbidden (the type is not
992. * final) and when it is worth it (the type is empty).
  993. */
  994. template<int _Nm, typename _Tp,
  995. bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
  996. struct _Hashtable_ebo_helper;
  997. /// Specialization using EBO.
  998. template<int _Nm, typename _Tp>
  999. struct _Hashtable_ebo_helper<_Nm, _Tp, true>
  1000. : private _Tp
  1001. {
  1002. _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }
  1003. template<typename _OtherTp>
  1004. _Hashtable_ebo_helper(_OtherTp&& __tp)
  1005. : _Tp(std::forward<_OtherTp>(__tp))
  1006. { }
  1007. const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
  1008. _Tp& _M_get() { return static_cast<_Tp&>(*this); }
  1009. };
  1010. /// Specialization not using EBO.
  1011. template<int _Nm, typename _Tp>
  1012. struct _Hashtable_ebo_helper<_Nm, _Tp, false>
  1013. {
  1014. _Hashtable_ebo_helper() = default;
  1015. template<typename _OtherTp>
  1016. _Hashtable_ebo_helper(_OtherTp&& __tp)
  1017. : _M_tp(std::forward<_OtherTp>(__tp))
  1018. { }
  1019. const _Tp& _M_cget() const { return _M_tp; }
  1020. _Tp& _M_get() { return _M_tp; }
  1021. private:
  1022. _Tp _M_tp{};
  1023. };
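// The effect of EBO, as a sketch with hypothetical functors: an empty,
// non-final hasher contributes no storage, while a stateful one is kept as a
// data member.
//
//   struct EmptyHash { std::size_t operator()(int) const { return 0; } };
//   struct SeededHash
//   { std::size_t seed; std::size_t operator()(int i) const { return i ^ seed; } };
//
//   static_assert(sizeof(_Hashtable_ebo_helper<0, EmptyHash>) == 1, "EBO");
//   static_assert(sizeof(_Hashtable_ebo_helper<0, SeededHash>)
//                 >= sizeof(SeededHash), "stored as a data member");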
  1024. /**
  1025. * Primary class template _Local_iterator_base.
  1026. *
  1027. * Base class for local iterators, used to iterate within a bucket
  1028. * but not between buckets.
  1029. */
  1030. template<typename _Key, typename _Value, typename _ExtractKey,
  1031. typename _Hash, typename _RangeHash, typename _Unused,
  1032. bool __cache_hash_code>
  1033. struct _Local_iterator_base;
  1034. /**
  1035. * Primary class template _Hash_code_base.
  1036. *
  1037. * Encapsulates two policy issues that aren't quite orthogonal.
1038. * (1) The difference between using a ranged hash function and using
  1039. * the combination of a hash function and a range-hashing function.
  1040. * In the former case we don't have such things as hash codes, so
  1041. * we have a dummy type as placeholder.
  1042. * (2) Whether or not we cache hash codes. Caching hash codes is
  1043. * meaningless if we have a ranged hash function.
  1044. *
  1045. * We also put the key extraction objects here, for convenience.
  1046. * Each specialization derives from one or more of the template
1047. * parameters to benefit from EBO. This is important as this type
  1048. * is inherited in some cases by the _Local_iterator_base type used
  1049. * to implement local_iterator and const_local_iterator. As with
  1050. * any iterator type we prefer to make it as small as possible.
  1051. */
  1052. template<typename _Key, typename _Value, typename _ExtractKey,
  1053. typename _Hash, typename _RangeHash, typename _Unused,
  1054. bool __cache_hash_code>
  1055. struct _Hash_code_base
  1056. : private _Hashtable_ebo_helper<1, _Hash>
  1057. {
  1058. private:
  1059. using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;
  1060. // Gives the local iterator implementation access to _M_bucket_index().
  1061. friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
  1062. _Hash, _RangeHash, _Unused, false>;
  1063. public:
  1064. typedef _Hash hasher;
  1065. hasher
  1066. hash_function() const
  1067. { return _M_hash(); }
  1068. protected:
  1069. typedef std::size_t __hash_code;
  1070. // We need the default constructor for the local iterators and _Hashtable
  1071. // default constructor.
  1072. _Hash_code_base() = default;
  1073. _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }
  1074. __hash_code
  1075. _M_hash_code(const _Key& __k) const
  1076. {
  1077. static_assert(__is_invocable<const _Hash&, const _Key&>{},
  1078. "hash function must be invocable with an argument of key type");
  1079. return _M_hash()(__k);
  1080. }
  1081. template<typename _Kt>
  1082. __hash_code
  1083. _M_hash_code_tr(const _Kt& __k) const
  1084. {
  1085. static_assert(__is_invocable<const _Hash&, const _Kt&>{},
  1086. "hash function must be invocable with an argument of key type");
  1087. return _M_hash()(__k);
  1088. }
  1089. __hash_code
  1090. _M_hash_code(const _Hash&,
  1091. const _Hash_node_value<_Value, true>& __n) const
  1092. { return __n._M_hash_code; }
1093. // Compute the hash code using _Hash, since __n's cached _M_hash_code,
1094. // if present, was computed using _H2.
  1095. template<typename _H2>
  1096. __hash_code
  1097. _M_hash_code(const _H2&,
  1098. const _Hash_node_value<_Value, __cache_hash_code>& __n) const
  1099. { return _M_hash_code(_ExtractKey{}(__n._M_v())); }
  1100. __hash_code
  1101. _M_hash_code(const _Hash_node_value<_Value, false>& __n) const
  1102. { return _M_hash_code(_ExtractKey{}(__n._M_v())); }
  1103. __hash_code
  1104. _M_hash_code(const _Hash_node_value<_Value, true>& __n) const
  1105. { return __n._M_hash_code; }
  1106. std::size_t
  1107. _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
  1108. { return _RangeHash{}(__c, __bkt_count); }
  1109. std::size_t
  1110. _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
  1111. std::size_t __bkt_count) const
  1112. noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
  1113. && noexcept(declval<const _RangeHash&>()((__hash_code)0,
  1114. (std::size_t)0)) )
  1115. {
  1116. return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
  1117. __bkt_count);
  1118. }
  1119. std::size_t
  1120. _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
  1121. std::size_t __bkt_count) const
  1122. noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
  1123. (std::size_t)0)) )
  1124. { return _RangeHash{}(__n._M_hash_code, __bkt_count); }
  1125. void
  1126. _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
  1127. { }
  1128. void
  1129. _M_copy_code(_Hash_node_code_cache<false>&,
  1130. const _Hash_node_code_cache<false>&) const
  1131. { }
  1132. void
  1133. _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
  1134. { __n._M_hash_code = __c; }
  1135. void
  1136. _M_copy_code(_Hash_node_code_cache<true>& __to,
  1137. const _Hash_node_code_cache<true>& __from) const
  1138. { __to._M_hash_code = __from._M_hash_code; }
  1139. void
  1140. _M_swap(_Hash_code_base& __x)
  1141. { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }
  1142. const _Hash&
  1143. _M_hash() const { return __ebo_hash::_M_cget(); }
  1144. };
  1145. /// Partial specialization used when nodes contain a cached hash code.
  1146. template<typename _Key, typename _Value, typename _ExtractKey,
  1147. typename _Hash, typename _RangeHash, typename _Unused>
  1148. struct _Local_iterator_base<_Key, _Value, _ExtractKey,
  1149. _Hash, _RangeHash, _Unused, true>
  1150. : public _Node_iterator_base<_Value, true>
  1151. {
  1152. protected:
  1153. using __base_node_iter = _Node_iterator_base<_Value, true>;
  1154. using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
  1155. _Hash, _RangeHash, _Unused, true>;
  1156. _Local_iterator_base() = default;
  1157. _Local_iterator_base(const __hash_code_base&,
  1158. _Hash_node<_Value, true>* __p,
  1159. std::size_t __bkt, std::size_t __bkt_count)
  1160. : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
  1161. { }
  1162. void
  1163. _M_incr()
  1164. {
  1165. __base_node_iter::_M_incr();
  1166. if (this->_M_cur)
  1167. {
  1168. std::size_t __bkt
  1169. = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
  1170. if (__bkt != _M_bucket)
  1171. this->_M_cur = nullptr;
  1172. }
  1173. }
  1174. std::size_t _M_bucket;
  1175. std::size_t _M_bucket_count;
  1176. public:
  1177. std::size_t
  1178. _M_get_bucket() const { return _M_bucket; } // for debug mode
  1179. };
  1180. // Uninitialized storage for a _Hash_code_base.
  1181. // This type is DefaultConstructible and Assignable even if the
  1182. // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
  1183. // can be DefaultConstructible and Assignable.
  1184. template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
  1185. struct _Hash_code_storage
  1186. {
  1187. __gnu_cxx::__aligned_buffer<_Tp> _M_storage;
  1188. _Tp*
  1189. _M_h() { return _M_storage._M_ptr(); }
  1190. const _Tp*
  1191. _M_h() const { return _M_storage._M_ptr(); }
  1192. };
  1193. // Empty partial specialization for empty _Hash_code_base types.
  1194. template<typename _Tp>
  1195. struct _Hash_code_storage<_Tp, true>
  1196. {
  1197. static_assert( std::is_empty<_Tp>::value, "Type must be empty" );
  1198. // As _Tp is an empty type there will be no bytes written/read through
  1199. // the cast pointer, so no strict-aliasing violation.
  1200. _Tp*
  1201. _M_h() { return reinterpret_cast<_Tp*>(this); }
  1202. const _Tp*
  1203. _M_h() const { return reinterpret_cast<const _Tp*>(this); }
  1204. };
  1205. template<typename _Key, typename _Value, typename _ExtractKey,
  1206. typename _Hash, typename _RangeHash, typename _Unused>
  1207. using __hash_code_for_local_iter
  1208. = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
  1209. _Hash, _RangeHash, _Unused, false>>;
  // Partial specialization used when hash codes are not cached
  template<typename _Key, typename _Value, typename _ExtractKey,
           typename _Hash, typename _RangeHash, typename _Unused>
    struct _Local_iterator_base<_Key, _Value, _ExtractKey,
                                _Hash, _RangeHash, _Unused, false>
    : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
                                 _Unused>
    , _Node_iterator_base<_Value, false>
    {
    protected:
      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
                                               _Hash, _RangeHash, _Unused, false>;
      using __node_iter_base = _Node_iterator_base<_Value, false>;

      _Local_iterator_base() : _M_bucket_count(-1) { }

      _Local_iterator_base(const __hash_code_base& __base,
                           _Hash_node<_Value, false>* __p,
                           std::size_t __bkt, std::size_t __bkt_count)
      : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
      { _M_init(__base); }
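
      // A _M_bucket_count equal to size_t(-1) marks a default-constructed
      // iterator: no hash functor was placement-constructed in the
      // _Hash_code_storage base, so nothing must be destroyed or copied.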
      ~_Local_iterator_base()
      {
        if (_M_bucket_count != size_t(-1))
          _M_destroy();
      }

      _Local_iterator_base(const _Local_iterator_base& __iter)
      : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
      , _M_bucket_count(__iter._M_bucket_count)
      {
        if (_M_bucket_count != size_t(-1))
          _M_init(*__iter._M_h());
      }

      _Local_iterator_base&
      operator=(const _Local_iterator_base& __iter)
      {
        if (_M_bucket_count != -1)
          _M_destroy();
        this->_M_cur = __iter._M_cur;
        _M_bucket = __iter._M_bucket;
        _M_bucket_count = __iter._M_bucket_count;
        if (_M_bucket_count != -1)
          _M_init(*__iter._M_h());
        return *this;
      }

      void
      _M_incr()
      {
        __node_iter_base::_M_incr();
        if (this->_M_cur)
          {
            std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
                                                              _M_bucket_count);
            if (__bkt != _M_bucket)
              this->_M_cur = nullptr;
          }
      }

      std::size_t _M_bucket;
      std::size_t _M_bucket_count;

      void
      _M_init(const __hash_code_base& __base)
      { ::new(this->_M_h()) __hash_code_base(__base); }

      void
      _M_destroy() { this->_M_h()->~__hash_code_base(); }

    public:
      std::size_t
      _M_get_bucket() const { return _M_bucket; } // for debug mode
    };

  /// local iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
           typename _Hash, typename _RangeHash, typename _Unused,
           bool __constant_iterators, bool __cache>
    struct _Local_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
                                  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
                                               _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      using value_type = _Value;
      using pointer = __conditional_t<__constant_iterators,
                                      const value_type*, value_type*>;
      using reference = __conditional_t<__constant_iterators,
                                        const value_type&, value_type&>;
      using difference_type = ptrdiff_t;
      using iterator_category = forward_iterator_tag;

      _Local_iterator() = default;

      _Local_iterator(const __hash_code_base& __base,
                      _Hash_node<_Value, __cache>* __n,
                      std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Local_iterator
      operator++(int)
      {
        _Local_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
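
  // The unordered containers' bucket-range members (begin(n)/end(n)) are
  // implemented in terms of these local iterator types. Illustrative
  // user-level sketch (not part of this header):
  //
  //   std::unordered_set<int> s{1, 2, 3, 4};
  //   for (auto it = s.begin(0); it != s.end(0); ++it)
  //     ;  // visits only the elements stored in bucket 0
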
  /// local const_iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
           typename _Hash, typename _RangeHash, typename _Unused,
           bool __constant_iterators, bool __cache>
    struct _Local_const_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
                                  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
                                               _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value value_type;
      typedef const value_type* pointer;
      typedef const value_type& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Local_const_iterator() = default;

      _Local_const_iterator(const __hash_code_base& __base,
                            _Hash_node<_Value, __cache>* __n,
                            std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
                                                  _Hash, _RangeHash, _Unused,
                                                  __constant_iterators,
                                                  __cache>& __x)
      : __base_type(__x)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Local_const_iterator
      operator++(int)
      {
        _Local_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  /**
   *  Primary class template _Hashtable_base.
   *
   *  Helper class adding management of _Equal functor to
   *  _Hash_code_base type.
   *
   *  Base class templates are:
   *    - __detail::_Hash_code_base
   *    - __detail::_Hashtable_ebo_helper
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
           typename _Equal, typename _Hash, typename _RangeHash,
           typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
                             _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key key_type;
      typedef _Value value_type;
      typedef _Equal key_equal;
      typedef std::size_t size_type;
      typedef std::ptrdiff_t difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
                                               _Hash, _RangeHash, _Unused,
                                               __hash_cached::value>;

      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

      static bool
      _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<false>&,
                     const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
      { return __c == __n._M_hash_code; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
                     const _Hash_node_code_cache<true>& __rhn)
      { return __lhn._M_hash_code == __rhn._M_hash_code; }
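
      // The _S_equals/_S_node_equals overloads dispatch on whether nodes
      // cache their hash code: with caching, the stored codes are compared
      // first as a cheap filter before the equality predicate is invoked;
      // without caching they simply return true, so only the key_equal
      // comparison in the callers below decides.
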
    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      bool
      _M_key_equals(const _Key& __k,
                    const _Hash_node_value<_Value,
                                           __hash_cached::value>& __n) const
      {
        static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
          "key equality predicate must be invocable with two arguments of "
          "key type");
        return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      template<typename _Kt>
        bool
        _M_key_equals_tr(const _Kt& __k,
                         const _Hash_node_value<_Value,
                                                __hash_cached::value>& __n) const
        {
          static_assert(
            __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
            "key equality predicate must be invocable with two arguments of "
            "key type");
          return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
        }
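
      // The *_tr overloads support heterogeneous ("transparent") lookup:
      // _Kt need not be _Key, only a type the _Equal predicate accepts
      // together with _Key, so lookups can avoid constructing a temporary
      // key object.
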
      bool
      _M_equals(const _Key& __k, __hash_code __c,
                const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      { return _S_equals(__c, __n) && _M_key_equals(__k, __n); }

      template<typename _Kt>
        bool
        _M_equals_tr(const _Kt& __k, __hash_code __c,
                     const _Hash_node_value<_Value,
                                            __hash_cached::value>& __n) const
        { return _S_equals(__c, __n) && _M_key_equals_tr(__k, __n); }

      bool
      _M_node_equals(
        const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
        const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
        return _S_node_equals(__lhn, __rhn)
          && _M_key_equals(_ExtractKey{}(__lhn._M_v()), __rhn);
      }

      void
      _M_swap(_Hashtable_base& __x)
      {
        __hash_code_base::_M_swap(__x);
        std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };

  /**
   *  Primary class template _Equality.
   *
   *  This is for implementing equality comparison for unordered
   *  containers, per N3068, by John Lakos and Pablo Halpern.
   *  Algorithmically, we follow closely the reference implementations
   *  therein.
   */
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits,
           bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Equality;

  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                                     _Hash, _RangeHash, _Unused,
                                     _RehashPolicy, _Traits>;

      bool
      _M_equal(const __hashtable&) const;
    };
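
  // Unique-keys equality: both containers must have the same size, and every
  // element of *this must be found (by operator== on the value type) among
  // the nodes of the corresponding bucket of __other.  Bucket order and the
  // order within a bucket are irrelevant; the average cost is linear in
  // size().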
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
              _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
        {
          std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
          auto __prev_n = __other._M_buckets[__ybkt];
          if (!__prev_n)
            return false;

          for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
               __n = __n->_M_next())
            {
              if (__n->_M_v() == *__itx)
                break;

              if (!__n->_M_nxt
                  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
                return false;
            }
        }
      return true;
    }
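
  // This backs operator== for the unique-key containers.  Illustrative
  // user-level sketch (not part of this header):
  //
  //   std::unordered_set<int> a{1, 2, 3};
  //   std::unordered_set<int> b{3, 2, 1};
  //   assert(a == b);  // true even if the two tables bucket differently
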
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
                                     _Hash, _RangeHash, _Unused,
                                     _RehashPolicy, _Traits>;

      bool
      _M_equal(const __hashtable&) const;
    };
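
  // Multi-keys equality: elements with equivalent keys are grouped into
  // ranges.  For each range [__itx, __itx_end) of *this, the first node with
  // an equivalent key is located in the corresponding bucket of __other, the
  // remainder of __other from that node is checked to contain at least as
  // many elements (so the three-argument is_permutation call is valid), and
  // the two ranges are then compared with std::is_permutation.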
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
              _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
        {
          std::size_t __x_count = 1;
          auto __itx_end = __itx;
          for (++__itx_end; __itx_end != __this->end()
                 && __this->key_eq()(_ExtractKey{}(*__itx),
                                     _ExtractKey{}(*__itx_end));
               ++__itx_end)
            ++__x_count;

          std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
          auto __y_prev_n = __other._M_buckets[__ybkt];
          if (!__y_prev_n)
            return false;

          __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
          for (;;)
            {
              if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
                                   _ExtractKey{}(*__itx)))
                break;

              auto __y_ref_n = __y_n;
              for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
                if (!__other._M_node_equals(*__y_ref_n, *__y_n))
                  break;

              if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
                return false;
            }

          typename __hashtable::const_iterator __ity(__y_n);
          for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
            if (--__x_count == 0)
              break;

          if (__x_count != 0)
            return false;

          if (!std::is_permutation(__itx, __itx_end, __ity))
            return false;

          __itx = __itx_end;
        }
      return true;
    }

  /**
   * This type deals with all allocation and keeps an allocator instance
   * through inheritance to benefit from EBO when possible.
   */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;

      template<typename>
        struct __get_value_type;
      template<typename _Val, bool _Cache_hash_code>
        struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
        { using type = _Val; };

    public:
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      using __value_alloc_traits = typename __node_alloc_traits::template
        rebind_traits<typename __get_value_type<__node_type>::type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      using __buckets_alloc_type =
        __alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
        _Hashtable_alloc(_Alloc&& __a)
        : __ebo_node_alloc(std::forward<_Alloc>(__a))
        { }

      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
        __node_ptr
        _M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node.
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };

  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.
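
  // _M_allocate_node is exception-safe: memory for the node is obtained from
  // the node allocator, the node header is placement-constructed, and the
  // element is then constructed through the allocator; if the element's
  // constructor throws, the memory is returned to the allocator before the
  // exception is re-thrown.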
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
        auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
        __node_ptr __n = std::__to_address(__nptr);
        __try
          {
            ::new ((void*)__n) __node_type;
            __node_alloc_traits::construct(_M_node_allocator(),
                                           __n->_M_valptr(),
                                           std::forward<_Args>(__args)...);
            return __n;
          }
        __catch(...)
          {
            __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
            __throw_exception_again;
          }
      }

  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
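
  // The node allocator may use a fancy (non-raw) pointer type, so the raw
  // node pointer is converted back with pointer_traits::pointer_to before
  // being passed to the allocator's deallocate.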
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }

  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
    {
      while (__n)
        {
          __node_ptr __tmp = __n;
          __n = __n->_M_next();
          _M_deallocate_node(__tmp);
        }
    }
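
  // The bucket array is zero-filled after allocation, so every bucket
  // pointer starts out null and all buckets are initially empty.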
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }

  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
                          std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }

  ///@} hashtable-detail
} // namespace __detail
/// @endcond
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _HASHTABLE_POLICY_H