hashtable.h 88 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700
  1. // hashtable.h header -*- C++ -*-
  2. // Copyright (C) 2007-2022 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/hashtable.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly. @headername{unordered_map, unordered_set}
  23. */
  24. #ifndef _HASHTABLE_H
  25. #define _HASHTABLE_H 1
  26. #pragma GCC system_header
  27. #include <bits/hashtable_policy.h>
  28. #include <bits/enable_special_members.h>
  29. #if __cplusplus > 201402L
  30. # include <bits/node_handle.h>
  31. #endif
  32. #include <bits/functional_hash.h>
  33. #include <bits/stl_function.h> // equal_to, _Identity, _Select1st
  34. namespace std _GLIBCXX_VISIBILITY(default)
  35. {
  36. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  37. /// @cond undocumented
  38. template<typename _Tp, typename _Hash>
  39. using __cache_default
  40. = __not_<__and_<// Do not cache for fast hasher.
  41. __is_fast_hash<_Hash>,
  42. // Mandatory to have erase not throwing.
  43. __is_nothrow_invocable<const _Hash&, const _Tp&>>>;
  44. // Helper to conditionally delete the default constructor.
  45. // The _Hash_node_base type is used to distinguish this specialization
  46. // from any other potentially-overlapping subobjects of the hashtable.
  47. template<typename _Equal, typename _Hash, typename _Allocator>
  48. using _Hashtable_enable_default_ctor
  49. = _Enable_default_constructor<__and_<is_default_constructible<_Equal>,
  50. is_default_constructible<_Hash>,
  51. is_default_constructible<_Allocator>>{},
  52. __detail::_Hash_node_base>;
  53. /**
  54. * Primary class template _Hashtable.
  55. *
  56. * @ingroup hashtable-detail
  57. *
  58. * @tparam _Value CopyConstructible type.
  59. *
  60. * @tparam _Key CopyConstructible type.
  61. *
  62. * @tparam _Alloc An allocator type
  63. * ([lib.allocator.requirements]) whose _Alloc::value_type is
  64. * _Value. As a conforming extension, we allow for
  65. * _Alloc::value_type != _Value.
  66. *
  67. * @tparam _ExtractKey Function object that takes an object of type
  68. * _Value and returns a value of type _Key.
  69. *
  70. * @tparam _Equal Function object that takes two objects of type k
  71. * and returns a bool-like value that is true if the two objects
  72. * are considered equal.
  73. *
  74. * @tparam _Hash The hash function. A unary function object with
  75. * argument type _Key and result type size_t. Return values should
  76. * be distributed over the entire range [0, numeric_limits<size_t>:::max()].
  77. *
  78. * @tparam _RangeHash The range-hashing function (in the terminology of
  79. * Tavori and Dreizin). A binary function object whose argument
  80. * types and result type are all size_t. Given arguments r and N,
  81. * the return value is in the range [0, N).
  82. *
  83. * @tparam _Unused Not used.
  84. *
  85. * @tparam _RehashPolicy Policy class with three members, all of
  86. * which govern the bucket count. _M_next_bkt(n) returns a bucket
  87. * count no smaller than n. _M_bkt_for_elements(n) returns a
  88. * bucket count appropriate for an element count of n.
  89. * _M_need_rehash(n_bkt, n_elt, n_ins) determines whether, if the
  90. * current bucket count is n_bkt and the current element count is
  91. * n_elt, we need to increase the bucket count for n_ins insertions.
  92. * If so, returns make_pair(true, n), where n is the new bucket count. If
  93. * not, returns make_pair(false, <anything>)
  94. *
  95. * @tparam _Traits Compile-time class with three boolean
  96. * std::integral_constant members: __cache_hash_code, __constant_iterators,
  97. * __unique_keys.
  98. *
  99. * Each _Hashtable data structure has:
  100. *
  101. * - _Bucket[] _M_buckets
  102. * - _Hash_node_base _M_before_begin
  103. * - size_type _M_bucket_count
  104. * - size_type _M_element_count
  105. *
  106. * with _Bucket being _Hash_node_base* and _Hash_node containing:
  107. *
  108. * - _Hash_node* _M_next
  109. * - Tp _M_value
  110. * - size_t _M_hash_code if cache_hash_code is true
  111. *
  112. * In terms of Standard containers the hashtable is like the aggregation of:
  113. *
  114. * - std::forward_list<_Node> containing the elements
  115. * - std::vector<std::forward_list<_Node>::iterator> representing the buckets
  116. *
  117. * The non-empty buckets contain the node before the first node in the
  118. * bucket. This design makes it possible to implement something like a
  119. * std::forward_list::insert_after on container insertion and
  120. * std::forward_list::erase_after on container erase
  121. * calls. _M_before_begin is equivalent to
  122. * std::forward_list::before_begin. Empty buckets contain
  123. * nullptr. Note that one of the non-empty buckets contains
  124. * &_M_before_begin which is not a dereferenceable node so the
  125. * node pointer in a bucket shall never be dereferenced, only its
  126. * next node can be.
  127. *
  128. * Walking through a bucket's nodes requires a check on the hash code to
  129. * see if each node is still in the bucket. Such a design assumes a
  130. * quite efficient hash functor and is one of the reasons it is
  131. * highly advisable to set __cache_hash_code to true.
  132. *
  133. * The container iterators are simply built from nodes. This way
  134. * incrementing the iterator is perfectly efficient independent of
  135. * how many empty buckets there are in the container.
  136. *
  137. * On insert we compute the element's hash code and use it to find the
  138. * bucket index. If the element must be inserted in an empty bucket
  139. * we add it at the beginning of the singly linked list and make the
  140. * bucket point to _M_before_begin. The bucket that used to point to
  141. * _M_before_begin, if any, is updated to point to its new before
  142. * begin node.
  143. *
  144. * On erase, the simple iterator design requires using the hash
  145. * functor to get the index of the bucket to update. For this
  146. * reason, when __cache_hash_code is set to false the hash functor must
  147. * not throw and this is enforced by a static assertion.
  148. *
  149. * Functionality is implemented by decomposition into base classes,
  150. * where the derived _Hashtable class is used in _Map_base,
  151. * _Insert, _Rehash_base, and _Equality base classes to access the
  152. * "this" pointer. _Hashtable_base is used in the base classes as a
  153. * non-recursive, fully-completed-type so that detailed nested type
  154. * information, such as iterator type and node type, can be
  155. * used. This is similar to the "Curiously Recurring Template
  156. * Pattern" (CRTP) technique, but uses a reconstructed, not
  157. * explicitly passed, template pattern.
  158. *
  159. * Base class templates are:
  160. * - __detail::_Hashtable_base
  161. * - __detail::_Map_base
  162. * - __detail::_Insert
  163. * - __detail::_Rehash_base
  164. * - __detail::_Equality
  165. */
  166. template<typename _Key, typename _Value, typename _Alloc,
  167. typename _ExtractKey, typename _Equal,
  168. typename _Hash, typename _RangeHash, typename _Unused,
  169. typename _RehashPolicy, typename _Traits>
  170. class _Hashtable
  171. : public __detail::_Hashtable_base<_Key, _Value, _ExtractKey, _Equal,
  172. _Hash, _RangeHash, _Unused, _Traits>,
  173. public __detail::_Map_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  174. _Hash, _RangeHash, _Unused,
  175. _RehashPolicy, _Traits>,
  176. public __detail::_Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  177. _Hash, _RangeHash, _Unused,
  178. _RehashPolicy, _Traits>,
  179. public __detail::_Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  180. _Hash, _RangeHash, _Unused,
  181. _RehashPolicy, _Traits>,
  182. public __detail::_Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  183. _Hash, _RangeHash, _Unused,
  184. _RehashPolicy, _Traits>,
  185. private __detail::_Hashtable_alloc<
  186. __alloc_rebind<_Alloc,
  187. __detail::_Hash_node<_Value,
  188. _Traits::__hash_cached::value>>>,
  189. private _Hashtable_enable_default_ctor<_Equal, _Hash, _Alloc>
  190. {
  191. static_assert(is_same<typename remove_cv<_Value>::type, _Value>::value,
  192. "unordered container must have a non-const, non-volatile value_type");
  193. #if __cplusplus > 201703L || defined __STRICT_ANSI__
  194. static_assert(is_same<typename _Alloc::value_type, _Value>{},
  195. "unordered container must have the same value_type as its allocator");
  196. #endif
  197. using __traits_type = _Traits;
  198. using __hash_cached = typename __traits_type::__hash_cached;
  199. using __constant_iterators = typename __traits_type::__constant_iterators;
  200. using __node_type = __detail::_Hash_node<_Value, __hash_cached::value>;
  201. using __node_alloc_type = __alloc_rebind<_Alloc, __node_type>;
  202. using __hashtable_alloc = __detail::_Hashtable_alloc<__node_alloc_type>;
  203. using __node_value_type =
  204. __detail::_Hash_node_value<_Value, __hash_cached::value>;
  205. using __node_ptr = typename __hashtable_alloc::__node_ptr;
  206. using __value_alloc_traits =
  207. typename __hashtable_alloc::__value_alloc_traits;
  208. using __node_alloc_traits =
  209. typename __hashtable_alloc::__node_alloc_traits;
  210. using __node_base = typename __hashtable_alloc::__node_base;
  211. using __node_base_ptr = typename __hashtable_alloc::__node_base_ptr;
  212. using __buckets_ptr = typename __hashtable_alloc::__buckets_ptr;
  213. using __insert_base = __detail::_Insert<_Key, _Value, _Alloc, _ExtractKey,
  214. _Equal, _Hash,
  215. _RangeHash, _Unused,
  216. _RehashPolicy, _Traits>;
  217. using __enable_default_ctor
  218. = _Hashtable_enable_default_ctor<_Equal, _Hash, _Alloc>;
  219. public:
  220. typedef _Key key_type;
  221. typedef _Value value_type;
  222. typedef _Alloc allocator_type;
  223. typedef _Equal key_equal;
  224. // mapped_type, if present, comes from _Map_base.
  225. // hasher, if present, comes from _Hash_code_base/_Hashtable_base.
  226. typedef typename __value_alloc_traits::pointer pointer;
  227. typedef typename __value_alloc_traits::const_pointer const_pointer;
  228. typedef value_type& reference;
  229. typedef const value_type& const_reference;
  230. using iterator = typename __insert_base::iterator;
  231. using const_iterator = typename __insert_base::const_iterator;
  232. using local_iterator = __detail::_Local_iterator<key_type, _Value,
  233. _ExtractKey, _Hash, _RangeHash, _Unused,
  234. __constant_iterators::value,
  235. __hash_cached::value>;
  236. using const_local_iterator = __detail::_Local_const_iterator<
  237. key_type, _Value,
  238. _ExtractKey, _Hash, _RangeHash, _Unused,
  239. __constant_iterators::value, __hash_cached::value>;
  240. private:
  241. using __rehash_type = _RehashPolicy;
  242. using __rehash_state = typename __rehash_type::_State;
  243. using __unique_keys = typename __traits_type::__unique_keys;
  244. using __hashtable_base = __detail::
  245. _Hashtable_base<_Key, _Value, _ExtractKey,
  246. _Equal, _Hash, _RangeHash, _Unused, _Traits>;
  247. using __hash_code_base = typename __hashtable_base::__hash_code_base;
  248. using __hash_code = typename __hashtable_base::__hash_code;
  249. using __ireturn_type = typename __insert_base::__ireturn_type;
  250. using __map_base = __detail::_Map_base<_Key, _Value, _Alloc, _ExtractKey,
  251. _Equal, _Hash, _RangeHash, _Unused,
  252. _RehashPolicy, _Traits>;
  253. using __rehash_base = __detail::_Rehash_base<_Key, _Value, _Alloc,
  254. _ExtractKey, _Equal,
  255. _Hash, _RangeHash, _Unused,
  256. _RehashPolicy, _Traits>;
  257. using __eq_base = __detail::_Equality<_Key, _Value, _Alloc, _ExtractKey,
  258. _Equal, _Hash, _RangeHash, _Unused,
  259. _RehashPolicy, _Traits>;
  260. using __reuse_or_alloc_node_gen_t =
  261. __detail::_ReuseOrAllocNode<__node_alloc_type>;
  262. using __alloc_node_gen_t =
  263. __detail::_AllocNode<__node_alloc_type>;
  264. using __node_builder_t =
  265. __detail::_NodeBuilder<_ExtractKey>;
  266. // Simple RAII type for managing a node containing an element
  267. struct _Scoped_node
  268. {
  269. // Take ownership of a node with a constructed element.
  270. _Scoped_node(__node_ptr __n, __hashtable_alloc* __h)
  271. : _M_h(__h), _M_node(__n) { }
  272. // Allocate a node and construct an element within it.
  273. template<typename... _Args>
  274. _Scoped_node(__hashtable_alloc* __h, _Args&&... __args)
  275. : _M_h(__h),
  276. _M_node(__h->_M_allocate_node(std::forward<_Args>(__args)...))
  277. { }
  278. // Destroy element and deallocate node.
  279. ~_Scoped_node() { if (_M_node) _M_h->_M_deallocate_node(_M_node); };
  280. _Scoped_node(const _Scoped_node&) = delete;
  281. _Scoped_node& operator=(const _Scoped_node&) = delete;
  282. __hashtable_alloc* _M_h;
  283. __node_ptr _M_node;
  284. };
  285. template<typename _Ht>
  286. static constexpr
  287. __conditional_t<std::is_lvalue_reference<_Ht>::value,
  288. const value_type&, value_type&&>
  289. __fwd_value_for(value_type& __val) noexcept
  290. { return std::move(__val); }
  291. // Compile-time diagnostics.
  292. // _Hash_code_base has everything protected, so use this derived type to
  293. // access it.
  294. struct __hash_code_base_access : __hash_code_base
  295. { using __hash_code_base::_M_bucket_index; };
  296. // To get bucket index we need _RangeHash not to throw.
  297. static_assert(is_nothrow_default_constructible<_RangeHash>::value,
  298. "Functor used to map hash code to bucket index"
  299. " must be nothrow default constructible");
  300. static_assert(noexcept(
  301. std::declval<const _RangeHash&>()((std::size_t)0, (std::size_t)0)),
  302. "Functor used to map hash code to bucket index must be"
  303. " noexcept");
  304. // To compute bucket index we also need _ExtratKey not to throw.
  305. static_assert(is_nothrow_default_constructible<_ExtractKey>::value,
  306. "_ExtractKey must be nothrow default constructible");
  307. static_assert(noexcept(
  308. std::declval<const _ExtractKey&>()(std::declval<_Value>())),
  309. "_ExtractKey functor must be noexcept invocable");
  310. template<typename _Keya, typename _Valuea, typename _Alloca,
  311. typename _ExtractKeya, typename _Equala,
  312. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  313. typename _RehashPolicya, typename _Traitsa,
  314. bool _Unique_keysa>
  315. friend struct __detail::_Map_base;
  316. template<typename _Keya, typename _Valuea, typename _Alloca,
  317. typename _ExtractKeya, typename _Equala,
  318. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  319. typename _RehashPolicya, typename _Traitsa>
  320. friend struct __detail::_Insert_base;
  321. template<typename _Keya, typename _Valuea, typename _Alloca,
  322. typename _ExtractKeya, typename _Equala,
  323. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  324. typename _RehashPolicya, typename _Traitsa,
  325. bool _Constant_iteratorsa>
  326. friend struct __detail::_Insert;
  327. template<typename _Keya, typename _Valuea, typename _Alloca,
  328. typename _ExtractKeya, typename _Equala,
  329. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  330. typename _RehashPolicya, typename _Traitsa,
  331. bool _Unique_keysa>
  332. friend struct __detail::_Equality;
  333. public:
  334. using size_type = typename __hashtable_base::size_type;
  335. using difference_type = typename __hashtable_base::difference_type;
  336. #if __cplusplus > 201402L
  337. using node_type = _Node_handle<_Key, _Value, __node_alloc_type>;
  338. using insert_return_type = _Node_insert_return<iterator, node_type>;
  339. #endif
  340. private:
  341. __buckets_ptr _M_buckets = &_M_single_bucket;
  342. size_type _M_bucket_count = 1;
  343. __node_base _M_before_begin;
  344. size_type _M_element_count = 0;
  345. _RehashPolicy _M_rehash_policy;
  346. // A single bucket used when only need for 1 bucket. Especially
  347. // interesting in move semantic to leave hashtable with only 1 bucket
  348. // which is not allocated so that we can have those operations noexcept
  349. // qualified.
  350. // Note that we can't leave hashtable with 0 bucket without adding
  351. // numerous checks in the code to avoid 0 modulus.
  352. __node_base_ptr _M_single_bucket = nullptr;
  353. void
  354. _M_update_bbegin()
  355. {
  356. if (_M_begin())
  357. _M_buckets[_M_bucket_index(*_M_begin())] = &_M_before_begin;
  358. }
  359. void
  360. _M_update_bbegin(__node_ptr __n)
  361. {
  362. _M_before_begin._M_nxt = __n;
  363. _M_update_bbegin();
  364. }
  365. bool
  366. _M_uses_single_bucket(__buckets_ptr __bkts) const
  367. { return __builtin_expect(__bkts == &_M_single_bucket, false); }
  368. bool
  369. _M_uses_single_bucket() const
  370. { return _M_uses_single_bucket(_M_buckets); }
  371. static constexpr size_t
  372. __small_size_threshold() noexcept
  373. {
  374. return
  375. __detail::_Hashtable_hash_traits<_Hash>::__small_size_threshold();
  376. }
  377. __hashtable_alloc&
  378. _M_base_alloc() { return *this; }
  379. __buckets_ptr
  380. _M_allocate_buckets(size_type __bkt_count)
  381. {
  382. if (__builtin_expect(__bkt_count == 1, false))
  383. {
  384. _M_single_bucket = nullptr;
  385. return &_M_single_bucket;
  386. }
  387. return __hashtable_alloc::_M_allocate_buckets(__bkt_count);
  388. }
  389. void
  390. _M_deallocate_buckets(__buckets_ptr __bkts, size_type __bkt_count)
  391. {
  392. if (_M_uses_single_bucket(__bkts))
  393. return;
  394. __hashtable_alloc::_M_deallocate_buckets(__bkts, __bkt_count);
  395. }
  396. void
  397. _M_deallocate_buckets()
  398. { _M_deallocate_buckets(_M_buckets, _M_bucket_count); }
  399. // Gets bucket begin, deals with the fact that non-empty buckets contain
  400. // their before begin node.
  401. __node_ptr
  402. _M_bucket_begin(size_type __bkt) const;
  403. __node_ptr
  404. _M_begin() const
  405. { return static_cast<__node_ptr>(_M_before_begin._M_nxt); }
  406. // Assign *this using another _Hashtable instance. Whether elements
  407. // are copied or moved depends on the _Ht reference.
  408. template<typename _Ht>
  409. void
  410. _M_assign_elements(_Ht&&);
  411. template<typename _Ht, typename _NodeGenerator>
  412. void
  413. _M_assign(_Ht&&, const _NodeGenerator&);
  414. void
  415. _M_move_assign(_Hashtable&&, true_type);
  416. void
  417. _M_move_assign(_Hashtable&&, false_type);
  418. void
  419. _M_reset() noexcept;
  420. _Hashtable(const _Hash& __h, const _Equal& __eq,
  421. const allocator_type& __a)
  422. : __hashtable_base(__h, __eq),
  423. __hashtable_alloc(__node_alloc_type(__a)),
  424. __enable_default_ctor(_Enable_default_constructor_tag{})
  425. { }
  426. template<bool _No_realloc = true>
  427. static constexpr bool
  428. _S_nothrow_move()
  429. {
  430. #if __cplusplus <= 201402L
  431. return __and_<__bool_constant<_No_realloc>,
  432. is_nothrow_copy_constructible<_Hash>,
  433. is_nothrow_copy_constructible<_Equal>>::value;
  434. #else
  435. if constexpr (_No_realloc)
  436. if constexpr (is_nothrow_copy_constructible<_Hash>())
  437. return is_nothrow_copy_constructible<_Equal>();
  438. return false;
  439. #endif
  440. }
  441. _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
  442. true_type /* alloc always equal */)
  443. noexcept(_S_nothrow_move());
  444. _Hashtable(_Hashtable&&, __node_alloc_type&&,
  445. false_type /* alloc always equal */);
  446. template<typename _InputIterator>
  447. _Hashtable(_InputIterator __first, _InputIterator __last,
  448. size_type __bkt_count_hint,
  449. const _Hash&, const _Equal&, const allocator_type&,
  450. true_type __uks);
  451. template<typename _InputIterator>
  452. _Hashtable(_InputIterator __first, _InputIterator __last,
  453. size_type __bkt_count_hint,
  454. const _Hash&, const _Equal&, const allocator_type&,
  455. false_type __uks);
  456. public:
  457. // Constructor, destructor, assignment, swap
  458. _Hashtable() = default;
  459. _Hashtable(const _Hashtable&);
  460. _Hashtable(const _Hashtable&, const allocator_type&);
  461. explicit
  462. _Hashtable(size_type __bkt_count_hint,
  463. const _Hash& __hf = _Hash(),
  464. const key_equal& __eql = key_equal(),
  465. const allocator_type& __a = allocator_type());
  466. // Use delegating constructors.
  467. _Hashtable(_Hashtable&& __ht)
  468. noexcept(_S_nothrow_move())
  469. : _Hashtable(std::move(__ht), std::move(__ht._M_node_allocator()),
  470. true_type{})
  471. { }
  472. _Hashtable(_Hashtable&& __ht, const allocator_type& __a)
  473. noexcept(_S_nothrow_move<__node_alloc_traits::_S_always_equal()>())
  474. : _Hashtable(std::move(__ht), __node_alloc_type(__a),
  475. typename __node_alloc_traits::is_always_equal{})
  476. { }
  477. explicit
  478. _Hashtable(const allocator_type& __a)
  479. : __hashtable_alloc(__node_alloc_type(__a)),
  480. __enable_default_ctor(_Enable_default_constructor_tag{})
  481. { }
  482. template<typename _InputIterator>
  483. _Hashtable(_InputIterator __f, _InputIterator __l,
  484. size_type __bkt_count_hint = 0,
  485. const _Hash& __hf = _Hash(),
  486. const key_equal& __eql = key_equal(),
  487. const allocator_type& __a = allocator_type())
  488. : _Hashtable(__f, __l, __bkt_count_hint, __hf, __eql, __a,
  489. __unique_keys{})
  490. { }
  491. _Hashtable(initializer_list<value_type> __l,
  492. size_type __bkt_count_hint = 0,
  493. const _Hash& __hf = _Hash(),
  494. const key_equal& __eql = key_equal(),
  495. const allocator_type& __a = allocator_type())
  496. : _Hashtable(__l.begin(), __l.end(), __bkt_count_hint,
  497. __hf, __eql, __a, __unique_keys{})
  498. { }
  499. _Hashtable&
  500. operator=(const _Hashtable& __ht);
  501. _Hashtable&
  502. operator=(_Hashtable&& __ht)
  503. noexcept(__node_alloc_traits::_S_nothrow_move()
  504. && is_nothrow_move_assignable<_Hash>::value
  505. && is_nothrow_move_assignable<_Equal>::value)
  506. {
  507. constexpr bool __move_storage =
  508. __node_alloc_traits::_S_propagate_on_move_assign()
  509. || __node_alloc_traits::_S_always_equal();
  510. _M_move_assign(std::move(__ht), __bool_constant<__move_storage>());
  511. return *this;
  512. }
  513. _Hashtable&
  514. operator=(initializer_list<value_type> __l)
  515. {
  516. __reuse_or_alloc_node_gen_t __roan(_M_begin(), *this);
  517. _M_before_begin._M_nxt = nullptr;
  518. clear();
  519. // We consider that all elements of __l are going to be inserted.
  520. auto __l_bkt_count = _M_rehash_policy._M_bkt_for_elements(__l.size());
  521. // Do not shrink to keep potential user reservation.
  522. if (_M_bucket_count < __l_bkt_count)
  523. rehash(__l_bkt_count);
  524. this->_M_insert_range(__l.begin(), __l.end(), __roan, __unique_keys{});
  525. return *this;
  526. }
  527. ~_Hashtable() noexcept;
  528. void
  529. swap(_Hashtable&)
  530. noexcept(__and_<__is_nothrow_swappable<_Hash>,
  531. __is_nothrow_swappable<_Equal>>::value);
  532. // Basic container operations
  533. iterator
  534. begin() noexcept
  535. { return iterator(_M_begin()); }
  536. const_iterator
  537. begin() const noexcept
  538. { return const_iterator(_M_begin()); }
  539. iterator
  540. end() noexcept
  541. { return iterator(nullptr); }
  542. const_iterator
  543. end() const noexcept
  544. { return const_iterator(nullptr); }
  545. const_iterator
  546. cbegin() const noexcept
  547. { return const_iterator(_M_begin()); }
  548. const_iterator
  549. cend() const noexcept
  550. { return const_iterator(nullptr); }
  551. size_type
  552. size() const noexcept
  553. { return _M_element_count; }
  554. _GLIBCXX_NODISCARD bool
  555. empty() const noexcept
  556. { return size() == 0; }
  557. allocator_type
  558. get_allocator() const noexcept
  559. { return allocator_type(this->_M_node_allocator()); }
  560. size_type
  561. max_size() const noexcept
  562. { return __node_alloc_traits::max_size(this->_M_node_allocator()); }
  563. // Observers
  564. key_equal
  565. key_eq() const
  566. { return this->_M_eq(); }
  567. // hash_function, if present, comes from _Hash_code_base.
  568. // Bucket operations
  569. size_type
  570. bucket_count() const noexcept
  571. { return _M_bucket_count; }
  572. size_type
  573. max_bucket_count() const noexcept
  574. { return max_size(); }
  575. size_type
  576. bucket_size(size_type __bkt) const
  577. { return std::distance(begin(__bkt), end(__bkt)); }
  578. size_type
  579. bucket(const key_type& __k) const
  580. { return _M_bucket_index(this->_M_hash_code(__k)); }
  581. local_iterator
  582. begin(size_type __bkt)
  583. {
  584. return local_iterator(*this, _M_bucket_begin(__bkt),
  585. __bkt, _M_bucket_count);
  586. }
  587. local_iterator
  588. end(size_type __bkt)
  589. { return local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  590. const_local_iterator
  591. begin(size_type __bkt) const
  592. {
  593. return const_local_iterator(*this, _M_bucket_begin(__bkt),
  594. __bkt, _M_bucket_count);
  595. }
  596. const_local_iterator
  597. end(size_type __bkt) const
  598. { return const_local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  599. // DR 691.
  600. const_local_iterator
  601. cbegin(size_type __bkt) const
  602. {
  603. return const_local_iterator(*this, _M_bucket_begin(__bkt),
  604. __bkt, _M_bucket_count);
  605. }
  606. const_local_iterator
  607. cend(size_type __bkt) const
  608. { return const_local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  609. float
  610. load_factor() const noexcept
  611. {
  612. return static_cast<float>(size()) / static_cast<float>(bucket_count());
  613. }
  614. // max_load_factor, if present, comes from _Rehash_base.
  615. // Generalization of max_load_factor. Extension, not found in
  616. // TR1. Only useful if _RehashPolicy is something other than
  617. // the default.
  618. const _RehashPolicy&
  619. __rehash_policy() const
  620. { return _M_rehash_policy; }
  621. void
  622. __rehash_policy(const _RehashPolicy& __pol)
  623. { _M_rehash_policy = __pol; }
  624. // Lookup.
  625. iterator
  626. find(const key_type& __k);
  627. const_iterator
  628. find(const key_type& __k) const;
  629. size_type
  630. count(const key_type& __k) const;
  631. std::pair<iterator, iterator>
  632. equal_range(const key_type& __k);
  633. std::pair<const_iterator, const_iterator>
  634. equal_range(const key_type& __k) const;
  635. #if __cplusplus >= 202002L
  636. #define __cpp_lib_generic_unordered_lookup 201811L
  637. template<typename _Kt,
  638. typename = __has_is_transparent_t<_Hash, _Kt>,
  639. typename = __has_is_transparent_t<_Equal, _Kt>>
  640. iterator
  641. _M_find_tr(const _Kt& __k);
  642. template<typename _Kt,
  643. typename = __has_is_transparent_t<_Hash, _Kt>,
  644. typename = __has_is_transparent_t<_Equal, _Kt>>
  645. const_iterator
  646. _M_find_tr(const _Kt& __k) const;
  647. template<typename _Kt,
  648. typename = __has_is_transparent_t<_Hash, _Kt>,
  649. typename = __has_is_transparent_t<_Equal, _Kt>>
  650. size_type
  651. _M_count_tr(const _Kt& __k) const;
  652. template<typename _Kt,
  653. typename = __has_is_transparent_t<_Hash, _Kt>,
  654. typename = __has_is_transparent_t<_Equal, _Kt>>
  655. pair<iterator, iterator>
  656. _M_equal_range_tr(const _Kt& __k);
  657. template<typename _Kt,
  658. typename = __has_is_transparent_t<_Hash, _Kt>,
  659. typename = __has_is_transparent_t<_Equal, _Kt>>
  660. pair<const_iterator, const_iterator>
  661. _M_equal_range_tr(const _Kt& __k) const;
  662. #endif // C++20
  663. private:
  664. // Bucket index computation helpers.
  665. size_type
  666. _M_bucket_index(const __node_value_type& __n) const noexcept
  667. { return __hash_code_base::_M_bucket_index(__n, _M_bucket_count); }
  668. size_type
  669. _M_bucket_index(__hash_code __c) const
  670. { return __hash_code_base::_M_bucket_index(__c, _M_bucket_count); }
  671. __node_base_ptr
  672. _M_find_before_node(const key_type&);
  673. // Find and insert helper functions and types
  674. // Find the node before the one matching the criteria.
  675. __node_base_ptr
  676. _M_find_before_node(size_type, const key_type&, __hash_code) const;
  677. template<typename _Kt>
  678. __node_base_ptr
  679. _M_find_before_node_tr(size_type, const _Kt&, __hash_code) const;
  680. __node_ptr
  681. _M_find_node(size_type __bkt, const key_type& __key,
  682. __hash_code __c) const
  683. {
  684. __node_base_ptr __before_n = _M_find_before_node(__bkt, __key, __c);
  685. if (__before_n)
  686. return static_cast<__node_ptr>(__before_n->_M_nxt);
  687. return nullptr;
  688. }
  689. template<typename _Kt>
  690. __node_ptr
  691. _M_find_node_tr(size_type __bkt, const _Kt& __key,
  692. __hash_code __c) const
  693. {
  694. auto __before_n = _M_find_before_node_tr(__bkt, __key, __c);
  695. if (__before_n)
  696. return static_cast<__node_ptr>(__before_n->_M_nxt);
  697. return nullptr;
  698. }
  699. // Insert a node at the beginning of a bucket.
  700. void
  701. _M_insert_bucket_begin(size_type, __node_ptr);
  702. // Remove the bucket first node
  703. void
  704. _M_remove_bucket_begin(size_type __bkt, __node_ptr __next_n,
  705. size_type __next_bkt);
  706. // Get the node before __n in the bucket __bkt
  707. __node_base_ptr
  708. _M_get_previous_node(size_type __bkt, __node_ptr __n);
  709. pair<const_iterator, __hash_code>
  710. _M_compute_hash_code(const_iterator __hint, const key_type& __k) const;
  711. // Insert node __n with hash code __code, in bucket __bkt if no
  712. // rehash (assumes no element with same key already present).
  713. // Takes ownership of __n if insertion succeeds, throws otherwise.
  714. iterator
  715. _M_insert_unique_node(size_type __bkt, __hash_code,
  716. __node_ptr __n, size_type __n_elt = 1);
  717. // Insert node __n with key __k and hash code __code.
  718. // Takes ownership of __n if insertion succeeds, throws otherwise.
  719. iterator
  720. _M_insert_multi_node(__node_ptr __hint,
  721. __hash_code __code, __node_ptr __n);
  722. template<typename... _Args>
  723. std::pair<iterator, bool>
  724. _M_emplace(true_type __uks, _Args&&... __args);
  725. template<typename... _Args>
  726. iterator
  727. _M_emplace(false_type __uks, _Args&&... __args)
  728. { return _M_emplace(cend(), __uks, std::forward<_Args>(__args)...); }
  729. // Emplace with hint, useless when keys are unique.
  730. template<typename... _Args>
  731. iterator
  732. _M_emplace(const_iterator, true_type __uks, _Args&&... __args)
  733. { return _M_emplace(__uks, std::forward<_Args>(__args)...).first; }
  734. template<typename... _Args>
  735. iterator
  736. _M_emplace(const_iterator, false_type __uks, _Args&&... __args);
  737. template<typename _Kt, typename _Arg, typename _NodeGenerator>
  738. std::pair<iterator, bool>
  739. _M_insert_unique(_Kt&&, _Arg&&, const _NodeGenerator&);
  740. template<typename _Kt>
  741. static __conditional_t<
  742. __and_<__is_nothrow_invocable<_Hash&, const key_type&>,
  743. __not_<__is_nothrow_invocable<_Hash&, _Kt>>>::value,
  744. key_type, _Kt&&>
  745. _S_forward_key(_Kt&& __k)
  746. { return std::forward<_Kt>(__k); }
  747. static const key_type&
  748. _S_forward_key(const key_type& __k)
  749. { return __k; }
  750. static key_type&&
  751. _S_forward_key(key_type&& __k)
  752. { return std::move(__k); }
  753. template<typename _Arg, typename _NodeGenerator>
  754. std::pair<iterator, bool>
  755. _M_insert(_Arg&& __arg, const _NodeGenerator& __node_gen,
  756. true_type /* __uks */)
  757. {
  758. return _M_insert_unique(
  759. _S_forward_key(_ExtractKey{}(std::forward<_Arg>(__arg))),
  760. std::forward<_Arg>(__arg), __node_gen);
  761. }
  762. template<typename _Arg, typename _NodeGenerator>
  763. iterator
  764. _M_insert(_Arg&& __arg, const _NodeGenerator& __node_gen,
  765. false_type __uks)
  766. {
  767. return _M_insert(cend(), std::forward<_Arg>(__arg), __node_gen,
  768. __uks);
  769. }
  770. // Insert with hint, not used when keys are unique.
  771. template<typename _Arg, typename _NodeGenerator>
  772. iterator
  773. _M_insert(const_iterator, _Arg&& __arg,
  774. const _NodeGenerator& __node_gen, true_type __uks)
  775. {
  776. return
  777. _M_insert(std::forward<_Arg>(__arg), __node_gen, __uks).first;
  778. }
  779. // Insert with hint when keys are not unique.
  780. template<typename _Arg, typename _NodeGenerator>
  781. iterator
  782. _M_insert(const_iterator, _Arg&&,
  783. const _NodeGenerator&, false_type __uks);
  784. size_type
  785. _M_erase(true_type __uks, const key_type&);
  786. size_type
  787. _M_erase(false_type __uks, const key_type&);
  788. iterator
  789. _M_erase(size_type __bkt, __node_base_ptr __prev_n, __node_ptr __n);
  790. public:
  791. // Emplace
  792. template<typename... _Args>
  793. __ireturn_type
  794. emplace(_Args&&... __args)
  795. { return _M_emplace(__unique_keys{}, std::forward<_Args>(__args)...); }
  796. template<typename... _Args>
  797. iterator
  798. emplace_hint(const_iterator __hint, _Args&&... __args)
  799. {
  800. return _M_emplace(__hint, __unique_keys{},
  801. std::forward<_Args>(__args)...);
  802. }
  803. // Insert member functions via inheritance.
  804. // Erase
  805. iterator
  806. erase(const_iterator);
  807. // LWG 2059.
  808. iterator
  809. erase(iterator __it)
  810. { return erase(const_iterator(__it)); }
  811. size_type
  812. erase(const key_type& __k)
  813. { return _M_erase(__unique_keys{}, __k); }
  814. iterator
  815. erase(const_iterator, const_iterator);
  816. void
  817. clear() noexcept;
  818. // Set number of buckets keeping it appropriate for container's number
  819. // of elements.
  820. void rehash(size_type __bkt_count);
  821. // DR 1189.
  822. // reserve, if present, comes from _Rehash_base.
  823. #if __cplusplus > 201402L
  824. /// Re-insert an extracted node into a container with unique keys.
  825. insert_return_type
  826. _M_reinsert_node(node_type&& __nh)
  827. {
  828. insert_return_type __ret;
  829. if (__nh.empty())
  830. __ret.position = end();
  831. else
  832. {
  833. __glibcxx_assert(get_allocator() == __nh.get_allocator());
  834. const key_type& __k = __nh._M_key();
  835. __hash_code __code = this->_M_hash_code(__k);
  836. size_type __bkt = _M_bucket_index(__code);
  837. if (__node_ptr __n = _M_find_node(__bkt, __k, __code))
  838. {
  839. __ret.node = std::move(__nh);
  840. __ret.position = iterator(__n);
  841. __ret.inserted = false;
  842. }
  843. else
  844. {
  845. __ret.position
  846. = _M_insert_unique_node(__bkt, __code, __nh._M_ptr);
  847. __nh._M_ptr = nullptr;
  848. __ret.inserted = true;
  849. }
  850. }
  851. return __ret;
  852. }
  853. /// Re-insert an extracted node into a container with equivalent keys.
  854. iterator
  855. _M_reinsert_node_multi(const_iterator __hint, node_type&& __nh)
  856. {
  857. if (__nh.empty())
  858. return end();
  859. __glibcxx_assert(get_allocator() == __nh.get_allocator());
  860. const key_type& __k = __nh._M_key();
  861. auto __code = this->_M_hash_code(__k);
  862. auto __ret
  863. = _M_insert_multi_node(__hint._M_cur, __code, __nh._M_ptr);
  864. __nh._M_ptr = nullptr;
  865. return __ret;
  866. }
  867. private:
  868. node_type
  869. _M_extract_node(size_t __bkt, __node_base_ptr __prev_n)
  870. {
  871. __node_ptr __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  872. if (__prev_n == _M_buckets[__bkt])
  873. _M_remove_bucket_begin(__bkt, __n->_M_next(),
  874. __n->_M_nxt ? _M_bucket_index(*__n->_M_next()) : 0);
  875. else if (__n->_M_nxt)
  876. {
  877. size_type __next_bkt = _M_bucket_index(*__n->_M_next());
  878. if (__next_bkt != __bkt)
  879. _M_buckets[__next_bkt] = __prev_n;
  880. }
  881. __prev_n->_M_nxt = __n->_M_nxt;
  882. __n->_M_nxt = nullptr;
  883. --_M_element_count;
  884. return { __n, this->_M_node_allocator() };
  885. }
  886. public:
  887. // Extract a node.
  888. node_type
  889. extract(const_iterator __pos)
  890. {
  891. size_t __bkt = _M_bucket_index(*__pos._M_cur);
  892. return _M_extract_node(__bkt,
  893. _M_get_previous_node(__bkt, __pos._M_cur));
  894. }
  895. /// Extract a node.
  896. node_type
  897. extract(const _Key& __k)
  898. {
  899. node_type __nh;
  900. __hash_code __code = this->_M_hash_code(__k);
  901. std::size_t __bkt = _M_bucket_index(__code);
  902. if (__node_base_ptr __prev_node = _M_find_before_node(__bkt, __k, __code))
  903. __nh = _M_extract_node(__bkt, __prev_node);
  904. return __nh;
  905. }
  906. /// Merge from a compatible container into one with unique keys.
  907. template<typename _Compatible_Hashtable>
  908. void
  909. _M_merge_unique(_Compatible_Hashtable& __src)
  910. {
  911. static_assert(is_same_v<typename _Compatible_Hashtable::node_type,
  912. node_type>, "Node types are compatible");
  913. __glibcxx_assert(get_allocator() == __src.get_allocator());
  914. auto __n_elt = __src.size();
  915. for (auto __i = __src.cbegin(), __end = __src.cend(); __i != __end;)
  916. {
  917. auto __pos = __i++;
  918. const key_type& __k = _ExtractKey{}(*__pos);
  919. __hash_code __code
  920. = this->_M_hash_code(__src.hash_function(), *__pos._M_cur);
  921. size_type __bkt = _M_bucket_index(__code);
  922. if (_M_find_node(__bkt, __k, __code) == nullptr)
  923. {
  924. auto __nh = __src.extract(__pos);
  925. _M_insert_unique_node(__bkt, __code, __nh._M_ptr, __n_elt);
  926. __nh._M_ptr = nullptr;
  927. __n_elt = 1;
  928. }
  929. else if (__n_elt != 1)
  930. --__n_elt;
  931. }
  932. }
  933. /// Merge from a compatible container into one with equivalent keys.
  934. template<typename _Compatible_Hashtable>
  935. void
  936. _M_merge_multi(_Compatible_Hashtable& __src)
  937. {
  938. static_assert(is_same_v<typename _Compatible_Hashtable::node_type,
  939. node_type>, "Node types are compatible");
  940. __glibcxx_assert(get_allocator() == __src.get_allocator());
  941. __node_ptr __hint = nullptr;
  942. this->reserve(size() + __src.size());
  943. for (auto __i = __src.cbegin(), __end = __src.cend(); __i != __end;)
  944. {
  945. auto __pos = __i++;
  946. __hash_code __code
  947. = this->_M_hash_code(__src.hash_function(), *__pos._M_cur);
  948. auto __nh = __src.extract(__pos);
  949. __hint = _M_insert_multi_node(__hint, __code, __nh._M_ptr)._M_cur;
  950. __nh._M_ptr = nullptr;
  951. }
  952. }
  953. #endif // C++17
  954. private:
  955. // Helper rehash method used when keys are unique.
  956. void _M_rehash_aux(size_type __bkt_count, true_type __uks);
  957. // Helper rehash method used when keys can be non-unique.
  958. void _M_rehash_aux(size_type __bkt_count, false_type __uks);
  959. // Unconditionally change size of bucket array to n, restore
  960. // hash policy state to __state on exception.
  961. void _M_rehash(size_type __bkt_count, const __rehash_state& __state);
  962. };
  963. // Definitions of class template _Hashtable's out-of-line member functions.
  964. template<typename _Key, typename _Value, typename _Alloc,
  965. typename _ExtractKey, typename _Equal,
  966. typename _Hash, typename _RangeHash, typename _Unused,
  967. typename _RehashPolicy, typename _Traits>
  968. auto
  969. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  970. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  971. _M_bucket_begin(size_type __bkt) const
  972. -> __node_ptr
  973. {
  974. __node_base_ptr __n = _M_buckets[__bkt];
  975. return __n ? static_cast<__node_ptr>(__n->_M_nxt) : nullptr;
  976. }
  977. template<typename _Key, typename _Value, typename _Alloc,
  978. typename _ExtractKey, typename _Equal,
  979. typename _Hash, typename _RangeHash, typename _Unused,
  980. typename _RehashPolicy, typename _Traits>
  981. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  982. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  983. _Hashtable(size_type __bkt_count_hint,
  984. const _Hash& __h, const _Equal& __eq, const allocator_type& __a)
  985. : _Hashtable(__h, __eq, __a)
  986. {
  987. auto __bkt_count = _M_rehash_policy._M_next_bkt(__bkt_count_hint);
  988. if (__bkt_count > _M_bucket_count)
  989. {
  990. _M_buckets = _M_allocate_buckets(__bkt_count);
  991. _M_bucket_count = __bkt_count;
  992. }
  993. }
  994. template<typename _Key, typename _Value, typename _Alloc,
  995. typename _ExtractKey, typename _Equal,
  996. typename _Hash, typename _RangeHash, typename _Unused,
  997. typename _RehashPolicy, typename _Traits>
  998. template<typename _InputIterator>
  999. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1000. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1001. _Hashtable(_InputIterator __f, _InputIterator __l,
  1002. size_type __bkt_count_hint,
  1003. const _Hash& __h, const _Equal& __eq,
  1004. const allocator_type& __a, true_type /* __uks */)
  1005. : _Hashtable(__bkt_count_hint, __h, __eq, __a)
  1006. {
  1007. for (; __f != __l; ++__f)
  1008. this->insert(*__f);
  1009. }
  1010. template<typename _Key, typename _Value, typename _Alloc,
  1011. typename _ExtractKey, typename _Equal,
  1012. typename _Hash, typename _RangeHash, typename _Unused,
  1013. typename _RehashPolicy, typename _Traits>
  1014. template<typename _InputIterator>
  1015. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1016. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1017. _Hashtable(_InputIterator __f, _InputIterator __l,
  1018. size_type __bkt_count_hint,
  1019. const _Hash& __h, const _Equal& __eq,
  1020. const allocator_type& __a, false_type /* __uks */)
  1021. : _Hashtable(__h, __eq, __a)
  1022. {
  1023. auto __nb_elems = __detail::__distance_fw(__f, __l);
  1024. auto __bkt_count =
  1025. _M_rehash_policy._M_next_bkt(
  1026. std::max(_M_rehash_policy._M_bkt_for_elements(__nb_elems),
  1027. __bkt_count_hint));
  1028. if (__bkt_count > _M_bucket_count)
  1029. {
  1030. _M_buckets = _M_allocate_buckets(__bkt_count);
  1031. _M_bucket_count = __bkt_count;
  1032. }
  1033. for (; __f != __l; ++__f)
  1034. this->insert(*__f);
  1035. }
  1036. template<typename _Key, typename _Value, typename _Alloc,
  1037. typename _ExtractKey, typename _Equal,
  1038. typename _Hash, typename _RangeHash, typename _Unused,
  1039. typename _RehashPolicy, typename _Traits>
  1040. auto
  1041. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1042. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1043. operator=(const _Hashtable& __ht)
  1044. -> _Hashtable&
  1045. {
  1046. if (&__ht == this)
  1047. return *this;
  1048. if (__node_alloc_traits::_S_propagate_on_copy_assign())
  1049. {
  1050. auto& __this_alloc = this->_M_node_allocator();
  1051. auto& __that_alloc = __ht._M_node_allocator();
  1052. if (!__node_alloc_traits::_S_always_equal()
  1053. && __this_alloc != __that_alloc)
  1054. {
  1055. // Replacement allocator cannot free existing storage.
  1056. this->_M_deallocate_nodes(_M_begin());
  1057. _M_before_begin._M_nxt = nullptr;
  1058. _M_deallocate_buckets();
  1059. _M_buckets = nullptr;
  1060. std::__alloc_on_copy(__this_alloc, __that_alloc);
  1061. __hashtable_base::operator=(__ht);
  1062. _M_bucket_count = __ht._M_bucket_count;
  1063. _M_element_count = __ht._M_element_count;
  1064. _M_rehash_policy = __ht._M_rehash_policy;
  1065. __alloc_node_gen_t __alloc_node_gen(*this);
  1066. __try
  1067. {
  1068. _M_assign(__ht, __alloc_node_gen);
  1069. }
  1070. __catch(...)
  1071. {
  1072. // _M_assign took care of deallocating all memory. Now we
  1073. // must make sure this instance remains in a usable state.
  1074. _M_reset();
  1075. __throw_exception_again;
  1076. }
  1077. return *this;
  1078. }
  1079. std::__alloc_on_copy(__this_alloc, __that_alloc);
  1080. }
  1081. // Reuse allocated buckets and nodes.
  1082. _M_assign_elements(__ht);
  1083. return *this;
  1084. }
  1085. template<typename _Key, typename _Value, typename _Alloc,
  1086. typename _ExtractKey, typename _Equal,
  1087. typename _Hash, typename _RangeHash, typename _Unused,
  1088. typename _RehashPolicy, typename _Traits>
  1089. template<typename _Ht>
  1090. void
  1091. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1092. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1093. _M_assign_elements(_Ht&& __ht)
  1094. {
  1095. __buckets_ptr __former_buckets = nullptr;
  1096. std::size_t __former_bucket_count = _M_bucket_count;
  1097. const __rehash_state& __former_state = _M_rehash_policy._M_state();
  1098. if (_M_bucket_count != __ht._M_bucket_count)
  1099. {
  1100. __former_buckets = _M_buckets;
  1101. _M_buckets = _M_allocate_buckets(__ht._M_bucket_count);
  1102. _M_bucket_count = __ht._M_bucket_count;
  1103. }
  1104. else
  1105. __builtin_memset(_M_buckets, 0,
  1106. _M_bucket_count * sizeof(__node_base_ptr));
  1107. __try
  1108. {
  1109. __hashtable_base::operator=(std::forward<_Ht>(__ht));
  1110. _M_element_count = __ht._M_element_count;
  1111. _M_rehash_policy = __ht._M_rehash_policy;
  1112. __reuse_or_alloc_node_gen_t __roan(_M_begin(), *this);
  1113. _M_before_begin._M_nxt = nullptr;
  1114. _M_assign(std::forward<_Ht>(__ht), __roan);
  1115. if (__former_buckets)
  1116. _M_deallocate_buckets(__former_buckets, __former_bucket_count);
  1117. }
  1118. __catch(...)
  1119. {
  1120. if (__former_buckets)
  1121. {
  1122. // Restore previous buckets.
  1123. _M_deallocate_buckets();
  1124. _M_rehash_policy._M_reset(__former_state);
  1125. _M_buckets = __former_buckets;
  1126. _M_bucket_count = __former_bucket_count;
  1127. }
  1128. __builtin_memset(_M_buckets, 0,
  1129. _M_bucket_count * sizeof(__node_base_ptr));
  1130. __throw_exception_again;
  1131. }
  1132. }
  1133. template<typename _Key, typename _Value, typename _Alloc,
  1134. typename _ExtractKey, typename _Equal,
  1135. typename _Hash, typename _RangeHash, typename _Unused,
  1136. typename _RehashPolicy, typename _Traits>
  1137. template<typename _Ht, typename _NodeGenerator>
  1138. void
  1139. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1140. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1141. _M_assign(_Ht&& __ht, const _NodeGenerator& __node_gen)
  1142. {
  1143. __buckets_ptr __buckets = nullptr;
  1144. if (!_M_buckets)
  1145. _M_buckets = __buckets = _M_allocate_buckets(_M_bucket_count);
  1146. __try
  1147. {
  1148. if (!__ht._M_before_begin._M_nxt)
  1149. return;
  1150. // First deal with the special first node pointed to by
  1151. // _M_before_begin.
  1152. __node_ptr __ht_n = __ht._M_begin();
  1153. __node_ptr __this_n
  1154. = __node_gen(__fwd_value_for<_Ht>(__ht_n->_M_v()));
  1155. this->_M_copy_code(*__this_n, *__ht_n);
  1156. _M_update_bbegin(__this_n);
  1157. // Then deal with other nodes.
  1158. __node_ptr __prev_n = __this_n;
  1159. for (__ht_n = __ht_n->_M_next(); __ht_n; __ht_n = __ht_n->_M_next())
  1160. {
  1161. __this_n = __node_gen(__fwd_value_for<_Ht>(__ht_n->_M_v()));
  1162. __prev_n->_M_nxt = __this_n;
  1163. this->_M_copy_code(*__this_n, *__ht_n);
  1164. size_type __bkt = _M_bucket_index(*__this_n);
  1165. if (!_M_buckets[__bkt])
  1166. _M_buckets[__bkt] = __prev_n;
  1167. __prev_n = __this_n;
  1168. }
  1169. }
  1170. __catch(...)
  1171. {
  1172. clear();
  1173. if (__buckets)
  1174. _M_deallocate_buckets();
  1175. __throw_exception_again;
  1176. }
  1177. }
  1178. template<typename _Key, typename _Value, typename _Alloc,
  1179. typename _ExtractKey, typename _Equal,
  1180. typename _Hash, typename _RangeHash, typename _Unused,
  1181. typename _RehashPolicy, typename _Traits>
  1182. void
  1183. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1184. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1185. _M_reset() noexcept
  1186. {
  1187. _M_rehash_policy._M_reset();
  1188. _M_bucket_count = 1;
  1189. _M_single_bucket = nullptr;
  1190. _M_buckets = &_M_single_bucket;
  1191. _M_before_begin._M_nxt = nullptr;
  1192. _M_element_count = 0;
  1193. }
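// Move assignment is split into two overloads, selected by the caller:
// the true_type overload below is used when this container can take
// ownership of the source's nodes and buckets (the node allocator
// propagates on move assignment or always compares equal); the
// false_type overload falls back to moving the elements one by one when
// the allocators may differ.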
  1194. template<typename _Key, typename _Value, typename _Alloc,
  1195. typename _ExtractKey, typename _Equal,
  1196. typename _Hash, typename _RangeHash, typename _Unused,
  1197. typename _RehashPolicy, typename _Traits>
  1198. void
  1199. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1200. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1201. _M_move_assign(_Hashtable&& __ht, true_type)
  1202. {
  1203. if (__builtin_expect(std::__addressof(__ht) == this, false))
  1204. return;
  1205. this->_M_deallocate_nodes(_M_begin());
  1206. _M_deallocate_buckets();
  1207. __hashtable_base::operator=(std::move(__ht));
  1208. _M_rehash_policy = __ht._M_rehash_policy;
  1209. if (!__ht._M_uses_single_bucket())
  1210. _M_buckets = __ht._M_buckets;
  1211. else
  1212. {
  1213. _M_buckets = &_M_single_bucket;
  1214. _M_single_bucket = __ht._M_single_bucket;
  1215. }
  1216. _M_bucket_count = __ht._M_bucket_count;
  1217. _M_before_begin._M_nxt = __ht._M_before_begin._M_nxt;
  1218. _M_element_count = __ht._M_element_count;
  1219. std::__alloc_on_move(this->_M_node_allocator(), __ht._M_node_allocator());
  1220. // Fix bucket containing the _M_before_begin pointer that can't be moved.
  1221. _M_update_bbegin();
  1222. __ht._M_reset();
  1223. }
  1224. template<typename _Key, typename _Value, typename _Alloc,
  1225. typename _ExtractKey, typename _Equal,
  1226. typename _Hash, typename _RangeHash, typename _Unused,
  1227. typename _RehashPolicy, typename _Traits>
  1228. void
  1229. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1230. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1231. _M_move_assign(_Hashtable&& __ht, false_type)
  1232. {
  1233. if (__ht._M_node_allocator() == this->_M_node_allocator())
  1234. _M_move_assign(std::move(__ht), true_type{});
  1235. else
  1236. {
// We can't reuse the source's memory, so move the elements instead.
  1238. _M_assign_elements(std::move(__ht));
  1239. __ht.clear();
  1240. }
  1241. }
  1242. template<typename _Key, typename _Value, typename _Alloc,
  1243. typename _ExtractKey, typename _Equal,
  1244. typename _Hash, typename _RangeHash, typename _Unused,
  1245. typename _RehashPolicy, typename _Traits>
  1246. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1247. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1248. _Hashtable(const _Hashtable& __ht)
  1249. : __hashtable_base(__ht),
  1250. __map_base(__ht),
  1251. __rehash_base(__ht),
  1252. __hashtable_alloc(
  1253. __node_alloc_traits::_S_select_on_copy(__ht._M_node_allocator())),
  1254. __enable_default_ctor(__ht),
  1255. _M_buckets(nullptr),
  1256. _M_bucket_count(__ht._M_bucket_count),
  1257. _M_element_count(__ht._M_element_count),
  1258. _M_rehash_policy(__ht._M_rehash_policy)
  1259. {
  1260. __alloc_node_gen_t __alloc_node_gen(*this);
  1261. _M_assign(__ht, __alloc_node_gen);
  1262. }
  1263. template<typename _Key, typename _Value, typename _Alloc,
  1264. typename _ExtractKey, typename _Equal,
  1265. typename _Hash, typename _RangeHash, typename _Unused,
  1266. typename _RehashPolicy, typename _Traits>
  1267. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1268. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1269. _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
  1270. true_type /* alloc always equal */)
  1271. noexcept(_S_nothrow_move())
  1272. : __hashtable_base(__ht),
  1273. __map_base(__ht),
  1274. __rehash_base(__ht),
  1275. __hashtable_alloc(std::move(__a)),
  1276. __enable_default_ctor(__ht),
  1277. _M_buckets(__ht._M_buckets),
  1278. _M_bucket_count(__ht._M_bucket_count),
  1279. _M_before_begin(__ht._M_before_begin._M_nxt),
  1280. _M_element_count(__ht._M_element_count),
  1281. _M_rehash_policy(__ht._M_rehash_policy)
  1282. {
  1283. // Update buckets if __ht is using its single bucket.
  1284. if (__ht._M_uses_single_bucket())
  1285. {
  1286. _M_buckets = &_M_single_bucket;
  1287. _M_single_bucket = __ht._M_single_bucket;
  1288. }
  1289. // Fix bucket containing the _M_before_begin pointer that can't be moved.
  1290. _M_update_bbegin();
  1291. __ht._M_reset();
  1292. }
  1293. template<typename _Key, typename _Value, typename _Alloc,
  1294. typename _ExtractKey, typename _Equal,
  1295. typename _Hash, typename _RangeHash, typename _Unused,
  1296. typename _RehashPolicy, typename _Traits>
  1297. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1298. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1299. _Hashtable(const _Hashtable& __ht, const allocator_type& __a)
  1300. : __hashtable_base(__ht),
  1301. __map_base(__ht),
  1302. __rehash_base(__ht),
  1303. __hashtable_alloc(__node_alloc_type(__a)),
  1304. __enable_default_ctor(__ht),
  1305. _M_buckets(),
  1306. _M_bucket_count(__ht._M_bucket_count),
  1307. _M_element_count(__ht._M_element_count),
  1308. _M_rehash_policy(__ht._M_rehash_policy)
  1309. {
  1310. __alloc_node_gen_t __alloc_node_gen(*this);
  1311. _M_assign(__ht, __alloc_node_gen);
  1312. }
  1313. template<typename _Key, typename _Value, typename _Alloc,
  1314. typename _ExtractKey, typename _Equal,
  1315. typename _Hash, typename _RangeHash, typename _Unused,
  1316. typename _RehashPolicy, typename _Traits>
  1317. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1318. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1319. _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
  1320. false_type /* alloc always equal */)
  1321. : __hashtable_base(__ht),
  1322. __map_base(__ht),
  1323. __rehash_base(__ht),
  1324. __hashtable_alloc(std::move(__a)),
  1325. __enable_default_ctor(__ht),
  1326. _M_buckets(nullptr),
  1327. _M_bucket_count(__ht._M_bucket_count),
  1328. _M_element_count(__ht._M_element_count),
  1329. _M_rehash_policy(__ht._M_rehash_policy)
  1330. {
  1331. if (__ht._M_node_allocator() == this->_M_node_allocator())
  1332. {
  1333. if (__ht._M_uses_single_bucket())
  1334. {
  1335. _M_buckets = &_M_single_bucket;
  1336. _M_single_bucket = __ht._M_single_bucket;
  1337. }
  1338. else
  1339. _M_buckets = __ht._M_buckets;
  1340. // Fix bucket containing the _M_before_begin pointer that can't be
  1341. // moved.
  1342. _M_update_bbegin(__ht._M_begin());
  1343. __ht._M_reset();
  1344. }
  1345. else
  1346. {
  1347. __alloc_node_gen_t __alloc_gen(*this);
  1348. using _Fwd_Ht = __conditional_t<
  1349. __move_if_noexcept_cond<value_type>::value,
  1350. const _Hashtable&, _Hashtable&&>;
  1351. _M_assign(std::forward<_Fwd_Ht>(__ht), __alloc_gen);
  1352. __ht.clear();
  1353. }
  1354. }
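// Note: when the allocators differ, the constructor above forwards the
// source either as an rvalue or as a const lvalue, mirroring
// std::move_if_noexcept: elements are copied rather than moved when the
// value type's move constructor may throw, so the source stays intact
// if an exception is raised while copying.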
  1355. template<typename _Key, typename _Value, typename _Alloc,
  1356. typename _ExtractKey, typename _Equal,
  1357. typename _Hash, typename _RangeHash, typename _Unused,
  1358. typename _RehashPolicy, typename _Traits>
  1359. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1360. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1361. ~_Hashtable() noexcept
  1362. {
// Getting a bucket index from a node must not throw because it is used
// in member functions (erase, swap...) that must not throw. A complete
// type is needed to check this, so do it in the destructor rather than
// at class scope.
  1366. static_assert(noexcept(declval<const __hash_code_base_access&>()
  1367. ._M_bucket_index(declval<const __node_value_type&>(),
  1368. (std::size_t)0)),
  1369. "Cache the hash code or qualify your functors involved"
  1370. " in hash code and bucket index computation with noexcept");
  1371. clear();
  1372. _M_deallocate_buckets();
  1373. }
  1374. template<typename _Key, typename _Value, typename _Alloc,
  1375. typename _ExtractKey, typename _Equal,
  1376. typename _Hash, typename _RangeHash, typename _Unused,
  1377. typename _RehashPolicy, typename _Traits>
  1378. void
  1379. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1380. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1381. swap(_Hashtable& __x)
  1382. noexcept(__and_<__is_nothrow_swappable<_Hash>,
  1383. __is_nothrow_swappable<_Equal>>::value)
  1384. {
  1385. // The only base class with member variables is hash_code_base.
  1386. // We define _Hash_code_base::_M_swap because different
  1387. // specializations have different members.
  1388. this->_M_swap(__x);
  1389. std::__alloc_on_swap(this->_M_node_allocator(), __x._M_node_allocator());
  1390. std::swap(_M_rehash_policy, __x._M_rehash_policy);
  1391. // Deal properly with potentially moved instances.
  1392. if (this->_M_uses_single_bucket())
  1393. {
  1394. if (!__x._M_uses_single_bucket())
  1395. {
  1396. _M_buckets = __x._M_buckets;
  1397. __x._M_buckets = &__x._M_single_bucket;
  1398. }
  1399. }
  1400. else if (__x._M_uses_single_bucket())
  1401. {
  1402. __x._M_buckets = _M_buckets;
  1403. _M_buckets = &_M_single_bucket;
  1404. }
  1405. else
  1406. std::swap(_M_buckets, __x._M_buckets);
  1407. std::swap(_M_bucket_count, __x._M_bucket_count);
  1408. std::swap(_M_before_begin._M_nxt, __x._M_before_begin._M_nxt);
  1409. std::swap(_M_element_count, __x._M_element_count);
  1410. std::swap(_M_single_bucket, __x._M_single_bucket);
  1411. // Fix buckets containing the _M_before_begin pointers that can't be
  1412. // swapped.
  1413. _M_update_bbegin();
  1414. __x._M_update_bbegin();
  1415. }
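// Lookup members first check the small-size optimization: when the
// element count is at or below __small_size_threshold(), the whole
// container is scanned linearly through its single forward list and no
// hash code is computed at all; otherwise the key is hashed and only
// the matching bucket is searched.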
  1416. template<typename _Key, typename _Value, typename _Alloc,
  1417. typename _ExtractKey, typename _Equal,
  1418. typename _Hash, typename _RangeHash, typename _Unused,
  1419. typename _RehashPolicy, typename _Traits>
  1420. auto
  1421. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1422. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1423. find(const key_type& __k)
  1424. -> iterator
  1425. {
  1426. if (size() <= __small_size_threshold())
  1427. {
  1428. for (auto __it = begin(); __it != end(); ++__it)
  1429. if (this->_M_key_equals(__k, *__it._M_cur))
  1430. return __it;
  1431. return end();
  1432. }
  1433. __hash_code __code = this->_M_hash_code(__k);
  1434. std::size_t __bkt = _M_bucket_index(__code);
  1435. return iterator(_M_find_node(__bkt, __k, __code));
  1436. }
  1437. template<typename _Key, typename _Value, typename _Alloc,
  1438. typename _ExtractKey, typename _Equal,
  1439. typename _Hash, typename _RangeHash, typename _Unused,
  1440. typename _RehashPolicy, typename _Traits>
  1441. auto
  1442. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1443. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1444. find(const key_type& __k) const
  1445. -> const_iterator
  1446. {
  1447. if (size() <= __small_size_threshold())
  1448. {
  1449. for (auto __it = begin(); __it != end(); ++__it)
  1450. if (this->_M_key_equals(__k, *__it._M_cur))
  1451. return __it;
  1452. return end();
  1453. }
  1454. __hash_code __code = this->_M_hash_code(__k);
  1455. std::size_t __bkt = _M_bucket_index(__code);
  1456. return const_iterator(_M_find_node(__bkt, __k, __code));
  1457. }
  1458. #if __cplusplus > 201703L
  1459. template<typename _Key, typename _Value, typename _Alloc,
  1460. typename _ExtractKey, typename _Equal,
  1461. typename _Hash, typename _RangeHash, typename _Unused,
  1462. typename _RehashPolicy, typename _Traits>
  1463. template<typename _Kt, typename, typename>
  1464. auto
  1465. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1466. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1467. _M_find_tr(const _Kt& __k)
  1468. -> iterator
  1469. {
  1470. __hash_code __code = this->_M_hash_code_tr(__k);
  1471. std::size_t __bkt = _M_bucket_index(__code);
  1472. return iterator(_M_find_node_tr(__bkt, __k, __code));
  1473. }
  1474. template<typename _Key, typename _Value, typename _Alloc,
  1475. typename _ExtractKey, typename _Equal,
  1476. typename _Hash, typename _RangeHash, typename _Unused,
  1477. typename _RehashPolicy, typename _Traits>
  1478. template<typename _Kt, typename, typename>
  1479. auto
  1480. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1481. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1482. _M_find_tr(const _Kt& __k) const
  1483. -> const_iterator
  1484. {
  1485. __hash_code __code = this->_M_hash_code_tr(__k);
  1486. std::size_t __bkt = _M_bucket_index(__code);
  1487. return const_iterator(_M_find_node_tr(__bkt, __k, __code));
  1488. }
  1489. #endif
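// The _M_find_tr overloads above implement C++20 heterogeneous lookup.
// Illustrative use through the public containers (not part of this
// header); the hasher shown is a hypothetical user-defined type:
//
//   struct string_hash
//   {
//     using is_transparent = void;
//     std::size_t operator()(std::string_view sv) const
//     { return std::hash<std::string_view>{}(sv); }
//   };
//
//   std::unordered_map<std::string, int, string_hash, std::equal_to<>> m;
//   m.find(std::string_view("key")); // no temporary std::string is built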
  1490. template<typename _Key, typename _Value, typename _Alloc,
  1491. typename _ExtractKey, typename _Equal,
  1492. typename _Hash, typename _RangeHash, typename _Unused,
  1493. typename _RehashPolicy, typename _Traits>
  1494. auto
  1495. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1496. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1497. count(const key_type& __k) const
  1498. -> size_type
  1499. {
  1500. auto __it = find(__k);
  1501. if (!__it._M_cur)
  1502. return 0;
  1503. if (__unique_keys::value)
  1504. return 1;
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1508. size_type __result = 1;
  1509. for (auto __ref = __it++;
  1510. __it._M_cur && this->_M_node_equals(*__ref._M_cur, *__it._M_cur);
  1511. ++__it)
  1512. ++__result;
  1513. return __result;
  1514. }
  1515. #if __cplusplus > 201703L
  1516. template<typename _Key, typename _Value, typename _Alloc,
  1517. typename _ExtractKey, typename _Equal,
  1518. typename _Hash, typename _RangeHash, typename _Unused,
  1519. typename _RehashPolicy, typename _Traits>
  1520. template<typename _Kt, typename, typename>
  1521. auto
  1522. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1523. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1524. _M_count_tr(const _Kt& __k) const
  1525. -> size_type
  1526. {
  1527. __hash_code __code = this->_M_hash_code_tr(__k);
  1528. std::size_t __bkt = _M_bucket_index(__code);
  1529. auto __n = _M_find_node_tr(__bkt, __k, __code);
  1530. if (!__n)
  1531. return 0;
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1535. iterator __it(__n);
  1536. size_type __result = 1;
  1537. for (++__it;
  1538. __it._M_cur && this->_M_equals_tr(__k, __code, *__it._M_cur);
  1539. ++__it)
  1540. ++__result;
  1541. return __result;
  1542. }
  1543. #endif
  1544. template<typename _Key, typename _Value, typename _Alloc,
  1545. typename _ExtractKey, typename _Equal,
  1546. typename _Hash, typename _RangeHash, typename _Unused,
  1547. typename _RehashPolicy, typename _Traits>
  1548. auto
  1549. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1550. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1551. equal_range(const key_type& __k)
  1552. -> pair<iterator, iterator>
  1553. {
  1554. auto __ite = find(__k);
  1555. if (!__ite._M_cur)
  1556. return { __ite, __ite };
  1557. auto __beg = __ite++;
  1558. if (__unique_keys::value)
  1559. return { __beg, __ite };
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1563. while (__ite._M_cur && this->_M_node_equals(*__beg._M_cur, *__ite._M_cur))
  1564. ++__ite;
  1565. return { __beg, __ite };
  1566. }
  1567. template<typename _Key, typename _Value, typename _Alloc,
  1568. typename _ExtractKey, typename _Equal,
  1569. typename _Hash, typename _RangeHash, typename _Unused,
  1570. typename _RehashPolicy, typename _Traits>
  1571. auto
  1572. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1573. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1574. equal_range(const key_type& __k) const
  1575. -> pair<const_iterator, const_iterator>
  1576. {
  1577. auto __ite = find(__k);
  1578. if (!__ite._M_cur)
  1579. return { __ite, __ite };
  1580. auto __beg = __ite++;
  1581. if (__unique_keys::value)
  1582. return { __beg, __ite };
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1586. while (__ite._M_cur && this->_M_node_equals(*__beg._M_cur, *__ite._M_cur))
  1587. ++__ite;
  1588. return { __beg, __ite };
  1589. }
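// Illustrative behaviour of the equal_range overloads above (not part
// of this header): equivalent elements are stored adjacently, so
//
//   std::unordered_multiset<int> s{1, 1, 2};
//   auto range = s.equal_range(1);
//
// yields a range covering exactly the two adjacent elements equal to 1.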
  1590. #if __cplusplus > 201703L
  1591. template<typename _Key, typename _Value, typename _Alloc,
  1592. typename _ExtractKey, typename _Equal,
  1593. typename _Hash, typename _RangeHash, typename _Unused,
  1594. typename _RehashPolicy, typename _Traits>
  1595. template<typename _Kt, typename, typename>
  1596. auto
  1597. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1598. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1599. _M_equal_range_tr(const _Kt& __k)
  1600. -> pair<iterator, iterator>
  1601. {
  1602. __hash_code __code = this->_M_hash_code_tr(__k);
  1603. std::size_t __bkt = _M_bucket_index(__code);
  1604. auto __n = _M_find_node_tr(__bkt, __k, __code);
  1605. iterator __ite(__n);
  1606. if (!__n)
  1607. return { __ite, __ite };
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1611. auto __beg = __ite++;
  1612. while (__ite._M_cur && this->_M_equals_tr(__k, __code, *__ite._M_cur))
  1613. ++__ite;
  1614. return { __beg, __ite };
  1615. }
  1616. template<typename _Key, typename _Value, typename _Alloc,
  1617. typename _ExtractKey, typename _Equal,
  1618. typename _Hash, typename _RangeHash, typename _Unused,
  1619. typename _RehashPolicy, typename _Traits>
  1620. template<typename _Kt, typename, typename>
  1621. auto
  1622. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1623. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1624. _M_equal_range_tr(const _Kt& __k) const
  1625. -> pair<const_iterator, const_iterator>
  1626. {
  1627. __hash_code __code = this->_M_hash_code_tr(__k);
  1628. std::size_t __bkt = _M_bucket_index(__code);
  1629. auto __n = _M_find_node_tr(__bkt, __k, __code);
  1630. const_iterator __ite(__n);
  1631. if (!__n)
  1632. return { __ite, __ite };
// All equivalent values are adjacent: once we find a non-equivalent
// value after an equivalent one, no further equivalent values can
// follow.
  1636. auto __beg = __ite++;
  1637. while (__ite._M_cur && this->_M_equals_tr(__k, __code, *__ite._M_cur))
  1638. ++__ite;
  1639. return { __beg, __ite };
  1640. }
  1641. #endif
  1642. // Find the node before the one whose key compares equal to k.
  1643. // Return nullptr if no node is found.
  1644. template<typename _Key, typename _Value, typename _Alloc,
  1645. typename _ExtractKey, typename _Equal,
  1646. typename _Hash, typename _RangeHash, typename _Unused,
  1647. typename _RehashPolicy, typename _Traits>
  1648. auto
  1649. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1650. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1651. _M_find_before_node(const key_type& __k)
  1652. -> __node_base_ptr
  1653. {
  1654. __node_base_ptr __prev_p = &_M_before_begin;
  1655. if (!__prev_p->_M_nxt)
  1656. return nullptr;
  1657. for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);
  1658. __p != nullptr;
  1659. __p = __p->_M_next())
  1660. {
  1661. if (this->_M_key_equals(__k, *__p))
  1662. return __prev_p;
  1663. __prev_p = __p;
  1664. }
  1665. return nullptr;
  1666. }
  1667. // Find the node before the one whose key compares equal to k in the bucket
  1668. // bkt. Return nullptr if no node is found.
  1669. template<typename _Key, typename _Value, typename _Alloc,
  1670. typename _ExtractKey, typename _Equal,
  1671. typename _Hash, typename _RangeHash, typename _Unused,
  1672. typename _RehashPolicy, typename _Traits>
  1673. auto
  1674. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1675. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1676. _M_find_before_node(size_type __bkt, const key_type& __k,
  1677. __hash_code __code) const
  1678. -> __node_base_ptr
  1679. {
  1680. __node_base_ptr __prev_p = _M_buckets[__bkt];
  1681. if (!__prev_p)
  1682. return nullptr;
  1683. for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
  1684. __p = __p->_M_next())
  1685. {
  1686. if (this->_M_equals(__k, __code, *__p))
  1687. return __prev_p;
  1688. if (!__p->_M_nxt || _M_bucket_index(*__p->_M_next()) != __bkt)
  1689. break;
  1690. __prev_p = __p;
  1691. }
  1692. return nullptr;
  1693. }
  1694. template<typename _Key, typename _Value, typename _Alloc,
  1695. typename _ExtractKey, typename _Equal,
  1696. typename _Hash, typename _RangeHash, typename _Unused,
  1697. typename _RehashPolicy, typename _Traits>
  1698. template<typename _Kt>
  1699. auto
  1700. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1701. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1702. _M_find_before_node_tr(size_type __bkt, const _Kt& __k,
  1703. __hash_code __code) const
  1704. -> __node_base_ptr
  1705. {
  1706. __node_base_ptr __prev_p = _M_buckets[__bkt];
  1707. if (!__prev_p)
  1708. return nullptr;
  1709. for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
  1710. __p = __p->_M_next())
  1711. {
  1712. if (this->_M_equals_tr(__k, __code, *__p))
  1713. return __prev_p;
  1714. if (!__p->_M_nxt || _M_bucket_index(*__p->_M_next()) != __bkt)
  1715. break;
  1716. __prev_p = __p;
  1717. }
  1718. return nullptr;
  1719. }
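// Bucket maintenance helpers. The container is a single forward list
// hanging off _M_before_begin; a non-empty bucket stores a pointer to
// the node *preceding* its first element (possibly _M_before_begin
// itself), which is what lets insertion and erasure relink nodes
// without any backward traversal.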
  1720. template<typename _Key, typename _Value, typename _Alloc,
  1721. typename _ExtractKey, typename _Equal,
  1722. typename _Hash, typename _RangeHash, typename _Unused,
  1723. typename _RehashPolicy, typename _Traits>
  1724. void
  1725. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1726. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1727. _M_insert_bucket_begin(size_type __bkt, __node_ptr __node)
  1728. {
  1729. if (_M_buckets[__bkt])
  1730. {
// The bucket is not empty: insert the new node right after the
// bucket's before-begin node.
  1733. __node->_M_nxt = _M_buckets[__bkt]->_M_nxt;
  1734. _M_buckets[__bkt]->_M_nxt = __node;
  1735. }
  1736. else
  1737. {
// The bucket is empty: the new node is inserted at the beginning of
// the singly-linked list, and the bucket will point to _M_before_begin.
  1741. __node->_M_nxt = _M_before_begin._M_nxt;
  1742. _M_before_begin._M_nxt = __node;
  1743. if (__node->_M_nxt)
// We must update the former begin bucket, which is still pointing to
// _M_before_begin.
  1746. _M_buckets[_M_bucket_index(*__node->_M_next())] = __node;
  1747. _M_buckets[__bkt] = &_M_before_begin;
  1748. }
  1749. }
  1750. template<typename _Key, typename _Value, typename _Alloc,
  1751. typename _ExtractKey, typename _Equal,
  1752. typename _Hash, typename _RangeHash, typename _Unused,
  1753. typename _RehashPolicy, typename _Traits>
  1754. void
  1755. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1756. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1757. _M_remove_bucket_begin(size_type __bkt, __node_ptr __next,
  1758. size_type __next_bkt)
  1759. {
  1760. if (!__next || __next_bkt != __bkt)
  1761. {
// The bucket is now empty.
// First update the next bucket, if any.
  1764. if (__next)
  1765. _M_buckets[__next_bkt] = _M_buckets[__bkt];
// Then update the before-begin node if necessary.
  1767. if (&_M_before_begin == _M_buckets[__bkt])
  1768. _M_before_begin._M_nxt = __next;
  1769. _M_buckets[__bkt] = nullptr;
  1770. }
  1771. }
  1772. template<typename _Key, typename _Value, typename _Alloc,
  1773. typename _ExtractKey, typename _Equal,
  1774. typename _Hash, typename _RangeHash, typename _Unused,
  1775. typename _RehashPolicy, typename _Traits>
  1776. auto
  1777. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1778. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1779. _M_get_previous_node(size_type __bkt, __node_ptr __n)
  1780. -> __node_base_ptr
  1781. {
  1782. __node_base_ptr __prev_n = _M_buckets[__bkt];
  1783. while (__prev_n->_M_nxt != __n)
  1784. __prev_n = __prev_n->_M_nxt;
  1785. return __prev_n;
  1786. }
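// The insertion members below build the new node first, inside a
// _Scoped_node guard: this gives access to the key and hash code before
// the element is linked in, and the guard deallocates the node if the
// insertion is abandoned (e.g. an equivalent key already exists) or an
// exception is thrown. Setting the guard's _M_node to null releases
// ownership once the node has been linked into the container.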
  1787. template<typename _Key, typename _Value, typename _Alloc,
  1788. typename _ExtractKey, typename _Equal,
  1789. typename _Hash, typename _RangeHash, typename _Unused,
  1790. typename _RehashPolicy, typename _Traits>
  1791. template<typename... _Args>
  1792. auto
  1793. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1794. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1795. _M_emplace(true_type /* __uks */, _Args&&... __args)
  1796. -> pair<iterator, bool>
  1797. {
  1798. // First build the node to get access to the hash code
  1799. _Scoped_node __node { this, std::forward<_Args>(__args)... };
  1800. const key_type& __k = _ExtractKey{}(__node._M_node->_M_v());
  1801. if (size() <= __small_size_threshold())
  1802. {
  1803. for (auto __it = begin(); __it != end(); ++__it)
  1804. if (this->_M_key_equals(__k, *__it._M_cur))
  1805. // There is already an equivalent node, no insertion
  1806. return { __it, false };
  1807. }
  1808. __hash_code __code = this->_M_hash_code(__k);
  1809. size_type __bkt = _M_bucket_index(__code);
  1810. if (size() > __small_size_threshold())
  1811. if (__node_ptr __p = _M_find_node(__bkt, __k, __code))
  1812. // There is already an equivalent node, no insertion
  1813. return { iterator(__p), false };
  1814. // Insert the node
  1815. auto __pos = _M_insert_unique_node(__bkt, __code, __node._M_node);
  1816. __node._M_node = nullptr;
  1817. return { __pos, true };
  1818. }
  1819. template<typename _Key, typename _Value, typename _Alloc,
  1820. typename _ExtractKey, typename _Equal,
  1821. typename _Hash, typename _RangeHash, typename _Unused,
  1822. typename _RehashPolicy, typename _Traits>
  1823. template<typename... _Args>
  1824. auto
  1825. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1826. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1827. _M_emplace(const_iterator __hint, false_type /* __uks */,
  1828. _Args&&... __args)
  1829. -> iterator
  1830. {
  1831. // First build the node to get its hash code.
  1832. _Scoped_node __node { this, std::forward<_Args>(__args)... };
  1833. const key_type& __k = _ExtractKey{}(__node._M_node->_M_v());
  1834. auto __res = this->_M_compute_hash_code(__hint, __k);
  1835. auto __pos
  1836. = _M_insert_multi_node(__res.first._M_cur, __res.second,
  1837. __node._M_node);
  1838. __node._M_node = nullptr;
  1839. return __pos;
  1840. }
  1841. template<typename _Key, typename _Value, typename _Alloc,
  1842. typename _ExtractKey, typename _Equal,
  1843. typename _Hash, typename _RangeHash, typename _Unused,
  1844. typename _RehashPolicy, typename _Traits>
  1845. auto
  1846. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1847. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1848. _M_compute_hash_code(const_iterator __hint, const key_type& __k) const
  1849. -> pair<const_iterator, __hash_code>
  1850. {
  1851. if (size() <= __small_size_threshold())
  1852. {
  1853. if (__hint != cend())
  1854. {
  1855. for (auto __it = __hint; __it != cend(); ++__it)
  1856. if (this->_M_key_equals(__k, *__it._M_cur))
  1857. return { __it, this->_M_hash_code(*__it._M_cur) };
  1858. }
  1859. for (auto __it = cbegin(); __it != __hint; ++__it)
  1860. if (this->_M_key_equals(__k, *__it._M_cur))
  1861. return { __it, this->_M_hash_code(*__it._M_cur) };
  1862. }
  1863. return { __hint, this->_M_hash_code(__k) };
  1864. }
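// _M_insert_unique_node and _M_insert_multi_node do the actual linking.
// Both first ask the rehash policy whether the insertion requires a
// rehash (restoring the policy's saved state if the rehash throws), and
// only then store the cached hash code and link the node, so a failed
// rehash leaves the container unchanged.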
  1865. template<typename _Key, typename _Value, typename _Alloc,
  1866. typename _ExtractKey, typename _Equal,
  1867. typename _Hash, typename _RangeHash, typename _Unused,
  1868. typename _RehashPolicy, typename _Traits>
  1869. auto
  1870. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1871. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1872. _M_insert_unique_node(size_type __bkt, __hash_code __code,
  1873. __node_ptr __node, size_type __n_elt)
  1874. -> iterator
  1875. {
  1876. const __rehash_state& __saved_state = _M_rehash_policy._M_state();
  1877. std::pair<bool, std::size_t> __do_rehash
  1878. = _M_rehash_policy._M_need_rehash(_M_bucket_count, _M_element_count,
  1879. __n_elt);
  1880. if (__do_rehash.first)
  1881. {
  1882. _M_rehash(__do_rehash.second, __saved_state);
  1883. __bkt = _M_bucket_index(__code);
  1884. }
  1885. this->_M_store_code(*__node, __code);
  1886. // Always insert at the beginning of the bucket.
  1887. _M_insert_bucket_begin(__bkt, __node);
  1888. ++_M_element_count;
  1889. return iterator(__node);
  1890. }
  1891. template<typename _Key, typename _Value, typename _Alloc,
  1892. typename _ExtractKey, typename _Equal,
  1893. typename _Hash, typename _RangeHash, typename _Unused,
  1894. typename _RehashPolicy, typename _Traits>
  1895. auto
  1896. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1897. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1898. _M_insert_multi_node(__node_ptr __hint,
  1899. __hash_code __code, __node_ptr __node)
  1900. -> iterator
  1901. {
  1902. const __rehash_state& __saved_state = _M_rehash_policy._M_state();
  1903. std::pair<bool, std::size_t> __do_rehash
  1904. = _M_rehash_policy._M_need_rehash(_M_bucket_count, _M_element_count, 1);
  1905. if (__do_rehash.first)
  1906. _M_rehash(__do_rehash.second, __saved_state);
  1907. this->_M_store_code(*__node, __code);
  1908. const key_type& __k = _ExtractKey{}(__node->_M_v());
  1909. size_type __bkt = _M_bucket_index(__code);
// Find the node before an equivalent one, or use the hint if it
// exists and is equivalent.
  1912. __node_base_ptr __prev
  1913. = __builtin_expect(__hint != nullptr, false)
  1914. && this->_M_equals(__k, __code, *__hint)
  1915. ? __hint
  1916. : _M_find_before_node(__bkt, __k, __code);
  1917. if (__prev)
  1918. {
  1919. // Insert after the node before the equivalent one.
  1920. __node->_M_nxt = __prev->_M_nxt;
  1921. __prev->_M_nxt = __node;
  1922. if (__builtin_expect(__prev == __hint, false))
// The hint might be the last node of its bucket, in which case we
// need to update the next bucket.
  1925. if (__node->_M_nxt
  1926. && !this->_M_equals(__k, __code, *__node->_M_next()))
  1927. {
  1928. size_type __next_bkt = _M_bucket_index(*__node->_M_next());
  1929. if (__next_bkt != __bkt)
  1930. _M_buckets[__next_bkt] = __node;
  1931. }
  1932. }
  1933. else
  1934. // The inserted node has no equivalent in the hashtable. We must
  1935. // insert the new node at the beginning of the bucket to preserve
  1936. // equivalent elements' relative positions.
  1937. _M_insert_bucket_begin(__bkt, __node);
  1938. ++_M_element_count;
  1939. return iterator(__node);
  1940. }
  1941. // Insert v if no element with its key is already present.
  1942. template<typename _Key, typename _Value, typename _Alloc,
  1943. typename _ExtractKey, typename _Equal,
  1944. typename _Hash, typename _RangeHash, typename _Unused,
  1945. typename _RehashPolicy, typename _Traits>
  1946. template<typename _Kt, typename _Arg, typename _NodeGenerator>
  1947. auto
  1948. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1949. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1950. _M_insert_unique(_Kt&& __k, _Arg&& __v,
  1951. const _NodeGenerator& __node_gen)
  1952. -> pair<iterator, bool>
  1953. {
  1954. if (size() <= __small_size_threshold())
  1955. for (auto __it = begin(); __it != end(); ++__it)
  1956. if (this->_M_key_equals_tr(__k, *__it._M_cur))
  1957. return { __it, false };
  1958. __hash_code __code = this->_M_hash_code_tr(__k);
  1959. size_type __bkt = _M_bucket_index(__code);
  1960. if (size() > __small_size_threshold())
  1961. if (__node_ptr __node = _M_find_node_tr(__bkt, __k, __code))
  1962. return { iterator(__node), false };
  1963. _Scoped_node __node {
  1964. __node_builder_t::_S_build(std::forward<_Kt>(__k),
  1965. std::forward<_Arg>(__v),
  1966. __node_gen),
  1967. this
  1968. };
  1969. auto __pos
  1970. = _M_insert_unique_node(__bkt, __code, __node._M_node);
  1971. __node._M_node = nullptr;
  1972. return { __pos, true };
  1973. }
  1974. // Insert v unconditionally.
  1975. template<typename _Key, typename _Value, typename _Alloc,
  1976. typename _ExtractKey, typename _Equal,
  1977. typename _Hash, typename _RangeHash, typename _Unused,
  1978. typename _RehashPolicy, typename _Traits>
  1979. template<typename _Arg, typename _NodeGenerator>
  1980. auto
  1981. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1982. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1983. _M_insert(const_iterator __hint, _Arg&& __v,
  1984. const _NodeGenerator& __node_gen,
  1985. false_type /* __uks */)
  1986. -> iterator
  1987. {
// First allocate the new node so that nothing is modified if it throws.
  1989. _Scoped_node __node{ __node_gen(std::forward<_Arg>(__v)), this };
// Then compute the hash code so that we don't rehash if it throws.
  1991. auto __res = this->_M_compute_hash_code(
  1992. __hint, _ExtractKey{}(__node._M_node->_M_v()));
  1993. auto __pos
  1994. = _M_insert_multi_node(__res.first._M_cur, __res.second,
  1995. __node._M_node);
  1996. __node._M_node = nullptr;
  1997. return __pos;
  1998. }
  1999. template<typename _Key, typename _Value, typename _Alloc,
  2000. typename _ExtractKey, typename _Equal,
  2001. typename _Hash, typename _RangeHash, typename _Unused,
  2002. typename _RehashPolicy, typename _Traits>
  2003. auto
  2004. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2005. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2006. erase(const_iterator __it)
  2007. -> iterator
  2008. {
  2009. __node_ptr __n = __it._M_cur;
  2010. std::size_t __bkt = _M_bucket_index(*__n);
// Look for the previous node, to unlink the erased one from it; this
// is why buckets store the before-begin node: it makes this search
// fast.
  2014. __node_base_ptr __prev_n = _M_get_previous_node(__bkt, __n);
  2015. return _M_erase(__bkt, __prev_n, __n);
  2016. }
  2017. template<typename _Key, typename _Value, typename _Alloc,
  2018. typename _ExtractKey, typename _Equal,
  2019. typename _Hash, typename _RangeHash, typename _Unused,
  2020. typename _RehashPolicy, typename _Traits>
  2021. auto
  2022. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2023. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2024. _M_erase(size_type __bkt, __node_base_ptr __prev_n, __node_ptr __n)
  2025. -> iterator
  2026. {
  2027. if (__prev_n == _M_buckets[__bkt])
  2028. _M_remove_bucket_begin(__bkt, __n->_M_next(),
  2029. __n->_M_nxt ? _M_bucket_index(*__n->_M_next()) : 0);
  2030. else if (__n->_M_nxt)
  2031. {
  2032. size_type __next_bkt = _M_bucket_index(*__n->_M_next());
  2033. if (__next_bkt != __bkt)
  2034. _M_buckets[__next_bkt] = __prev_n;
  2035. }
  2036. __prev_n->_M_nxt = __n->_M_nxt;
  2037. iterator __result(__n->_M_next());
  2038. this->_M_deallocate_node(__n);
  2039. --_M_element_count;
  2040. return __result;
  2041. }
  2042. template<typename _Key, typename _Value, typename _Alloc,
  2043. typename _ExtractKey, typename _Equal,
  2044. typename _Hash, typename _RangeHash, typename _Unused,
  2045. typename _RehashPolicy, typename _Traits>
  2046. auto
  2047. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2048. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2049. _M_erase(true_type /* __uks */, const key_type& __k)
  2050. -> size_type
  2051. {
  2052. __node_base_ptr __prev_n;
  2053. __node_ptr __n;
  2054. std::size_t __bkt;
  2055. if (size() <= __small_size_threshold())
  2056. {
  2057. __prev_n = _M_find_before_node(__k);
  2058. if (!__prev_n)
  2059. return 0;
// We found a matching node; erase it.
  2061. __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  2062. __bkt = _M_bucket_index(*__n);
  2063. }
  2064. else
  2065. {
  2066. __hash_code __code = this->_M_hash_code(__k);
  2067. __bkt = _M_bucket_index(__code);
  2068. // Look for the node before the first matching node.
  2069. __prev_n = _M_find_before_node(__bkt, __k, __code);
  2070. if (!__prev_n)
  2071. return 0;
// We found a matching node; erase it.
  2073. __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  2074. }
  2075. _M_erase(__bkt, __prev_n, __n);
  2076. return 1;
  2077. }
  2078. template<typename _Key, typename _Value, typename _Alloc,
  2079. typename _ExtractKey, typename _Equal,
  2080. typename _Hash, typename _RangeHash, typename _Unused,
  2081. typename _RehashPolicy, typename _Traits>
  2082. auto
  2083. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2084. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2085. _M_erase(false_type /* __uks */, const key_type& __k)
  2086. -> size_type
  2087. {
  2088. std::size_t __bkt;
  2089. __node_base_ptr __prev_n;
  2090. __node_ptr __n;
  2091. if (size() <= __small_size_threshold())
  2092. {
  2093. __prev_n = _M_find_before_node(__k);
  2094. if (!__prev_n)
  2095. return 0;
// We found a matching node; erase it.
  2097. __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  2098. __bkt = _M_bucket_index(*__n);
  2099. }
  2100. else
  2101. {
  2102. __hash_code __code = this->_M_hash_code(__k);
  2103. __bkt = _M_bucket_index(__code);
  2104. // Look for the node before the first matching node.
  2105. __prev_n = _M_find_before_node(__bkt, __k, __code);
  2106. if (!__prev_n)
  2107. return 0;
  2108. __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  2109. }
  2110. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  2111. // 526. Is it undefined if a function in the standard changes
  2112. // in parameters?
  2113. // We use one loop to find all matching nodes and another to deallocate
  2114. // them so that the key stays valid during the first loop. It might be
  2115. // invalidated indirectly when destroying nodes.
  2116. __node_ptr __n_last = __n->_M_next();
  2117. while (__n_last && this->_M_node_equals(*__n, *__n_last))
  2118. __n_last = __n_last->_M_next();
  2119. std::size_t __n_last_bkt = __n_last ? _M_bucket_index(*__n_last) : __bkt;
  2120. // Deallocate nodes.
  2121. size_type __result = 0;
  2122. do
  2123. {
  2124. __node_ptr __p = __n->_M_next();
  2125. this->_M_deallocate_node(__n);
  2126. __n = __p;
  2127. ++__result;
  2128. }
  2129. while (__n != __n_last);
  2130. _M_element_count -= __result;
  2131. if (__prev_n == _M_buckets[__bkt])
  2132. _M_remove_bucket_begin(__bkt, __n_last, __n_last_bkt);
  2133. else if (__n_last_bkt != __bkt)
  2134. _M_buckets[__n_last_bkt] = __prev_n;
  2135. __prev_n->_M_nxt = __n_last;
  2136. return __result;
  2137. }
  2138. template<typename _Key, typename _Value, typename _Alloc,
  2139. typename _ExtractKey, typename _Equal,
  2140. typename _Hash, typename _RangeHash, typename _Unused,
  2141. typename _RehashPolicy, typename _Traits>
  2142. auto
  2143. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2144. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2145. erase(const_iterator __first, const_iterator __last)
  2146. -> iterator
  2147. {
  2148. __node_ptr __n = __first._M_cur;
  2149. __node_ptr __last_n = __last._M_cur;
  2150. if (__n == __last_n)
  2151. return iterator(__n);
  2152. std::size_t __bkt = _M_bucket_index(*__n);
  2153. __node_base_ptr __prev_n = _M_get_previous_node(__bkt, __n);
  2154. bool __is_bucket_begin = __n == _M_bucket_begin(__bkt);
  2155. std::size_t __n_bkt = __bkt;
  2156. for (;;)
  2157. {
  2158. do
  2159. {
  2160. __node_ptr __tmp = __n;
  2161. __n = __n->_M_next();
  2162. this->_M_deallocate_node(__tmp);
  2163. --_M_element_count;
  2164. if (!__n)
  2165. break;
  2166. __n_bkt = _M_bucket_index(*__n);
  2167. }
  2168. while (__n != __last_n && __n_bkt == __bkt);
  2169. if (__is_bucket_begin)
  2170. _M_remove_bucket_begin(__bkt, __n, __n_bkt);
  2171. if (__n == __last_n)
  2172. break;
  2173. __is_bucket_begin = true;
  2174. __bkt = __n_bkt;
  2175. }
  2176. if (__n && (__n_bkt != __bkt || __is_bucket_begin))
  2177. _M_buckets[__n_bkt] = __prev_n;
  2178. __prev_n->_M_nxt = __n;
  2179. return iterator(__n);
  2180. }
  2181. template<typename _Key, typename _Value, typename _Alloc,
  2182. typename _ExtractKey, typename _Equal,
  2183. typename _Hash, typename _RangeHash, typename _Unused,
  2184. typename _RehashPolicy, typename _Traits>
  2185. void
  2186. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2187. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2188. clear() noexcept
  2189. {
  2190. this->_M_deallocate_nodes(_M_begin());
  2191. __builtin_memset(_M_buckets, 0,
  2192. _M_bucket_count * sizeof(__node_base_ptr));
  2193. _M_element_count = 0;
  2194. _M_before_begin._M_nxt = nullptr;
  2195. }
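// rehash() below never shrinks past what the current element count
// requires: the requested count is raised to at least
// _M_bkt_for_elements(_M_element_count + 1) and then rounded up by the
// policy (to the next prime for the default _Prime_rehash_policy).
// Illustrative use (not part of this header):
//
//   std::unordered_set<int> s;
//   s.rehash(100); // bucket_count() becomes >= 100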
  2196. template<typename _Key, typename _Value, typename _Alloc,
  2197. typename _ExtractKey, typename _Equal,
  2198. typename _Hash, typename _RangeHash, typename _Unused,
  2199. typename _RehashPolicy, typename _Traits>
  2200. void
  2201. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2202. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2203. rehash(size_type __bkt_count)
  2204. {
  2205. const __rehash_state& __saved_state = _M_rehash_policy._M_state();
  2206. __bkt_count
  2207. = std::max(_M_rehash_policy._M_bkt_for_elements(_M_element_count + 1),
  2208. __bkt_count);
  2209. __bkt_count = _M_rehash_policy._M_next_bkt(__bkt_count);
  2210. if (__bkt_count != _M_bucket_count)
  2211. _M_rehash(__bkt_count, __saved_state);
  2212. else
// No rehash: restore the previous state to keep it consistent with
// the container state.
  2215. _M_rehash_policy._M_reset(__saved_state);
  2216. }
  2217. template<typename _Key, typename _Value, typename _Alloc,
  2218. typename _ExtractKey, typename _Equal,
  2219. typename _Hash, typename _RangeHash, typename _Unused,
  2220. typename _RehashPolicy, typename _Traits>
  2221. void
  2222. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2223. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2224. _M_rehash(size_type __bkt_count, const __rehash_state& __state)
  2225. {
  2226. __try
  2227. {
  2228. _M_rehash_aux(__bkt_count, __unique_keys{});
  2229. }
  2230. __catch(...)
  2231. {
// A failure here means that bucket allocation failed. We only have
// to restore the hash policy's previous state.
  2234. _M_rehash_policy._M_reset(__state);
  2235. __throw_exception_again;
  2236. }
  2237. }
// Rehash when there are no equivalent elements.
  2239. template<typename _Key, typename _Value, typename _Alloc,
  2240. typename _ExtractKey, typename _Equal,
  2241. typename _Hash, typename _RangeHash, typename _Unused,
  2242. typename _RehashPolicy, typename _Traits>
  2243. void
  2244. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2245. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2246. _M_rehash_aux(size_type __bkt_count, true_type /* __uks */)
  2247. {
  2248. __buckets_ptr __new_buckets = _M_allocate_buckets(__bkt_count);
  2249. __node_ptr __p = _M_begin();
  2250. _M_before_begin._M_nxt = nullptr;
  2251. std::size_t __bbegin_bkt = 0;
  2252. while (__p)
  2253. {
  2254. __node_ptr __next = __p->_M_next();
  2255. std::size_t __bkt
  2256. = __hash_code_base::_M_bucket_index(*__p, __bkt_count);
  2257. if (!__new_buckets[__bkt])
  2258. {
  2259. __p->_M_nxt = _M_before_begin._M_nxt;
  2260. _M_before_begin._M_nxt = __p;
  2261. __new_buckets[__bkt] = &_M_before_begin;
  2262. if (__p->_M_nxt)
  2263. __new_buckets[__bbegin_bkt] = __p;
  2264. __bbegin_bkt = __bkt;
  2265. }
  2266. else
  2267. {
  2268. __p->_M_nxt = __new_buckets[__bkt]->_M_nxt;
  2269. __new_buckets[__bkt]->_M_nxt = __p;
  2270. }
  2271. __p = __next;
  2272. }
  2273. _M_deallocate_buckets();
  2274. _M_bucket_count = __bkt_count;
  2275. _M_buckets = __new_buckets;
  2276. }
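// The overload below handles containers that may hold equivalent keys.
// It additionally tracks the previously processed node and bucket so
// that a run of equivalent elements is re-linked in its original order,
// and it defers the "did we change a bucket's last node" fixup via
// __check_bucket to avoid performing it for every node of such a run.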
// Rehash when there can be equivalent elements, preserving their
// relative order.
  2279. template<typename _Key, typename _Value, typename _Alloc,
  2280. typename _ExtractKey, typename _Equal,
  2281. typename _Hash, typename _RangeHash, typename _Unused,
  2282. typename _RehashPolicy, typename _Traits>
  2283. void
  2284. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  2285. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  2286. _M_rehash_aux(size_type __bkt_count, false_type /* __uks */)
  2287. {
  2288. __buckets_ptr __new_buckets = _M_allocate_buckets(__bkt_count);
  2289. __node_ptr __p = _M_begin();
  2290. _M_before_begin._M_nxt = nullptr;
  2291. std::size_t __bbegin_bkt = 0;
  2292. std::size_t __prev_bkt = 0;
  2293. __node_ptr __prev_p = nullptr;
  2294. bool __check_bucket = false;
  2295. while (__p)
  2296. {
  2297. __node_ptr __next = __p->_M_next();
  2298. std::size_t __bkt
  2299. = __hash_code_base::_M_bucket_index(*__p, __bkt_count);
  2300. if (__prev_p && __prev_bkt == __bkt)
  2301. {
// The previous insertion was in this same bucket: insert after the
// previously inserted node to preserve the relative order of
// equivalent elements.
  2305. __p->_M_nxt = __prev_p->_M_nxt;
  2306. __prev_p->_M_nxt = __p;
// Inserting after a node within a bucket requires checking whether we
// have changed the bucket's last node; if so, the next bucket (whose
// before-begin node it holds) must be updated. We schedule that check
// for when we leave the sequence of equivalent nodes, to limit the
// number of checks.
  2312. __check_bucket = true;
  2313. }
  2314. else
  2315. {
  2316. if (__check_bucket)
  2317. {
// Check whether we need to update the next bucket because of
// insertions into the __prev_bkt bucket.
  2320. if (__prev_p->_M_nxt)
  2321. {
  2322. std::size_t __next_bkt
  2323. = __hash_code_base::_M_bucket_index(
  2324. *__prev_p->_M_next(), __bkt_count);
  2325. if (__next_bkt != __prev_bkt)
  2326. __new_buckets[__next_bkt] = __prev_p;
  2327. }
  2328. __check_bucket = false;
  2329. }
  2330. if (!__new_buckets[__bkt])
  2331. {
  2332. __p->_M_nxt = _M_before_begin._M_nxt;
  2333. _M_before_begin._M_nxt = __p;
  2334. __new_buckets[__bkt] = &_M_before_begin;
  2335. if (__p->_M_nxt)
  2336. __new_buckets[__bbegin_bkt] = __p;
  2337. __bbegin_bkt = __bkt;
  2338. }
  2339. else
  2340. {
  2341. __p->_M_nxt = __new_buckets[__bkt]->_M_nxt;
  2342. __new_buckets[__bkt]->_M_nxt = __p;
  2343. }
  2344. }
  2345. __prev_p = __p;
  2346. __prev_bkt = __bkt;
  2347. __p = __next;
  2348. }
  2349. if (__check_bucket && __prev_p->_M_nxt)
  2350. {
  2351. std::size_t __next_bkt
  2352. = __hash_code_base::_M_bucket_index(*__prev_p->_M_next(),
  2353. __bkt_count);
  2354. if (__next_bkt != __prev_bkt)
  2355. __new_buckets[__next_bkt] = __prev_p;
  2356. }
  2357. _M_deallocate_buckets();
  2358. _M_bucket_count = __bkt_count;
  2359. _M_buckets = __new_buckets;
  2360. }
  2361. #if __cplusplus > 201402L
  2362. template<typename, typename, typename> class _Hash_merge_helper { };
  2363. #endif // C++17
  2364. #if __cpp_deduction_guides >= 201606
  2365. // Used to constrain deduction guides
  2366. template<typename _Hash>
  2367. using _RequireNotAllocatorOrIntegral
  2368. = __enable_if_t<!__or_<is_integral<_Hash>, __is_allocator<_Hash>>::value>;
  2369. #endif
  2370. /// @endcond
  2371. _GLIBCXX_END_NAMESPACE_VERSION
  2372. } // namespace std
  2373. #endif // _HASHTABLE_H