You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

robin_hood.h 88KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430
  1. // ______ _____ ______ _________
  2. // ______________ ___ /_ ___(_)_______ ___ /_ ______ ______ ______ /
  3. // __ ___/_ __ \__ __ \__ / __ __ \ __ __ \_ __ \_ __ \_ __ /
  4. // _ / / /_/ /_ /_/ /_ / _ / / / _ / / // /_/ // /_/ // /_/ /
  5. // /_/ \____/ /_.___/ /_/ /_/ /_/ ________/_/ /_/ \____/ \____/ \__,_/
  6. // _/_____/
  7. //
  8. // Fast & memory efficient hashtable based on robin hood hashing for C++11/14/17/20
  9. // https://github.com/martinus/robin-hood-hashing
  10. //
  11. // Licensed under the MIT License <http://opensource.org/licenses/MIT>.
  12. // SPDX-License-Identifier: MIT
  13. // Copyright (c) 2018-2020 Martin Ankerl <http://martin.ankerl.com>
  14. //
  15. // Permission is hereby granted, free of charge, to any person obtaining a copy
  16. // of this software and associated documentation files (the "Software"), to deal
  17. // in the Software without restriction, including without limitation the rights
  18. // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  19. // copies of the Software, and to permit persons to whom the Software is
  20. // furnished to do so, subject to the following conditions:
  21. //
  22. // The above copyright notice and this permission notice shall be included in all
  23. // copies or substantial portions of the Software.
  24. //
  25. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  26. // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  27. // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  28. // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  29. // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  30. // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  31. // SOFTWARE.
  32. #ifndef ROBIN_HOOD_H_INCLUDED
  33. #define ROBIN_HOOD_H_INCLUDED
  34. // see https://semver.org/
  35. #define ROBIN_HOOD_VERSION_MAJOR 3 // for incompatible API changes
  36. #define ROBIN_HOOD_VERSION_MINOR 9 // for adding functionality in a backwards-compatible manner
  37. #define ROBIN_HOOD_VERSION_PATCH 1 // for backwards-compatible bug fixes
  38. #include <algorithm>
  39. #include <cstdlib>
  40. #include <cstring>
  41. #include <functional>
  42. #include <limits>
  43. #include <memory> // only to support hash of smart pointers
  44. #include <stdexcept>
  45. #include <string>
  46. #include <type_traits>
  47. #include <utility>
  48. #if __cplusplus >= 201703L
  49. # include <string_view>
  50. #endif
// Optional instrumentation ---------------------------------------------------
// ROBIN_HOOD_LOG: verbose logging to std::cout, disabled unless the user
// defines ROBIN_HOOD_LOG_ENABLED before including this header.
// #define ROBIN_HOOD_LOG_ENABLED
#ifdef ROBIN_HOOD_LOG_ENABLED
# include <iostream>
# define ROBIN_HOOD_LOG(...) \
std::cout << __FUNCTION__ << "@" << __LINE__ << ": " << __VA_ARGS__ << std::endl;
#else
// expands to nothing in normal builds (callers write it without a semicolon)
# define ROBIN_HOOD_LOG(x)
#endif
// ROBIN_HOOD_TRACE: same mechanism as ROBIN_HOOD_LOG, kept separate so tracing
// can be toggled independently of allocation logging.
// #define ROBIN_HOOD_TRACE_ENABLED
#ifdef ROBIN_HOOD_TRACE_ENABLED
# include <iostream>
# define ROBIN_HOOD_TRACE(...) \
std::cout << __FUNCTION__ << "@" << __LINE__ << ": " << __VA_ARGS__ << std::endl;
#else
# define ROBIN_HOOD_TRACE(x)
#endif
// ROBIN_HOOD_COUNT: opt-in global event counters (shiftUp/shiftDown).
// #define ROBIN_HOOD_COUNT_ENABLED
#ifdef ROBIN_HOOD_COUNT_ENABLED
# include <iostream>
# define ROBIN_HOOD_COUNT(x) ++counts().x;
namespace robin_hood {
// Aggregated event counters; value-initialized to zero, printable via operator<<.
struct Counts {
uint64_t shiftUp{};
uint64_t shiftDown{};
};
inline std::ostream& operator<<(std::ostream& os, Counts const& c) {
return os << c.shiftUp << " shiftUp" << std::endl << c.shiftDown << " shiftDown" << std::endl;
}
// Function-local static accessor; `static` gives the function internal
// linkage, so each translation unit keeps its own counter instance.
static Counts& counts() {
static Counts counts{};
return counts;
}
} // namespace robin_hood
#else
# define ROBIN_HOOD_COUNT(x)
#endif
// all non-argument macros should use this facility. See
// https://www.fluentcpp.com/2019/05/28/better-macros-better-flags/
// ROBIN_HOOD(FOO) expands to ROBIN_HOOD_PRIVATE_DEFINITION_FOO(), so every
// feature flag is a function-like macro that cannot be accidentally #ifdef'd.
#define ROBIN_HOOD(x) ROBIN_HOOD_PRIVATE_DEFINITION_##x()
// mark unused members with this macro
// (expands to nothing, so the parameter name disappears from the signature)
#define ROBIN_HOOD_UNUSED(identifier)
// bitness
// Derived from SIZE_MAX so it reflects the actual compilation target.
#if SIZE_MAX == UINT32_MAX
# define ROBIN_HOOD_PRIVATE_DEFINITION_BITNESS() 32
#elif SIZE_MAX == UINT64_MAX
# define ROBIN_HOOD_PRIVATE_DEFINITION_BITNESS() 64
#else
# error Unsupported bitness
#endif
// endianess
// MSVC targets are treated as little endian; elsewhere the GCC/Clang
// predefined __BYTE_ORDER__ macros are consulted.
#ifdef _MSC_VER
# define ROBIN_HOOD_PRIVATE_DEFINITION_LITTLE_ENDIAN() 1
# define ROBIN_HOOD_PRIVATE_DEFINITION_BIG_ENDIAN() 0
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_LITTLE_ENDIAN() \
(__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
# define ROBIN_HOOD_PRIVATE_DEFINITION_BIG_ENDIAN() (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
#endif
// inline
#ifdef _MSC_VER
# define ROBIN_HOOD_PRIVATE_DEFINITION_NOINLINE() __declspec(noinline)
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_NOINLINE() __attribute__((noinline))
#endif
// exceptions
// Exceptions are considered unavailable only when no compiler-specific flag
// says otherwise (e.g. -fno-exceptions builds define none of these).
#if !defined(__cpp_exceptions) && !defined(__EXCEPTIONS) && !defined(_CPPUNWIND)
# define ROBIN_HOOD_PRIVATE_DEFINITION_HAS_EXCEPTIONS() 0
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_HAS_EXCEPTIONS() 1
#endif
// count leading/trailing bits
#if !defined(ROBIN_HOOD_DISABLE_INTRINSICS)
# ifdef _MSC_VER
# if ROBIN_HOOD(BITNESS) == 32
# define ROBIN_HOOD_PRIVATE_DEFINITION_BITSCANFORWARD() _BitScanForward
# else
# define ROBIN_HOOD_PRIVATE_DEFINITION_BITSCANFORWARD() _BitScanForward64
# endif
# include <intrin.h>
# pragma intrinsic(ROBIN_HOOD(BITSCANFORWARD))
// Wrapped in a lambda so a zero mask yields BITNESS instead of the
// indeterminate index that _BitScanForward leaves behind on failure.
# define ROBIN_HOOD_COUNT_TRAILING_ZEROES(x) \
[](size_t mask) noexcept -> int { \
unsigned long index; \
return ROBIN_HOOD(BITSCANFORWARD)(&index, mask) ? static_cast<int>(index) \
: ROBIN_HOOD(BITNESS); \
}(x)
# else
# if ROBIN_HOOD(BITNESS) == 32
# define ROBIN_HOOD_PRIVATE_DEFINITION_CTZ() __builtin_ctzl
# define ROBIN_HOOD_PRIVATE_DEFINITION_CLZ() __builtin_clzl
# else
# define ROBIN_HOOD_PRIVATE_DEFINITION_CTZ() __builtin_ctzll
# define ROBIN_HOOD_PRIVATE_DEFINITION_CLZ() __builtin_clzll
# endif
// The GCC builtins are undefined for 0, so guard and return BITNESS instead.
# define ROBIN_HOOD_COUNT_LEADING_ZEROES(x) ((x) ? ROBIN_HOOD(CLZ)(x) : ROBIN_HOOD(BITNESS))
# define ROBIN_HOOD_COUNT_TRAILING_ZEROES(x) ((x) ? ROBIN_HOOD(CTZ)(x) : ROBIN_HOOD(BITNESS))
# endif
#endif
// fallthrough
#ifndef __has_cpp_attribute // For backwards compatibility
# define __has_cpp_attribute(x) 0
#endif
#if __has_cpp_attribute(clang::fallthrough)
# define ROBIN_HOOD_PRIVATE_DEFINITION_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_cpp_attribute(gnu::fallthrough)
# define ROBIN_HOOD_PRIVATE_DEFINITION_FALLTHROUGH() [[gnu::fallthrough]]
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_FALLTHROUGH()
#endif
// likely/unlikely
// MSVC has no __builtin_expect, so the hints degrade to the bare condition.
#ifdef _MSC_VER
# define ROBIN_HOOD_LIKELY(condition) condition
# define ROBIN_HOOD_UNLIKELY(condition) condition
#else
# define ROBIN_HOOD_LIKELY(condition) __builtin_expect(condition, 1)
# define ROBIN_HOOD_UNLIKELY(condition) __builtin_expect(condition, 0)
#endif
// detect if native wchar_t type is availiable in MSVC
// (MSVC's /Zc:wchar_t- switch makes wchar_t a typedef instead of a distinct type)
#ifdef _MSC_VER
# ifdef _NATIVE_WCHAR_T_DEFINED
# define ROBIN_HOOD_PRIVATE_DEFINITION_HAS_NATIVE_WCHART() 1
# else
# define ROBIN_HOOD_PRIVATE_DEFINITION_HAS_NATIVE_WCHART() 0
# endif
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_HAS_NATIVE_WCHART() 1
#endif
// workaround missing "is_trivially_copyable" in g++ < 5.0
// See https://stackoverflow.com/a/31798726/48181
#if defined(__GNUC__) && __GNUC__ < 5
# define ROBIN_HOOD_IS_TRIVIALLY_COPYABLE(...) __has_trivial_copy(__VA_ARGS__)
#else
# define ROBIN_HOOD_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value
#endif
// helpers for C++ versions, see https://gcc.gnu.org/onlinedocs/cpp/Standard-Predefined-Macros.html
#define ROBIN_HOOD_PRIVATE_DEFINITION_CXX() __cplusplus
#define ROBIN_HOOD_PRIVATE_DEFINITION_CXX98() 199711L
#define ROBIN_HOOD_PRIVATE_DEFINITION_CXX11() 201103L
#define ROBIN_HOOD_PRIVATE_DEFINITION_CXX14() 201402L
#define ROBIN_HOOD_PRIVATE_DEFINITION_CXX17() 201703L
// [[nodiscard]] is C++17; on older standards the macro expands to nothing.
#if ROBIN_HOOD(CXX) >= ROBIN_HOOD(CXX17)
# define ROBIN_HOOD_PRIVATE_DEFINITION_NODISCARD() [[nodiscard]]
#else
# define ROBIN_HOOD_PRIVATE_DEFINITION_NODISCARD()
#endif
  196. namespace robin_hood {
#if ROBIN_HOOD(CXX) >= ROBIN_HOOD(CXX14)
// C++14 and later already ship everything needed from the standard library.
# define ROBIN_HOOD_STD std
#else
// c++11 compatibility layer
// Reimplements the small C++14 subset used below: alignment_of and the
// integer_sequence family.
namespace ROBIN_HOOD_STD {
template <class T>
struct alignment_of
: std::integral_constant<std::size_t, alignof(typename std::remove_all_extents<T>::type)> {};
template <class T, T... Ints>
class integer_sequence {
public:
using value_type = T;
static_assert(std::is_integral<value_type>::value, "not integral type");
static constexpr std::size_t size() noexcept {
return sizeof...(Ints);
}
};
template <std::size_t... Inds>
using index_sequence = integer_sequence<std::size_t, Inds...>;
namespace detail_ {
// Builds integer_sequence<T, Begin..End-1> by splitting the range in half and
// concatenating the two halves, which keeps template recursion depth
// logarithmic in the sequence length instead of linear.
template <class T, T Begin, T End, bool>
struct IntSeqImpl {
using TValue = T;
static_assert(std::is_integral<TValue>::value, "not integral type");
static_assert(Begin >= 0 && Begin < End, "unexpected argument (Begin<0 || Begin<=End)");
template <class, class>
struct IntSeqCombiner;
template <TValue... Inds0, TValue... Inds1>
struct IntSeqCombiner<integer_sequence<TValue, Inds0...>, integer_sequence<TValue, Inds1...>> {
using TResult = integer_sequence<TValue, Inds0..., Inds1...>;
};
using TResult =
typename IntSeqCombiner<typename IntSeqImpl<TValue, Begin, Begin + (End - Begin) / 2,
(End - Begin) / 2 == 1>::TResult,
typename IntSeqImpl<TValue, Begin + (End - Begin) / 2, End,
(End - Begin + 1) / 2 == 1>::TResult>::TResult;
};
// Base case: empty range produces the empty sequence.
template <class T, T Begin>
struct IntSeqImpl<T, Begin, Begin, false> {
using TValue = T;
static_assert(std::is_integral<TValue>::value, "not integral type");
static_assert(Begin >= 0, "unexpected argument (Begin<0)");
using TResult = integer_sequence<TValue>;
};
// Base case: single-element range produces <Begin>.
template <class T, T Begin, T End>
struct IntSeqImpl<T, Begin, End, true> {
using TValue = T;
static_assert(std::is_integral<TValue>::value, "not integral type");
static_assert(Begin >= 0, "unexpected argument (Begin<0)");
using TResult = integer_sequence<TValue, Begin>;
};
} // namespace detail_
template <class T, T N>
using make_integer_sequence = typename detail_::IntSeqImpl<T, 0, N, (N - 0) == 1>::TResult;
template <std::size_t N>
using make_index_sequence = make_integer_sequence<std::size_t, N>;
template <class... T>
using index_sequence_for = make_index_sequence<sizeof...(T)>;
} // namespace ROBIN_HOOD_STD
#endif
namespace detail {
// make sure we static_cast to the correct type for hash_int
// SizeT matches the native word width so integer hashing (hash_int, defined
// later in this file) mixes in the full size_t range on either bitness.
#if ROBIN_HOOD(BITNESS) == 64
using SizeT = uint64_t;
#else
using SizeT = uint32_t;
#endif
  264. template <typename T>
  265. T rotr(T x, unsigned k) {
  266. return (x >> k) | (x << (8U * sizeof(T) - k));
  267. }
// This cast gets rid of warnings like "cast from 'uint8_t*' {aka 'unsigned char*'} to
// 'uint64_t*' {aka 'long unsigned int*'} increases required alignment of target type". Use with
// care!
// Routing the conversion through void* hides the pointee type from the
// compiler's alignment diagnostics; the caller remains responsible for the
// pointer actually being suitably aligned for T (or only dereferencing it via
// alignment-safe operations such as memcpy).
template <typename T>
inline T reinterpret_cast_no_cast_align_warning(void* ptr) noexcept {
return reinterpret_cast<T>(ptr);
}
// const overload: same contract as above.
template <typename T>
inline T reinterpret_cast_no_cast_align_warning(void const* ptr) noexcept {
return reinterpret_cast<T>(ptr);
}
// make sure this is not inlined as it is slow and dramatically enlarges code, thus making other
// inlinings more difficult. Throws are also generally the slow path.
// With exceptions enabled this throws E constructed from args; in
// -fno-exceptions builds it aborts instead, so call sites are identical in
// both modes. Either way the function never returns.
template <typename E, typename... Args>
[[noreturn]] ROBIN_HOOD(NOINLINE)
#if ROBIN_HOOD(HAS_EXCEPTIONS)
void doThrow(Args&&... args) {
// NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-array-to-pointer-decay)
throw E(std::forward<Args>(args)...);
}
#else
void doThrow(Args&&... ROBIN_HOOD_UNUSED(args) /*unused*/) {
abort();
}
#endif
// Returns t unchanged when it is non-null; otherwise throws E (or aborts in
// no-exception builds) via doThrow. The null branch is marked unlikely since
// it represents the rare failure path (e.g. malloc returning nullptr).
template <typename E, typename T, typename... Args>
T* assertNotNull(T* t, Args&&... args) {
if (ROBIN_HOOD_UNLIKELY(nullptr == t)) {
doThrow<E>(std::forward<Args>(args)...);
}
return t;
}
  300. template <typename T>
  301. inline T unaligned_load(void const* ptr) noexcept {
  302. // using memcpy so we don't get into unaligned load problems.
  303. // compiler should optimize this very well anyways.
  304. T t;
  305. std::memcpy(&t, ptr, sizeof(T));
  306. return t;
  307. }
// Allocates bulks of memory for objects of type T. This deallocates the memory in the destructor,
// and keeps a linked list of the allocated memory around. Overhead per allocation is the size of a
// pointer.
//
// Internals: mHead is an intrusive free list threaded through the (still
// unconstructed) element slots themselves; mListForFree chains the malloc'd
// blocks so reset() can release them. The pool never constructs or destroys
// T objects — that is the caller's job (placement new / explicit dtor).
template <typename T, size_t MinNumAllocs = 4, size_t MaxNumAllocs = 256>
class BulkPoolAllocator {
public:
BulkPoolAllocator() noexcept = default;
// does not copy anything, just creates a new allocator.
BulkPoolAllocator(const BulkPoolAllocator& ROBIN_HOOD_UNUSED(o) /*unused*/) noexcept
: mHead(nullptr)
, mListForFree(nullptr) {}
// Move steals both lists and leaves the source empty.
BulkPoolAllocator(BulkPoolAllocator&& o) noexcept
: mHead(o.mHead)
, mListForFree(o.mListForFree) {
o.mListForFree = nullptr;
o.mHead = nullptr;
}
// Move assignment: frees own memory first, then takes over o's lists.
BulkPoolAllocator& operator=(BulkPoolAllocator&& o) noexcept {
reset();
mHead = o.mHead;
mListForFree = o.mListForFree;
o.mListForFree = nullptr;
o.mHead = nullptr;
return *this;
}
BulkPoolAllocator&
// NOLINTNEXTLINE(bugprone-unhandled-self-assignment,cert-oop54-cpp)
operator=(const BulkPoolAllocator& ROBIN_HOOD_UNUSED(o) /*unused*/) noexcept {
// does not do anything
return *this;
}
~BulkPoolAllocator() noexcept {
reset();
}
// Deallocates all allocated memory.
void reset() noexcept {
// Each block's first word stores the pointer to the next block, so walk
// the chain and free every block.
while (mListForFree) {
T* tmp = *mListForFree;
ROBIN_HOOD_LOG("std::free")
std::free(mListForFree);
mListForFree = reinterpret_cast_no_cast_align_warning<T**>(tmp);
}
mHead = nullptr;
}
// allocates, but does NOT initialize. Use in-place new constructor, e.g.
// T* obj = pool.allocate();
// ::new (static_cast<void*>(obj)) T();
T* allocate() {
T* tmp = mHead;
if (!tmp) {
// free list exhausted: grab a new bulk block (slow path, not inlined)
tmp = performAllocation();
}
// pop: the slot's first word holds the next free slot
mHead = *reinterpret_cast_no_cast_align_warning<T**>(tmp);
return tmp;
}
// does not actually deallocate but puts it in store.
// make sure you have already called the destructor! e.g. with
// obj->~T();
// pool.deallocate(obj);
void deallocate(T* obj) noexcept {
// push the slot back onto the intrusive free list
*reinterpret_cast_no_cast_align_warning<T**>(obj) = mHead;
mHead = obj;
}
// Adds an already allocated block of memory to the allocator. This allocator is from now on
// responsible for freeing the data (with free()). If the provided data is not large enough to
// make use of, it is immediately freed. Otherwise it is reused and freed in the destructor.
void addOrFree(void* ptr, const size_t numBytes) noexcept {
// calculate number of available elements in ptr
if (numBytes < ALIGNMENT + ALIGNED_SIZE) {
// not enough data for at least one element. Free and return.
ROBIN_HOOD_LOG("std::free")
std::free(ptr);
} else {
ROBIN_HOOD_LOG("add to buffer")
add(ptr, numBytes);
}
}
void swap(BulkPoolAllocator<T, MinNumAllocs, MaxNumAllocs>& other) noexcept {
using std::swap;
swap(mHead, other.mHead);
swap(mListForFree, other.mListForFree);
}
private:
// iterates the list of allocated memory to calculate how many to alloc next.
// Recalculating this each time saves us a size_t member.
// This ignores the fact that memory blocks might have been added manually with addOrFree. In
// practice, this should not matter much.
ROBIN_HOOD(NODISCARD) size_t calcNumElementsToAlloc() const noexcept {
auto tmp = mListForFree;
size_t numAllocs = MinNumAllocs;
// double the request per existing block, capped at MaxNumAllocs
while (numAllocs * 2 <= MaxNumAllocs && tmp) {
auto x = reinterpret_cast<T***>(tmp);
tmp = *x;
numAllocs *= 2;
}
return numAllocs;
}
// WARNING: Underflow if numBytes < ALIGNMENT! This is guarded in addOrFree().
// Takes ownership of ptr: the first ALIGNMENT bytes link the block into
// mListForFree; the remainder is carved into ALIGNED_SIZE slots that are
// chained together and pushed onto the mHead free list.
void add(void* ptr, const size_t numBytes) noexcept {
const size_t numElements = (numBytes - ALIGNMENT) / ALIGNED_SIZE;
auto data = reinterpret_cast<T**>(ptr);
// link free list
auto x = reinterpret_cast<T***>(data);
*x = mListForFree;
mListForFree = data;
// create linked list for newly allocated data
auto* const headT =
reinterpret_cast_no_cast_align_warning<T*>(reinterpret_cast<char*>(ptr) + ALIGNMENT);
auto* const head = reinterpret_cast<char*>(headT);
// Visual Studio compiler automatically unrolls this loop, which is pretty cool
for (size_t i = 0; i < numElements; ++i) {
*reinterpret_cast_no_cast_align_warning<char**>(head + i * ALIGNED_SIZE) =
head + (i + 1) * ALIGNED_SIZE;
}
// last one points to 0
*reinterpret_cast_no_cast_align_warning<T**>(head + (numElements - 1) * ALIGNED_SIZE) =
mHead;
mHead = headT;
}
// Called when no memory is available (mHead == 0).
// Don't inline this slow path.
ROBIN_HOOD(NOINLINE) T* performAllocation() {
size_t const numElementsToAlloc = calcNumElementsToAlloc();
// alloc new memory: [prev |T, T, ... T]
size_t const bytes = ALIGNMENT + ALIGNED_SIZE * numElementsToAlloc;
ROBIN_HOOD_LOG("std::malloc " << bytes << " = " << ALIGNMENT << " + " << ALIGNED_SIZE
<< " * " << numElementsToAlloc)
add(assertNotNull<std::bad_alloc>(std::malloc(bytes)), bytes);
return mHead;
}
// enforce byte alignment of the T's
#if ROBIN_HOOD(CXX) >= ROBIN_HOOD(CXX14)
static constexpr size_t ALIGNMENT =
(std::max)(std::alignment_of<T>::value, std::alignment_of<T*>::value);
#else
static const size_t ALIGNMENT =
(ROBIN_HOOD_STD::alignment_of<T>::value > ROBIN_HOOD_STD::alignment_of<T*>::value)
? ROBIN_HOOD_STD::alignment_of<T>::value
: +ROBIN_HOOD_STD::alignment_of<T*>::value; // the + is for walkarround
#endif
// slot size: sizeof(T) rounded up to the next multiple of ALIGNMENT
static constexpr size_t ALIGNED_SIZE = ((sizeof(T) - 1) / ALIGNMENT + 1) * ALIGNMENT;
static_assert(MinNumAllocs >= 1, "MinNumAllocs");
static_assert(MaxNumAllocs >= MinNumAllocs, "MaxNumAllocs");
static_assert(ALIGNED_SIZE >= sizeof(T*), "ALIGNED_SIZE");
static_assert(0 == (ALIGNED_SIZE % sizeof(T*)), "ALIGNED_SIZE mod");
static_assert(ALIGNMENT >= sizeof(T*), "ALIGNMENT");
// head of the intrusive free list of element slots (nullptr == empty)
T* mHead{nullptr};
// chain of malloc'd blocks, released by reset()
T** mListForFree{nullptr};
};
// Primary template, selected by the IsFlat flag; only the two specializations
// below are ever instantiated.
template <typename T, size_t MinSize, size_t MaxSize, bool IsFlat>
struct NodeAllocator;
// dummy allocator that does nothing
// (IsFlat == true: handed-over buffers are never reused, so just release them;
// presumably the flat-map case where elements live inline — confirm at the
// instantiation site later in this file)
template <typename T, size_t MinSize, size_t MaxSize>
struct NodeAllocator<T, MinSize, MaxSize, true> {
// we are not using the data, so just free it.
void addOrFree(void* ptr, size_t ROBIN_HOOD_UNUSED(numBytes) /*unused*/) noexcept {
ROBIN_HOOD_LOG("std::free")
std::free(ptr);
}
};
// IsFlat == false: inherit the pooling behavior of BulkPoolAllocator so freed
// nodes are recycled instead of returned to the system.
template <typename T, size_t MinSize, size_t MaxSize>
struct NodeAllocator<T, MinSize, MaxSize, false> : public BulkPoolAllocator<T, MinSize, MaxSize> {};
  470. // dummy hash, unsed as mixer when robin_hood::hash is already used
  471. template <typename T>
  472. struct identity_hash {
  473. constexpr size_t operator()(T const& obj) const noexcept {
  474. return static_cast<size_t>(obj);
  475. }
  476. };
// c++14 doesn't have is_nothrow_swappable, and clang++ 6.0.1 doesn't like it either, so I'm making
// my own here.
namespace swappable {
#if ROBIN_HOOD(CXX) < ROBIN_HOOD(CXX17)
// Pre-C++17: emulate std::is_nothrow_swappable by asking whether an
// ADL-resolved swap(T&, T&) call is noexcept (std::swap is pulled in here as
// the fallback overload).
using std::swap;
template <typename T>
struct nothrow {
static const bool value = noexcept(swap(std::declval<T&>(), std::declval<T&>()));
};
#else
// C++17 and later ship the trait directly.
template <typename T>
struct nothrow {
static const bool value = std::is_nothrow_swappable<T>::value;
};
#endif
} // namespace swappable
} // namespace detail
// Empty tag type for transparent (heterogeneous) lookup overloads; presumably
// dispatched on by find/count/erase overloads later in this file — confirm at
// the call sites, which are outside this chunk.
struct is_transparent_tag {};
// A custom pair implementation is used in the map because std::pair is not is_trivially_copyable,
// which means it would not be allowed to be used in std::memcpy. This struct is copyable, which is
// also tested.
// All noexcept specifications are spelled out by hand so the pair is nothrow
// exactly when the corresponding member operations are.
template <typename T1, typename T2>
struct pair {
using first_type = T1;
using second_type = T2;
// Default ctor: SFINAE-restricted to default-constructible members.
template <typename U1 = T1, typename U2 = T2,
typename = typename std::enable_if<std::is_default_constructible<U1>::value &&
std::is_default_constructible<U2>::value>::type>
constexpr pair() noexcept(noexcept(U1()) && noexcept(U2()))
: first()
, second() {}
// pair constructors are explicit so we don't accidentally call this ctor when we don't have to.
explicit constexpr pair(std::pair<T1, T2> const& o) noexcept(
noexcept(T1(std::declval<T1 const&>())) && noexcept(T2(std::declval<T2 const&>())))
: first(o.first)
, second(o.second) {}
// pair constructors are explicit so we don't accidentally call this ctor when we don't have to.
explicit constexpr pair(std::pair<T1, T2>&& o) noexcept(noexcept(
T1(std::move(std::declval<T1&&>()))) && noexcept(T2(std::move(std::declval<T2&&>()))))
: first(std::move(o.first))
, second(std::move(o.second)) {}
constexpr pair(T1&& a, T2&& b) noexcept(noexcept(
T1(std::move(std::declval<T1&&>()))) && noexcept(T2(std::move(std::declval<T2&&>()))))
: first(std::move(a))
, second(std::move(b)) {}
// Perfect-forwarding ctor for convertible argument types.
template <typename U1, typename U2>
constexpr pair(U1&& a, U2&& b) noexcept(noexcept(T1(std::forward<U1>(
std::declval<U1&&>()))) && noexcept(T2(std::forward<U2>(std::declval<U2&&>()))))
: first(std::forward<U1>(a))
, second(std::forward<U2>(b)) {}
// Piecewise construction: delegates to the index_sequence ctor below, which
// unpacks each tuple into the corresponding member's constructor arguments.
template <typename... U1, typename... U2>
constexpr pair(
std::piecewise_construct_t /*unused*/, std::tuple<U1...> a,
std::tuple<U2...> b) noexcept(noexcept(pair(std::declval<std::tuple<U1...>&>(),
std::declval<std::tuple<U2...>&>(),
ROBIN_HOOD_STD::index_sequence_for<U1...>(),
ROBIN_HOOD_STD::index_sequence_for<U2...>())))
: pair(a, b, ROBIN_HOOD_STD::index_sequence_for<U1...>(),
ROBIN_HOOD_STD::index_sequence_for<U2...>()) {}
// constructor called from the std::piecewise_construct_t ctor
template <typename... U1, size_t... I1, typename... U2, size_t... I2>
pair(std::tuple<U1...>& a, std::tuple<U2...>& b, ROBIN_HOOD_STD::index_sequence<I1...> /*unused*/, ROBIN_HOOD_STD::index_sequence<I2...> /*unused*/) noexcept(
noexcept(T1(std::forward<U1>(std::get<I1>(
std::declval<std::tuple<
U1...>&>()))...)) && noexcept(T2(std::
forward<U2>(std::get<I2>(
std::declval<std::tuple<U2...>&>()))...)))
: first(std::forward<U1>(std::get<I1>(a))...)
, second(std::forward<U2>(std::get<I2>(b))...) {
// make visual studio compiler happy about warning about unused a & b.
// Visual studio's pair implementation disables warning 4100.
(void)a;
(void)b;
}
// Member-wise swap; nothrow iff both members swap without throwing.
void swap(pair<T1, T2>& o) noexcept((detail::swappable::nothrow<T1>::value) &&
(detail::swappable::nothrow<T2>::value)) {
using std::swap;
swap(first, o.first);
swap(second, o.second);
}
T1 first; // NOLINT(misc-non-private-member-variables-in-classes)
T2 second; // NOLINT(misc-non-private-member-variables-in-classes)
};
// Free swap for robin_hood::pair, forwarding to the member swap.
template <typename A, typename B>
inline void swap(pair<A, B>& a, pair<A, B>& b) noexcept(
    noexcept(std::declval<pair<A, B>&>().swap(std::declval<pair<A, B>&>()))) {
    a.swap(b);
}

// Equality: member-wise comparison, same semantics as std::pair.
template <typename A, typename B>
inline constexpr bool operator==(pair<A, B> const& x, pair<A, B> const& y) {
    return (x.first == y.first) && (x.second == y.second);
}
template <typename A, typename B>
inline constexpr bool operator!=(pair<A, B> const& x, pair<A, B> const& y) {
    return !(x == y);
}

// Lexicographic ordering; implemented purely in terms of operator< on A and B.
template <typename A, typename B>
inline constexpr bool operator<(pair<A, B> const& x, pair<A, B> const& y) noexcept(noexcept(
    std::declval<A const&>() < std::declval<A const&>()) && noexcept(std::declval<B const&>() <
                                                                     std::declval<B const&>())) {
    return x.first < y.first || (!(y.first < x.first) && x.second < y.second);
}
// The remaining orderings are all derived from operator<.
template <typename A, typename B>
inline constexpr bool operator>(pair<A, B> const& x, pair<A, B> const& y) {
    return y < x;
}
template <typename A, typename B>
inline constexpr bool operator<=(pair<A, B> const& x, pair<A, B> const& y) {
    return !(x > y);
}
template <typename A, typename B>
inline constexpr bool operator>=(pair<A, B> const& x, pair<A, B> const& y) {
    return !(x < y);
}
// Hashes an arbitrary byte range. Constants and structure follow MurmurHash64A:
// mix one 64-bit block at a time, then fold in the 0-7 trailing bytes, then
// finalize with an avalanche step. Unaligned reads go through detail::unaligned_load.
inline size_t hash_bytes(void const* ptr, size_t len) noexcept {
    static constexpr uint64_t m = UINT64_C(0xc6a4a7935bd1e995);
    static constexpr uint64_t seed = UINT64_C(0xe17a1465);
    static constexpr unsigned int r = 47;

    auto const* const data64 = static_cast<uint64_t const*>(ptr);
    uint64_t h = seed ^ (len * m);

    // bulk: process full 8-byte blocks
    size_t const n_blocks = len / 8;
    for (size_t i = 0; i < n_blocks; ++i) {
        auto k = detail::unaligned_load<uint64_t>(data64 + i);

        k *= m;
        k ^= k >> r;
        k *= m;

        h ^= k;
        h *= m;
    }

    // tail: fold in the remaining 0-7 bytes, highest byte first; the cases
    // deliberately fall through.
    auto const* const data8 = reinterpret_cast<uint8_t const*>(data64 + n_blocks);
    switch (len & 7U) {
    case 7:
        h ^= static_cast<uint64_t>(data8[6]) << 48U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 6:
        h ^= static_cast<uint64_t>(data8[5]) << 40U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 5:
        h ^= static_cast<uint64_t>(data8[4]) << 32U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 4:
        h ^= static_cast<uint64_t>(data8[3]) << 24U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 3:
        h ^= static_cast<uint64_t>(data8[2]) << 16U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 2:
        h ^= static_cast<uint64_t>(data8[1]) << 8U;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    case 1:
        h ^= static_cast<uint64_t>(data8[0]);
        h *= m;
        ROBIN_HOOD(FALLTHROUGH); // FALLTHROUGH
    default:
        break;
    }

    // finalization: avalanche the remaining entropy into all bit positions
    h ^= h >> r;
    h *= m;
    h ^= h >> r;
    return static_cast<size_t>(h);
}
  638. inline size_t hash_int(uint64_t x) noexcept {
  639. // inspired by lemire's strongly universal hashing
  640. // https://lemire.me/blog/2018/08/15/fast-strongly-universal-64-bit-hashing-everywhere/
  641. //
  642. // Instead of shifts, we use rotations so we don't lose any bits.
  643. //
  644. // Added a final multiplcation with a constant for more mixing. It is most important that
  645. // the lower bits are well mixed.
  646. auto h1 = x * UINT64_C(0xA24BAED4963EE407);
  647. auto h2 = detail::rotr(x, 32U) * UINT64_C(0x9FB21C651E98DF25);
  648. auto h = detail::rotr(h1 + h2, 32U);
  649. return static_cast<size_t>(h);
  650. }
  651. // A thin wrapper around std::hash, performing an additional simple mixing step of the result.
// Primary template: defer to std::hash, then mix the result with hash_int.
template <typename T, typename Enable = void>
struct hash : public std::hash<T> {
    size_t operator()(T const& obj) const
        noexcept(noexcept(std::declval<std::hash<T>>().operator()(std::declval<T const&>()))) {
        // call base hash
        auto result = std::hash<T>::operator()(obj);
        // return a mixed version of that, to be safe against an identity hash
        return hash_int(static_cast<detail::SizeT>(result));
    }
};
  662. template <typename CharT>
  663. struct hash<std::basic_string<CharT>> {
  664. size_t operator()(std::basic_string<CharT> const& str) const noexcept {
  665. return hash_bytes(str.data(), sizeof(CharT) * str.size());
  666. }
  667. };
  668. #if ROBIN_HOOD(CXX) >= ROBIN_HOOD(CXX17)
  669. template <typename CharT>
  670. struct hash<std::basic_string_view<CharT>> {
  671. size_t operator()(std::basic_string_view<CharT> const& sv) const noexcept {
  672. return hash_bytes(sv.data(), sizeof(CharT) * sv.size());
  673. }
  674. };
  675. #endif
  676. template <class T>
  677. struct hash<T*> {
  678. size_t operator()(T* ptr) const noexcept {
  679. return hash_int(reinterpret_cast<detail::SizeT>(ptr));
  680. }
  681. };
  682. template <class T>
  683. struct hash<std::unique_ptr<T>> {
  684. size_t operator()(std::unique_ptr<T> const& ptr) const noexcept {
  685. return hash_int(reinterpret_cast<detail::SizeT>(ptr.get()));
  686. }
  687. };
  688. template <class T>
  689. struct hash<std::shared_ptr<T>> {
  690. size_t operator()(std::shared_ptr<T> const& ptr) const noexcept {
  691. return hash_int(reinterpret_cast<detail::SizeT>(ptr.get()));
  692. }
  693. };
  694. template <typename Enum>
  695. struct hash<Enum, typename std::enable_if<std::is_enum<Enum>::value>::type> {
  696. size_t operator()(Enum e) const noexcept {
  697. using Underlying = typename std::underlying_type<Enum>::type;
  698. return hash<Underlying>{}(static_cast<Underlying>(e));
  699. }
  700. };
// Defines a robin_hood::hash specialization for an integral type T that mixes
// the value with hash_int(). Kept as a macro so the list of instantiations
// below stays compact.
#define ROBIN_HOOD_HASH_INT(T)                           \
    template <>                                          \
    struct hash<T> {                                     \
        size_t operator()(T const& obj) const noexcept { \
            return hash_int(static_cast<uint64_t>(obj)); \
        }                                                \
    }
#if defined(__GNUC__) && !defined(__clang__)
// the static_cast<uint64_t> inside ROBIN_HOOD_HASH_INT is a no-op for 64-bit
// types; silence GCC's -Wuseless-cast for the block of instantiations below.
#    pragma GCC diagnostic push
#    pragma GCC diagnostic ignored "-Wuseless-cast"
#endif
// see https://en.cppreference.com/w/cpp/utility/hash
ROBIN_HOOD_HASH_INT(bool);
ROBIN_HOOD_HASH_INT(char);
ROBIN_HOOD_HASH_INT(signed char);
ROBIN_HOOD_HASH_INT(unsigned char);
ROBIN_HOOD_HASH_INT(char16_t);
ROBIN_HOOD_HASH_INT(char32_t);
#if ROBIN_HOOD(HAS_NATIVE_WCHART)
ROBIN_HOOD_HASH_INT(wchar_t);
#endif
ROBIN_HOOD_HASH_INT(short);
ROBIN_HOOD_HASH_INT(unsigned short);
ROBIN_HOOD_HASH_INT(int);
ROBIN_HOOD_HASH_INT(unsigned int);
ROBIN_HOOD_HASH_INT(long);
ROBIN_HOOD_HASH_INT(long long);
ROBIN_HOOD_HASH_INT(unsigned long);
ROBIN_HOOD_HASH_INT(unsigned long long);
#if defined(__GNUC__) && !defined(__clang__)
#    pragma GCC diagnostic pop
#endif
  733. namespace detail {
// Maps any type to void — a C++11-compatible equivalent of std::void_t, used
// by the detection idiom below.
template <typename T>
struct void_type {
    using type = void;
};
// Detects whether T declares a nested type `is_transparent`, i.e. supports
// heterogeneous lookup. Primary template: no such member.
template <typename T, typename = void>
struct has_is_transparent : public std::false_type {};

// Specialization chosen via SFINAE when T::is_transparent names a valid type.
template <typename T>
struct has_is_transparent<T, typename void_type<typename T::is_transparent>::type>
    : public std::true_type {};
  743. // using wrapper classes for hash and key_equal prevents the diamond problem when the same type
  744. // is used. see https://stackoverflow.com/a/28771920/48181
// Thin wrapper so the hasher can serve as a distinct base class of Table
// (avoids the diamond problem noted in the comment above).
template <typename T>
struct WrapHash : public T {
    WrapHash() = default;
    explicit WrapHash(T const& o) noexcept(noexcept(T(std::declval<T const&>())))
        : T(o) {}
};
// Thin wrapper so the key-equality functor can serve as a distinct base class
// of Table, even when it is the same type as the hasher.
template <typename T>
struct WrapKeyEqual : public T {
    WrapKeyEqual() = default;
    explicit WrapKeyEqual(T const& o) noexcept(noexcept(T(std::declval<T const&>())))
        : T(o) {}
};
  757. // A highly optimized hashmap implementation, using the Robin Hood algorithm.
  758. //
  759. // In most cases, this map should be usable as a drop-in replacement for std::unordered_map, but
  760. // be about 2x faster in most cases and require much less allocations.
  761. //
  762. // This implementation uses the following memory layout:
  763. //
  764. // [Node, Node, ... Node | info, info, ... infoSentinel ]
  765. //
  766. // * Node: either a DataNode that directly has the std::pair<key, val> as member,
  767. // or a DataNode with a pointer to std::pair<key,val>. Which DataNode representation to use
// depends on how fast the swap() operation is. Heuristically, this is automatically chosen
  769. // based on sizeof(). there are always 2^n Nodes.
  770. //
  771. // * info: Each Node in the map has a corresponding info byte, so there are 2^n info bytes.
  772. // Each byte is initialized to 0, meaning the corresponding Node is empty. Set to 1 means the
  773. // corresponding node contains data. Set to 2 means the corresponding Node is filled, but it
  774. // actually belongs to the previous position and was pushed out because that place is already
  775. // taken.
  776. //
  777. // * infoSentinel: Sentinel byte set to 1, so that iterator's ++ can stop at end() without the
// need for an idx variable.
  779. //
  780. // According to STL, order of templates has effect on throughput. That's why I've moved the
  781. // boolean to the front.
  782. // https://www.reddit.com/r/cpp/comments/ahp6iu/compile_time_binary_size_reductions_and_cs_future/eeguck4/
  783. template <bool IsFlat, size_t MaxLoadFactor100, typename Key, typename T, typename Hash,
  784. typename KeyEqual>
  785. class Table
  786. : public WrapHash<Hash>,
  787. public WrapKeyEqual<KeyEqual>,
  788. detail::NodeAllocator<
  789. typename std::conditional<
  790. std::is_void<T>::value, Key,
  791. robin_hood::pair<typename std::conditional<IsFlat, Key, Key const>::type, T>>::type,
  792. 4, 16384, IsFlat> {
public:
static constexpr bool is_flat = IsFlat;
static constexpr bool is_map = !std::is_void<T>::value;
static constexpr bool is_set = !is_map;
// transparent (heterogeneous) lookup requires both functors to opt in
static constexpr bool is_transparent =
    has_is_transparent<Hash>::value && has_is_transparent<KeyEqual>::value;

using key_type = Key;
using mapped_type = T;
// Sets store the key directly; maps store a robin_hood::pair. For flat maps
// the key is non-const (presumably so whole nodes can be moved around in the
// flat array — note: inferred from the conditional, confirm against usage).
using value_type = typename std::conditional<
    is_set, Key,
    robin_hood::pair<typename std::conditional<is_flat, Key, Key const>::type, T>>::type;
using size_type = size_t;
using hasher = Hash;
using key_equal = KeyEqual;
using Self = Table<IsFlat, MaxLoadFactor100, key_type, mapped_type, hasher, key_equal>;

private:
static_assert(MaxLoadFactor100 > 10 && MaxLoadFactor100 < 100,
              "MaxLoadFactor100 needs to be >10 && < 100");

using WHash = WrapHash<Hash>;
using WKeyEqual = WrapKeyEqual<KeyEqual>;

// configuration defaults
// make sure we have 8 elements, needed to quickly rehash mInfo
static constexpr size_t InitialNumElements = sizeof(uint64_t);
static constexpr uint32_t InitialInfoNumBits = 5;
static constexpr uint8_t InitialInfoInc = 1U << InitialInfoNumBits;
static constexpr size_t InfoMask = InitialInfoInc - 1U;
static constexpr uint8_t InitialInfoHashShift = 0;
using DataPool = detail::NodeAllocator<value_type, 4, 16384, IsFlat>;

// type needs to be wider than uint8_t.
using InfoType = uint32_t;
// Primary template; only the two specializations below (inline / heap storage)
// are ever instantiated.
template <typename M, bool>
class DataNode {};
  829. // Small: just allocate on the stack.
  830. template <typename M>
  831. class DataNode<M, true> final {
  832. public:
  833. template <typename... Args>
  834. explicit DataNode(M& ROBIN_HOOD_UNUSED(map) /*unused*/, Args&&... args) noexcept(
  835. noexcept(value_type(std::forward<Args>(args)...)))
  836. : mData(std::forward<Args>(args)...) {}
  837. DataNode(M& ROBIN_HOOD_UNUSED(map) /*unused*/, DataNode<M, true>&& n) noexcept(
  838. std::is_nothrow_move_constructible<value_type>::value)
  839. : mData(std::move(n.mData)) {}
  840. // doesn't do anything
  841. void destroy(M& ROBIN_HOOD_UNUSED(map) /*unused*/) noexcept {}
  842. void destroyDoNotDeallocate() noexcept {}
  843. value_type const* operator->() const noexcept {
  844. return &mData;
  845. }
  846. value_type* operator->() noexcept {
  847. return &mData;
  848. }
  849. const value_type& operator*() const noexcept {
  850. return mData;
  851. }
  852. value_type& operator*() noexcept {
  853. return mData;
  854. }
  855. template <typename VT = value_type>
  856. ROBIN_HOOD(NODISCARD)
  857. typename std::enable_if<is_map, typename VT::first_type&>::type getFirst() noexcept {
  858. return mData.first;
  859. }
  860. template <typename VT = value_type>
  861. ROBIN_HOOD(NODISCARD)
  862. typename std::enable_if<is_set, VT&>::type getFirst() noexcept {
  863. return mData;
  864. }
  865. template <typename VT = value_type>
  866. ROBIN_HOOD(NODISCARD)
  867. typename std::enable_if<is_map, typename VT::first_type const&>::type
  868. getFirst() const noexcept {
  869. return mData.first;
  870. }
  871. template <typename VT = value_type>
  872. ROBIN_HOOD(NODISCARD)
  873. typename std::enable_if<is_set, VT const&>::type getFirst() const noexcept {
  874. return mData;
  875. }
  876. template <typename MT = mapped_type>
  877. ROBIN_HOOD(NODISCARD)
  878. typename std::enable_if<is_map, MT&>::type getSecond() noexcept {
  879. return mData.second;
  880. }
  881. template <typename MT = mapped_type>
  882. ROBIN_HOOD(NODISCARD)
  883. typename std::enable_if<is_set, MT const&>::type getSecond() const noexcept {
  884. return mData.second;
  885. }
  886. void swap(DataNode<M, true>& o) noexcept(
  887. noexcept(std::declval<value_type>().swap(std::declval<value_type>()))) {
  888. mData.swap(o.mData);
  889. }
  890. private:
  891. value_type mData;
  892. };
  893. // big object: allocate on heap.
// Big node: the value lives on the heap (storage comes from the map's data
// pool), so the node itself is just a pointer and swap() is cheap.
template <typename M>
class DataNode<M, false> {
public:
    template <typename... Args>
    explicit DataNode(M& map, Args&&... args)
        : mData(map.allocate()) {
        // placement-new the value into pool-provided storage
        ::new (static_cast<void*>(mData)) value_type(std::forward<Args>(args)...);
    }

    DataNode(M& ROBIN_HOOD_UNUSED(map) /*unused*/, DataNode<M, false>&& n) noexcept
        : mData(std::move(n.mData)) {}

    void destroy(M& map) noexcept {
        // don't deallocate, just put it into list of datapool.
        mData->~value_type();
        map.deallocate(mData);
    }

    // destructs the value but does NOT return the storage to the data pool
    void destroyDoNotDeallocate() noexcept {
        mData->~value_type();
    }

    value_type const* operator->() const noexcept {
        return mData;
    }
    value_type* operator->() noexcept {
        return mData;
    }

    const value_type& operator*() const {
        return *mData;
    }
    value_type& operator*() {
        return *mData;
    }

    // getFirst(): the key for maps, the whole value for sets.
    template <typename VT = value_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_map, typename VT::first_type&>::type getFirst() noexcept {
        return mData->first;
    }
    template <typename VT = value_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_set, VT&>::type getFirst() noexcept {
        return *mData;
    }

    template <typename VT = value_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_map, typename VT::first_type const&>::type
    getFirst() const noexcept {
        return mData->first;
    }
    template <typename VT = value_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_set, VT const&>::type getFirst() const noexcept {
        return *mData;
    }

    // getSecond(): the mapped value; only meaningful for maps.
    template <typename MT = mapped_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_map, MT&>::type getSecond() noexcept {
        return mData->second;
    }
    template <typename MT = mapped_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<is_map, MT const&>::type getSecond() const noexcept {
        return mData->second;
    }

    void swap(DataNode<M, false>& o) noexcept {
        using std::swap;
        swap(mData, o.mData);
    }

private:
    value_type* mData;
};
using Node = DataNode<Self, IsFlat>;

// helpers for doInsert: extract first entry (only const required)
ROBIN_HOOD(NODISCARD) key_type const& getFirstConst(Node const& n) const noexcept {
    return n.getFirst();
}

// in case we have void mapped_type, we are not using a pair, thus we just route k through.
// No need to disable this because it's just not used if not applicable.
ROBIN_HOOD(NODISCARD) key_type const& getFirstConst(key_type const& k) const noexcept {
    return k;
}

// in case we have non-void mapped_type, we have a standard robin_hood::pair
template <typename Q = mapped_type>
ROBIN_HOOD(NODISCARD)
typename std::enable_if<!std::is_void<Q>::value, key_type const&>::type
getFirstConst(value_type const& vt) const noexcept {
    return vt.first;
}
// Cloner //////////////////////////////////////////////////////////
// Copies all nodes from one table into an identically-sized one.
template <typename M, bool UseMemcpy>
struct Cloner;

// fast path: Just copy data, without allocating anything. Used when the node
// type is trivially copyable (see cloneData); one bulk byte copy covers both
// the node array and the info bytes.
template <typename M>
struct Cloner<M, true> {
    void operator()(M const& source, M& target) const {
        auto const* const src = reinterpret_cast<char const*>(source.mKeyVals);
        auto* tgt = reinterpret_cast<char*>(target.mKeyVals);
        auto const numElementsWithBuffer = target.calcNumElementsWithBuffer(target.mMask + 1);
        std::copy(src, src + target.calcNumBytesTotal(numElementsWithBuffer), tgt);
    }
};

// slow path: copy the info bytes, then copy-construct every occupied node.
template <typename M>
struct Cloner<M, false> {
    void operator()(M const& s, M& t) const {
        auto const numElementsWithBuffer = t.calcNumElementsWithBuffer(t.mMask + 1);
        std::copy(s.mInfo, s.mInfo + t.calcNumBytesInfo(numElementsWithBuffer), t.mInfo);

        for (size_t i = 0; i < numElementsWithBuffer; ++i) {
            if (t.mInfo[i]) {
                // non-zero info byte: slot i holds data in the source
                ::new (static_cast<void*>(t.mKeyVals + i)) Node(t, *s.mKeyVals[i]);
            }
        }
    }
};
// Destroyer ///////////////////////////////////////////////////////
// Destroys all nodes of a table; specialized on whether destruction is trivial.
template <typename M, bool IsFlatAndTrivial>
struct Destroyer {};

// trivial case: nothing needs destructing, just reset the element count.
template <typename M>
struct Destroyer<M, true> {
    void nodes(M& m) const noexcept {
        m.mNumElements = 0;
    }
    void nodesDoNotDeallocate(M& m) const noexcept {
        m.mNumElements = 0;
    }
};

// non-trivial case: walk the info bytes and destruct every occupied node.
template <typename M>
struct Destroyer<M, false> {
    void nodes(M& m) const noexcept {
        m.mNumElements = 0;
        // clear also resets mInfo to 0, that's sometimes not necessary.
        auto const numElementsWithBuffer = m.calcNumElementsWithBuffer(m.mMask + 1);

        for (size_t idx = 0; idx < numElementsWithBuffer; ++idx) {
            if (0 != m.mInfo[idx]) {
                Node& n = m.mKeyVals[idx];
                n.destroy(m);
                n.~Node();
            }
        }
    }

    // like nodes(), but skips returning heap node storage to the data pool
    // (uses destroyDoNotDeallocate instead of destroy).
    void nodesDoNotDeallocate(M& m) const noexcept {
        m.mNumElements = 0;
        // clear also resets mInfo to 0, that's sometimes not necessary.
        auto const numElementsWithBuffer = m.calcNumElementsWithBuffer(m.mMask + 1);
        for (size_t idx = 0; idx < numElementsWithBuffer; ++idx) {
            if (0 != m.mInfo[idx]) {
                Node& n = m.mKeyVals[idx];
                n.destroyDoNotDeallocate();
                n.~Node();
            }
        }
    }
};
// Iter ////////////////////////////////////////////////////////////

// tag to request that the iterator immediately skips to the first occupied slot
struct fast_forward_tag {};

// generic iterator for both const_iterator and iterator.
template <bool IsConst>
// NOLINTNEXTLINE(hicpp-special-member-functions,cppcoreguidelines-special-member-functions)
class Iter {
private:
    using NodePtr = typename std::conditional<IsConst, Node const*, Node*>::type;

public:
    using difference_type = std::ptrdiff_t;
    using value_type = typename Self::value_type;
    using reference = typename std::conditional<IsConst, value_type const&, value_type&>::type;
    using pointer = typename std::conditional<IsConst, value_type const*, value_type*>::type;
    using iterator_category = std::forward_iterator_tag;

    // default constructed iterator can be compared to itself, but WON'T return true when
    // compared to end().
    Iter() = default;

    // Rule of zero: nothing specified. The conversion constructor is only enabled for
    // iterator to const_iterator, so it doesn't accidentally work as a copy ctor.

    // Conversion constructor from iterator to const_iterator.
    template <bool OtherIsConst,
              typename = typename std::enable_if<IsConst && !OtherIsConst>::type>
    // NOLINTNEXTLINE(hicpp-explicit-conversions)
    Iter(Iter<OtherIsConst> const& other) noexcept
        : mKeyVals(other.mKeyVals)
        , mInfo(other.mInfo) {}

    Iter(NodePtr valPtr, uint8_t const* infoPtr) noexcept
        : mKeyVals(valPtr)
        , mInfo(infoPtr) {}

    // Starts at valPtr/infoPtr and advances to the first occupied slot.
    Iter(NodePtr valPtr, uint8_t const* infoPtr,
         fast_forward_tag ROBIN_HOOD_UNUSED(tag) /*unused*/) noexcept
        : mKeyVals(valPtr)
        , mInfo(infoPtr) {
        fastForward();
    }

    // Assignment from iterator to const_iterator only, mirroring the conversion ctor.
    template <bool OtherIsConst,
              typename = typename std::enable_if<IsConst && !OtherIsConst>::type>
    Iter& operator=(Iter<OtherIsConst> const& other) noexcept {
        mKeyVals = other.mKeyVals;
        mInfo = other.mInfo;
        return *this;
    }

    // prefix increment. Undefined behavior if we are at end()!
    Iter& operator++() noexcept {
        mInfo++;
        mKeyVals++;
        fastForward();
        return *this;
    }

    Iter operator++(int) noexcept {
        Iter tmp = *this;
        ++(*this);
        return tmp;
    }

    reference operator*() const {
        return **mKeyVals;
    }

    pointer operator->() const {
        return &**mKeyVals;
    }

    // iterators compare by node pointer only; constness does not matter
    template <bool O>
    bool operator==(Iter<O> const& o) const noexcept {
        return mKeyVals == o.mKeyVals;
    }

    template <bool O>
    bool operator!=(Iter<O> const& o) const noexcept {
        return mKeyVals != o.mKeyVals;
    }

private:
    // fast forward to the next non-free info byte
    // I've tried a few variants that don't depend on intrinsics, but unfortunately they are
    // quite a bit slower than this one. So I've reverted that change again. See map_benchmark.
    void fastForward() noexcept {
        size_t n = 0;
        // scan sizeof(size_t) info bytes at a time; termination presumably relies
        // on the non-zero sentinel info byte at the end of the table — see the
        // memory-layout comment above Table.
        while (0U == (n = detail::unaligned_load<size_t>(mInfo))) {
            mInfo += sizeof(size_t);
            mKeyVals += sizeof(size_t);
        }
#if defined(ROBIN_HOOD_DISABLE_INTRINSICS)
        // we know for certain that within the next 8 bytes we'll find a non-zero one.
        if (ROBIN_HOOD_UNLIKELY(0U == detail::unaligned_load<uint32_t>(mInfo))) {
            mInfo += 4;
            mKeyVals += 4;
        }
        if (ROBIN_HOOD_UNLIKELY(0U == detail::unaligned_load<uint16_t>(mInfo))) {
            mInfo += 2;
            mKeyVals += 2;
        }
        if (ROBIN_HOOD_UNLIKELY(0U == *mInfo)) {
            mInfo += 1;
            mKeyVals += 1;
        }
#else
        // count zero bytes from the low (little endian) or high (big endian)
        // end to find how far ahead the first non-zero info byte is
#    if ROBIN_HOOD(LITTLE_ENDIAN)
        auto inc = ROBIN_HOOD_COUNT_TRAILING_ZEROES(n) / 8;
#    else
        auto inc = ROBIN_HOOD_COUNT_LEADING_ZEROES(n) / 8;
#    endif
        mInfo += inc;
        mKeyVals += inc;
#endif
    }

    friend class Table<IsFlat, MaxLoadFactor100, key_type, mapped_type, hasher, key_equal>;
    NodePtr mKeyVals{nullptr};
    uint8_t const* mInfo{nullptr};
};
////////////////////////////////////////////////////////////////////

// highly performance relevant code.
// Lower bits are used for indexing into the array (2^n size)
// The upper 1-5 bits need to be a reasonable good hash, to save comparisons.
template <typename HashKey>
void keyToIdx(HashKey&& key, size_t* idx, InfoType* info) const {
    // for a user-specified hash that is *not* robin_hood::hash, apply robin_hood::hash as
    // an additional mixing step. This serves as a bad hash prevention, if the given data is
    // badly mixed.
    using Mix =
        typename std::conditional<std::is_same<::robin_hood::hash<key_type>, hasher>::value,
                                  ::robin_hood::detail::identity_hash<size_t>,
                                  ::robin_hood::hash<size_t>>::type;

    // the lower InitialInfoNumBits are reserved for info.
    auto h = Mix{}(WHash::operator()(key));
    *info = mInfoInc + static_cast<InfoType>((h & InfoMask) >> mInfoHashShift);
    *idx = (h >> InitialInfoNumBits) & mMask;
}
  1167. // forwards the index by one, wrapping around at the end
  1168. void next(InfoType* info, size_t* idx) const noexcept {
  1169. *idx = *idx + 1;
  1170. *info += mInfoInc;
  1171. }
  1172. void nextWhileLess(InfoType* info, size_t* idx) const noexcept {
  1173. // unrolling this by hand did not bring any speedups.
  1174. while (*info < mInfo[*idx]) {
  1175. next(info, idx);
  1176. }
  1177. }
// Shift everything up by one element. Tries to move stuff around.
// Opens a hole at insertion_idx by moving [insertion_idx, startIdx) one slot
// up; startIdx must address an empty slot.
void
shiftUp(size_t startIdx,
        size_t const insertion_idx) noexcept(std::is_nothrow_move_assignable<Node>::value) {
    auto idx = startIdx;
    // the topmost target slot is raw memory: placement-new a node there
    ::new (static_cast<void*>(mKeyVals + idx)) Node(std::move(mKeyVals[idx - 1]));
    while (--idx != insertion_idx) {
        mKeyVals[idx] = std::move(mKeyVals[idx - 1]);
    }

    // shift the info bytes too: each moved entry is one step further from its
    // home bucket, so its info value grows by mInfoInc
    idx = startIdx;
    while (idx != insertion_idx) {
        ROBIN_HOOD_COUNT(shiftUp)
        mInfo[idx] = static_cast<uint8_t>(mInfo[idx - 1] + mInfoInc);
        if (ROBIN_HOOD_UNLIKELY(mInfo[idx] + mInfoInc > 0xFF)) {
            // info byte about to overflow: disallow further inserts so the
            // next insert triggers a rehash
            mMaxNumElementsAllowed = 0;
        }
        --idx;
    }
}
// Removes the element at idx and closes the gap by moving displaced successors
// (info >= 2*mInfoInc, i.e. not in their home bucket) one slot down.
void shiftDown(size_t idx) noexcept(std::is_nothrow_move_assignable<Node>::value) {
    // until we find one that is either empty or has zero offset.
    // TODO(martinus) we don't need to move everything, just the last one for the same
    // bucket.
    mKeyVals[idx].destroy(*this);

    // until we find one that is either empty or has zero offset.
    while (mInfo[idx + 1] >= 2 * mInfoInc) {
        ROBIN_HOOD_COUNT(shiftDown)
        // the entry moves one step closer to its home bucket
        mInfo[idx] = static_cast<uint8_t>(mInfo[idx + 1] - mInfoInc);
        mKeyVals[idx] = std::move(mKeyVals[idx + 1]);
        ++idx;
    }

    mInfo[idx] = 0;
    // don't destroy, we've moved it
    // mKeyVals[idx].destroy(*this);
    mKeyVals[idx].~Node();
}
// Shared lookup logic: returns the index of key's slot, or — when the key is
// not present — the end index (the distance from mKeyVals to mInfo).
template <typename Other>
ROBIN_HOOD(NODISCARD)
size_t findIdx(Other const& key) const {
    size_t idx{};
    InfoType info{};
    keyToIdx(key, &idx, &info);

    do {
        // unrolling this twice gives a bit of a speedup. More unrolling did not help.
        if (info == mInfo[idx] &&
            ROBIN_HOOD_LIKELY(WKeyEqual::operator()(key, mKeyVals[idx].getFirst()))) {
            return idx;
        }
        next(&info, &idx);
        if (info == mInfo[idx] &&
            ROBIN_HOOD_LIKELY(WKeyEqual::operator()(key, mKeyVals[idx].getFirst()))) {
            return idx;
        }
        next(&info, &idx);
    } while (info <= mInfo[idx]);

    // nothing found!
    return mMask == 0 ? 0
                      : static_cast<size_t>(std::distance(
                            mKeyVals, reinterpret_cast_no_cast_align_warning<Node*>(mInfo)));
}
// Clones all data from o into *this, using the memcpy fast path when the node
// type allows it (flat map with trivially copyable nodes).
void cloneData(const Table& o) {
    Cloner<Table, IsFlat && ROBIN_HOOD_IS_TRIVIALLY_COPYABLE(Node)>()(o, *this);
}
// inserts a keyval that is guaranteed to be new, e.g. when the hashmap is resized.
// @return index where the element was created
size_t insert_move(Node&& keyval) {
    // we don't retry, fail if overflowing
    // don't need to check max num elements
    if (0 == mMaxNumElementsAllowed && !try_increase_info()) {
        throwOverflowError(); // impossible to reach LCOV_EXCL_LINE
    }

    size_t idx{};
    InfoType info{};
    keyToIdx(keyval.getFirst(), &idx, &info);

    // skip forward. Use <= because we are certain that the element is not there.
    while (info <= mInfo[idx]) {
        idx = idx + 1;
        info += mInfoInc;
    }

    // key not found, so we are now exactly where we want to insert it.
    auto const insertion_idx = idx;
    auto const insertion_info = static_cast<uint8_t>(info);
    if (ROBIN_HOOD_UNLIKELY(insertion_info + mInfoInc > 0xFF)) {
        // info byte close to overflowing: disallow further inserts to force a rehash
        mMaxNumElementsAllowed = 0;
    }

    // find an empty spot
    while (0 != mInfo[idx]) {
        next(&info, &idx);
    }

    auto& l = mKeyVals[insertion_idx];
    if (idx == insertion_idx) {
        // target slot is free: construct in place
        ::new (static_cast<void*>(&l)) Node(std::move(keyval));
    } else {
        // make room by shifting the occupied run up one slot
        shiftUp(idx, insertion_idx);
        l = std::move(keyval);
    }

    // put at empty spot
    mInfo[insertion_idx] = insertion_info;

    ++mNumElements;
    return insertion_idx;
}
public:
using iterator = Iter<false>;
using const_iterator = Iter<true>;

// Default ctor; nothing is allocated here (see the comment on the
// bucket_count overload below).
Table() noexcept(noexcept(Hash()) && noexcept(KeyEqual()))
    : WHash()
    , WKeyEqual() {
    ROBIN_HOOD_TRACE(this)
}
// Creates an empty hash map. Nothing is allocated yet, this happens at the first insert.
// This tremendously speeds up ctor & dtor of a map that never receives an element. The
// penalty is paid at the first insert, and not before. Lookup of this empty map works
// because everybody points to DummyInfoByte::b. parameter bucket_count is dictated by the
// standard, but we can ignore it.
explicit Table(
    size_t ROBIN_HOOD_UNUSED(bucket_count) /*unused*/, const Hash& h = Hash{},
    const KeyEqual& equal = KeyEqual{}) noexcept(noexcept(Hash(h)) && noexcept(KeyEqual(equal)))
    : WHash(h)
    , WKeyEqual(equal) {
    ROBIN_HOOD_TRACE(this)
}
    // Range constructor: inserts all elements of [first, last). bucket_count is accepted
    // for interface compatibility with the standard and ignored.
    template <typename Iter>
    Table(Iter first, Iter last, size_t ROBIN_HOOD_UNUSED(bucket_count) /*unused*/ = 0,
          const Hash& h = Hash{}, const KeyEqual& equal = KeyEqual{})
        : WHash(h)
        , WKeyEqual(equal) {
        ROBIN_HOOD_TRACE(this)
        insert(first, last);
    }
    // Initializer-list constructor: inserts every element of the list. bucket_count is
    // accepted for interface compatibility and ignored.
    Table(std::initializer_list<value_type> initlist,
          size_t ROBIN_HOOD_UNUSED(bucket_count) /*unused*/ = 0, const Hash& h = Hash{},
          const KeyEqual& equal = KeyEqual{})
        : WHash(h)
        , WKeyEqual(equal) {
        ROBIN_HOOD_TRACE(this)
        insert(initlist.begin(), initlist.end());
    }
    // Move constructor: steals the other map's storage. Members are only transferred
    // when the source actually owns allocated data (o.mMask != 0); an empty source
    // leaves this map in its cheap default state.
    Table(Table&& o) noexcept
        : WHash(std::move(static_cast<WHash&>(o)))
        , WKeyEqual(std::move(static_cast<WKeyEqual&>(o)))
        , DataPool(std::move(static_cast<DataPool&>(o))) {
        ROBIN_HOOD_TRACE(this)
        if (o.mMask) {
            mKeyVals = std::move(o.mKeyVals);
            mInfo = std::move(o.mInfo);
            mNumElements = std::move(o.mNumElements);
            mMask = std::move(o.mMask);
            mMaxNumElementsAllowed = std::move(o.mMaxNumElementsAllowed);
            mInfoInc = std::move(o.mInfoInc);
            mInfoHashShift = std::move(o.mInfoHashShift);
            // set other's mask to 0 so its destructor won't do anything
            o.init();
        }
    }
    // Move assignment: takes over o's storage if it has any, otherwise just clears this
    // map. Self-assignment is explicitly a no-op.
    Table& operator=(Table&& o) noexcept {
        ROBIN_HOOD_TRACE(this)
        if (&o != this) {
            if (o.mMask) {
                // only move stuff if the other map actually has some data
                destroy();
                mKeyVals = std::move(o.mKeyVals);
                mInfo = std::move(o.mInfo);
                mNumElements = std::move(o.mNumElements);
                mMask = std::move(o.mMask);
                mMaxNumElementsAllowed = std::move(o.mMaxNumElementsAllowed);
                mInfoInc = std::move(o.mInfoInc);
                mInfoHashShift = std::move(o.mInfoHashShift);
                WHash::operator=(std::move(static_cast<WHash&>(o)));
                WKeyEqual::operator=(std::move(static_cast<WKeyEqual&>(o)));
                DataPool::operator=(std::move(static_cast<DataPool&>(o)));
                // reset o to the default non-owning state so its destructor is a no-op
                o.init();
            } else {
                // nothing in the other map => just clear us.
                clear();
            }
        }
        return *this;
    }
    // Copy constructor. For a non-empty source, allocates a buffer of identical layout
    // and clones the contents bucket-for-bucket via cloneData(); an empty source leaves
    // this map in the default unallocated state.
    Table(const Table& o)
        : WHash(static_cast<const WHash&>(o))
        , WKeyEqual(static_cast<const WKeyEqual&>(o))
        , DataPool(static_cast<const DataPool&>(o)) {
        ROBIN_HOOD_TRACE(this)
        if (!o.empty()) {
            // not empty: create an exact copy. it is also possible to just iterate through all
            // elements and insert them, but copying is probably faster.

            auto const numElementsWithBuffer = calcNumElementsWithBuffer(o.mMask + 1);
            auto const numBytesTotal = calcNumBytesTotal(numElementsWithBuffer);

            ROBIN_HOOD_LOG("std::malloc " << numBytesTotal << " = calcNumBytesTotal("
                                          << numElementsWithBuffer << ")")
            mKeyVals = static_cast<Node*>(
                detail::assertNotNull<std::bad_alloc>(std::malloc(numBytesTotal)));
            // no need for calloc because cloneData does memcpy
            mInfo = reinterpret_cast<uint8_t*>(mKeyVals + numElementsWithBuffer);
            mNumElements = o.mNumElements;
            mMask = o.mMask;
            mMaxNumElementsAllowed = o.mMaxNumElementsAllowed;
            mInfoInc = o.mInfoInc;
            mInfoHashShift = o.mInfoHashShift;
            cloneData(o);
        }
    }
    // Creates a copy of the given map. Copy constructor of each entry is used.
    // Not sure why clang-tidy thinks this doesn't handle self assignment, it does
    // NOLINTNEXTLINE(bugprone-unhandled-self-assignment,cert-oop54-cpp)
    Table& operator=(Table const& o) {
        ROBIN_HOOD_TRACE(this)
        if (&o == this) {
            // prevent assigning of itself
            return *this;
        }

        // we keep using the old allocator and not assign the new one, because we want to keep
        // the memory available. when it is the same size.
        if (o.empty()) {
            if (0 == mMask) {
                // nothing to do, we are empty too
                return *this;
            }

            // not empty: destroy what we have there
            // clear also resets mInfo to 0, that's sometimes not necessary.
            destroy();
            init();
            WHash::operator=(static_cast<const WHash&>(o));
            WKeyEqual::operator=(static_cast<const WKeyEqual&>(o));
            DataPool::operator=(static_cast<DataPool const&>(o));

            return *this;
        }

        // clean up old stuff
        Destroyer<Self, IsFlat && std::is_trivially_destructible<Node>::value>{}.nodes(*this);

        if (mMask != o.mMask) {
            // no luck: we don't have the same array size allocated, so we need to realloc.
            if (0 != mMask) {
                // only deallocate if we actually have data!
                ROBIN_HOOD_LOG("std::free")
                std::free(mKeyVals);
            }

            auto const numElementsWithBuffer = calcNumElementsWithBuffer(o.mMask + 1);
            auto const numBytesTotal = calcNumBytesTotal(numElementsWithBuffer);
            ROBIN_HOOD_LOG("std::malloc " << numBytesTotal << " = calcNumBytesTotal("
                                          << numElementsWithBuffer << ")")
            mKeyVals = static_cast<Node*>(
                detail::assertNotNull<std::bad_alloc>(std::malloc(numBytesTotal)));

            // no need for calloc here because cloneData performs a memcpy.
            mInfo = reinterpret_cast<uint8_t*>(mKeyVals + numElementsWithBuffer);
            // sentinel is set in cloneData
        }
        WHash::operator=(static_cast<const WHash&>(o));
        WKeyEqual::operator=(static_cast<const WKeyEqual&>(o));
        DataPool::operator=(static_cast<DataPool const&>(o));
        mNumElements = o.mNumElements;
        mMask = o.mMask;
        mMaxNumElementsAllowed = o.mMaxNumElementsAllowed;
        mInfoInc = o.mInfoInc;
        mInfoHashShift = o.mInfoHashShift;
        cloneData(o);

        return *this;
    }
    // Swaps everything between the two maps.
    // `using std::swap` + unqualified call is the ADL-swap idiom; with no better match
    // it falls back to std::swap, which uses the noexcept move operations above.
    void swap(Table& o) {
        ROBIN_HOOD_TRACE(this)
        using std::swap;
        swap(o, *this);
    }
    // Clears all data, without resizing.
    // NOTE(review): mNumElements is presumably reset inside Destroyer::nodes() — it is
    // not touched here; confirm against the Destroyer definition.
    void clear() {
        ROBIN_HOOD_TRACE(this)
        if (empty()) {
            // don't do anything! also important because we don't want to write to
            // DummyInfoByte::b, even though we would just write 0 to it.
            return;
        }

        Destroyer<Self, IsFlat && std::is_trivially_destructible<Node>::value>{}.nodes(*this);

        auto const numElementsWithBuffer = calcNumElementsWithBuffer(mMask + 1);
        // clear everything, then set the sentinel again
        uint8_t const z = 0;
        std::fill(mInfo, mInfo + calcNumBytesInfo(numElementsWithBuffer), z);
        mInfo[numElementsWithBuffer] = 1;

        mInfoInc = InitialInfoInc;
        mInfoHashShift = InitialInfoHashShift;
    }
    // Destroys the map and all its contents.
    ~Table() {
        ROBIN_HOOD_TRACE(this)
        destroy();
    }
  1464. // Checks if both tables contain the same entries. Order is irrelevant.
  1465. bool operator==(const Table& other) const {
  1466. ROBIN_HOOD_TRACE(this)
  1467. if (other.size() != size()) {
  1468. return false;
  1469. }
  1470. for (auto const& otherEntry : other) {
  1471. if (!has(otherEntry)) {
  1472. return false;
  1473. }
  1474. }
  1475. return true;
  1476. }
  1477. bool operator!=(const Table& other) const {
  1478. ROBIN_HOOD_TRACE(this)
  1479. return !operator==(other);
  1480. }
    // Returns a reference to the mapped value for key, creating a node with a
    // value-initialized mapped_type first if the key is absent (see doCreateByKey).
    // Only enabled for maps (mapped_type not void).
    template <typename Q = mapped_type>
    typename std::enable_if<!std::is_void<Q>::value, Q&>::type operator[](const key_type& key) {
        ROBIN_HOOD_TRACE(this)
        return doCreateByKey(key);
    }

    // Rvalue-key overload: the key may be moved into a newly created node.
    template <typename Q = mapped_type>
    typename std::enable_if<!std::is_void<Q>::value, Q&>::type operator[](key_type&& key) {
        ROBIN_HOOD_TRACE(this)
        return doCreateByKey(std::move(key));
    }
  1491. template <typename Iter>
  1492. void insert(Iter first, Iter last) {
  1493. for (; first != last; ++first) {
  1494. // value_type ctor needed because this might be called with std::pair's
  1495. insert(value_type(*first));
  1496. }
  1497. }
    // Constructs an element from args and tries to insert it. If an equal key already
    // exists the freshly built node is destroyed again and nothing is inserted.
    // @return iterator to the (new or existing) element plus a flag telling whether the
    //         insertion took place.
    template <typename... Args>
    std::pair<iterator, bool> emplace(Args&&... args) {
        ROBIN_HOOD_TRACE(this)
        Node n{*this, std::forward<Args>(args)...};
        auto r = doInsert(std::move(n));
        if (!r.second) {
            // insertion not possible: destroy node
            // NOLINTNEXTLINE(bugprone-use-after-move)
            n.destroy(*this);
        }
        return r;
    }
    // try_emplace: builds an element from (key, args...) only when the key is not yet
    // present; an existing mapped value is never overwritten. The hint overloads accept
    // but ignore the hint — it exists purely for std::unordered_map API compatibility.
    template <typename... Args>
    std::pair<iterator, bool> try_emplace(const key_type& key, Args&&... args) {
        return try_emplace_impl(key, std::forward<Args>(args)...);
    }

    template <typename... Args>
    std::pair<iterator, bool> try_emplace(key_type&& key, Args&&... args) {
        return try_emplace_impl(std::move(key), std::forward<Args>(args)...);
    }

    template <typename... Args>
    std::pair<iterator, bool> try_emplace(const_iterator hint, const key_type& key,
                                          Args&&... args) {
        (void)hint;
        return try_emplace_impl(key, std::forward<Args>(args)...);
    }

    template <typename... Args>
    std::pair<iterator, bool> try_emplace(const_iterator hint, key_type&& key, Args&&... args) {
        (void)hint;
        return try_emplace_impl(std::move(key), std::forward<Args>(args)...);
    }
    // insert_or_assign: inserts a new element or overwrites the mapped value of an
    // existing one (see insert_or_assign_impl). The hint overloads ignore the hint —
    // it exists purely for std::unordered_map API compatibility.
    template <typename Mapped>
    std::pair<iterator, bool> insert_or_assign(const key_type& key, Mapped&& obj) {
        return insert_or_assign_impl(key, std::forward<Mapped>(obj));
    }

    template <typename Mapped>
    std::pair<iterator, bool> insert_or_assign(key_type&& key, Mapped&& obj) {
        return insert_or_assign_impl(std::move(key), std::forward<Mapped>(obj));
    }

    template <typename Mapped>
    std::pair<iterator, bool> insert_or_assign(const_iterator hint, const key_type& key,
                                               Mapped&& obj) {
        (void)hint;
        return insert_or_assign_impl(key, std::forward<Mapped>(obj));
    }

    template <typename Mapped>
    std::pair<iterator, bool> insert_or_assign(const_iterator hint, key_type&& key, Mapped&& obj) {
        (void)hint;
        return insert_or_assign_impl(std::move(key), std::forward<Mapped>(obj));
    }
    // Inserts a copy of keyval unless an equal key already exists.
    std::pair<iterator, bool> insert(const value_type& keyval) {
        ROBIN_HOOD_TRACE(this)
        return doInsert(keyval);
    }

    // Move-inserting overload of the above.
    std::pair<iterator, bool> insert(value_type&& keyval) {
        return doInsert(std::move(keyval));
    }
    // Returns 1 if key is found, 0 otherwise.
    size_t count(const key_type& key) const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        auto kv = mKeyVals + findIdx(key);
        // findIdx yields the sentinel slot (the node address where mInfo begins, i.e.
        // end()) when nothing matched.
        if (kv != reinterpret_cast_no_cast_align_warning<Node*>(mInfo)) {
            return 1;
        }
        return 0;
    }

    // Heterogeneous-lookup variant; only enabled when hash and key-equal are transparent.
    template <typename OtherKey, typename Self_ = Self>
    // NOLINTNEXTLINE(modernize-use-nodiscard)
    typename std::enable_if<Self_::is_transparent, size_t>::type count(const OtherKey& key) const {
        ROBIN_HOOD_TRACE(this)
        auto kv = mKeyVals + findIdx(key);
        if (kv != reinterpret_cast_no_cast_align_warning<Node*>(mInfo)) {
            return 1;
        }
        return 0;
    }
    // Returns true if an element with the given key exists.
    bool contains(const key_type& key) const { // NOLINT(modernize-use-nodiscard)
        return 1U == count(key);
    }

    // Heterogeneous-lookup variant; only enabled when hash and key-equal are transparent.
    template <typename OtherKey, typename Self_ = Self>
    // NOLINTNEXTLINE(modernize-use-nodiscard)
    typename std::enable_if<Self_::is_transparent, bool>::type contains(const OtherKey& key) const {
        return 1U == count(key);
    }
    // Returns a reference to the value found for key.
    // Throws std::out_of_range if element cannot be found
    template <typename Q = mapped_type>
    // NOLINTNEXTLINE(modernize-use-nodiscard)
    typename std::enable_if<!std::is_void<Q>::value, Q&>::type at(key_type const& key) {
        ROBIN_HOOD_TRACE(this)
        auto kv = mKeyVals + findIdx(key);
        // findIdx points at the sentinel (== mInfo, i.e. end()) when the key is absent.
        if (kv == reinterpret_cast_no_cast_align_warning<Node*>(mInfo)) {
            doThrow<std::out_of_range>("key not found");
        }
        return kv->getSecond();
    }

    // Returns a reference to the value found for key.
    // Throws std::out_of_range if element cannot be found
    template <typename Q = mapped_type>
    // NOLINTNEXTLINE(modernize-use-nodiscard)
    typename std::enable_if<!std::is_void<Q>::value, Q const&>::type at(key_type const& key) const {
        ROBIN_HOOD_TRACE(this)
        auto kv = mKeyVals + findIdx(key);
        if (kv == reinterpret_cast_no_cast_align_warning<Node*>(mInfo)) {
            doThrow<std::out_of_range>("key not found");
        }
        return kv->getSecond();
    }
    // find() overloads: all delegate to findIdx(), which returns the sentinel index
    // (i.e. end()) when the key is not present.
    const_iterator find(const key_type& key) const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return const_iterator{mKeyVals + idx, mInfo + idx};
    }

    // Tag-dispatched heterogeneous lookup.
    template <typename OtherKey>
    const_iterator find(const OtherKey& key, is_transparent_tag /*unused*/) const {
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return const_iterator{mKeyVals + idx, mInfo + idx};
    }

    // SFINAE-enabled heterogeneous lookup (transparent functors only).
    template <typename OtherKey, typename Self_ = Self>
    typename std::enable_if<Self_::is_transparent, // NOLINT(modernize-use-nodiscard)
                            const_iterator>::type  // NOLINT(modernize-use-nodiscard)
    find(const OtherKey& key) const {              // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return const_iterator{mKeyVals + idx, mInfo + idx};
    }

    iterator find(const key_type& key) {
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return iterator{mKeyVals + idx, mInfo + idx};
    }

    template <typename OtherKey>
    iterator find(const OtherKey& key, is_transparent_tag /*unused*/) {
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return iterator{mKeyVals + idx, mInfo + idx};
    }

    template <typename OtherKey, typename Self_ = Self>
    typename std::enable_if<Self_::is_transparent, iterator>::type find(const OtherKey& key) {
        ROBIN_HOOD_TRACE(this)
        const size_t idx = findIdx(key);
        return iterator{mKeyVals + idx, mInfo + idx};
    }
    // Iterator to the first element, or end() for an empty map.
    iterator begin() {
        ROBIN_HOOD_TRACE(this)
        if (empty()) {
            return end();
        }
        // fast_forward_tag lets the iterator seek ahead to the first occupied slot.
        return iterator(mKeyVals, mInfo, fast_forward_tag{});
    }
    const_iterator begin() const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return cbegin();
    }
    const_iterator cbegin() const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        if (empty()) {
            return cend();
        }
        return const_iterator(mKeyVals, mInfo, fast_forward_tag{});
    }
    // Past-the-end iterator: the sentinel slot (where mInfo starts) doubles as the
    // end() node pointer.
    iterator end() {
        ROBIN_HOOD_TRACE(this)
        // no need to supply valid info pointer: end() must not be dereferenced, and only node
        // pointer is compared.
        return iterator{reinterpret_cast_no_cast_align_warning<Node*>(mInfo), nullptr};
    }
    const_iterator end() const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return cend();
    }
    const_iterator cend() const { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return const_iterator{reinterpret_cast_no_cast_align_warning<Node*>(mInfo), nullptr};
    }
    // const_iterator overload: forwards to the iterator overload below.
    iterator erase(const_iterator pos) {
        ROBIN_HOOD_TRACE(this)
        // its safe to perform const cast here
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-const-cast)
        return erase(iterator{const_cast<Node*>(pos.mKeyVals), const_cast<uint8_t*>(pos.mInfo)});
    }

    // Erases element at pos, returns iterator to the next element.
    iterator erase(iterator pos) {
        ROBIN_HOOD_TRACE(this)
        // we assume that pos always points to a valid entry, and not end().
        auto const idx = static_cast<size_t>(pos.mKeyVals - mKeyVals);

        // backward-shift deletion: shiftDown pulls the following run one slot back.
        shiftDown(idx);
        --mNumElements;

        if (*pos.mInfo) {
            // we've backward shifted, return this again
            return pos;
        }

        // no backward shift, return next element
        return ++pos;
    }

    // Erases the element with the given key, if present.
    // @return number of erased elements: 0 or 1.
    size_t erase(const key_type& key) {
        ROBIN_HOOD_TRACE(this)
        size_t idx{};
        InfoType info{};
        keyToIdx(key, &idx, &info);

        // check while info matches with the source idx
        do {
            if (info == mInfo[idx] && WKeyEqual::operator()(key, mKeyVals[idx].getFirst())) {
                shiftDown(idx);
                --mNumElements;
                return 1;
            }
            next(&info, &idx);
        } while (info <= mInfo[idx]);

        // nothing found to delete
        return 0;
    }
    // reserves space for the specified number of elements. Makes sure the old data fits.
    // exactly the same as reserve(c), except the rehash happens even when not growing.
    void rehash(size_t c) {
        // forces a reserve
        reserve(c, true);
    }

    // reserves space for the specified number of elements. Makes sure the old data fits.
    // Exactly the same as rehash(c). Use rehash(0) to shrink to fit.
    void reserve(size_t c) {
        // reserve, but don't force rehash
        reserve(c, false);
    }
    // Number of elements currently stored.
    size_type size() const noexcept { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return mNumElements;
    }

    // Theoretical maximum: the full range of size_type.
    size_type max_size() const noexcept { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return static_cast<size_type>(-1);
    }

    ROBIN_HOOD(NODISCARD) bool empty() const noexcept {
        ROBIN_HOOD_TRACE(this)
        return 0 == mNumElements;
    }

    // Configured load limit as a fraction (MaxLoadFactor100 is in percent).
    float max_load_factor() const noexcept { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return MaxLoadFactor100 / 100.0F;
    }

    // Average number of elements per bucket. Since we allow only 1 element per bucket,
    // this equals the fill ratio of the bucket array.
    float load_factor() const noexcept { // NOLINT(modernize-use-nodiscard)
        ROBIN_HOOD_TRACE(this)
        return static_cast<float>(size()) / static_cast<float>(mMask + 1);
    }

    // Current bucket mask (bucket count - 1, see init_data); 0 while unallocated.
    ROBIN_HOOD(NODISCARD) size_t mask() const noexcept {
        ROBIN_HOOD_TRACE(this)
        return mMask;
    }
    // Maximum number of elements allowed at the given bucket count without exceeding
    // MaxLoadFactor100 percent load; overflow-safe for very large counts.
    ROBIN_HOOD(NODISCARD) size_t calcMaxNumElementsAllowed(size_t maxElements) const noexcept {
        if (ROBIN_HOOD_LIKELY(maxElements <= (std::numeric_limits<size_t>::max)() / 100)) {
            return maxElements * MaxLoadFactor100 / 100;
        }

        // we might be a bit imprecise, but since maxElements is quite large that doesn't matter
        return (maxElements / 100) * MaxLoadFactor100;
    }

    ROBIN_HOOD(NODISCARD) size_t calcNumBytesInfo(size_t numElements) const noexcept {
        // we add a uint64_t, which houses the sentinel (first byte) and padding so we can load
        // 64bit types.
        return numElements + sizeof(uint64_t);
    }

    // Number of element slots including the overflow buffer (capped at 0xFF, the
    // largest possible probe distance given 8-bit info bytes).
    ROBIN_HOOD(NODISCARD)
    size_t calcNumElementsWithBuffer(size_t numElements) const noexcept {
        auto maxNumElementsAllowed = calcMaxNumElementsAllowed(numElements);
        return numElements + (std::min)(maxNumElementsAllowed, (static_cast<size_t>(0xFF)));
    }
    // Total allocation size in bytes for the node array plus the info-byte array.
    // calculation only allowed for 2^n values
    ROBIN_HOOD(NODISCARD) size_t calcNumBytesTotal(size_t numElements) const {
#if ROBIN_HOOD(BITNESS) == 64
        return numElements * sizeof(Node) + calcNumBytesInfo(numElements);
#else
        // make sure we're doing 64bit operations, so we are at least safe against 32bit overflows.
        auto const ne = static_cast<uint64_t>(numElements);
        auto const s = static_cast<uint64_t>(sizeof(Node));
        auto const infos = static_cast<uint64_t>(calcNumBytesInfo(numElements));

        auto const total64 = ne * s + infos;
        auto const total = static_cast<size_t>(total64);

        // round-trip check: if the 64-bit total doesn't fit into size_t, we overflowed.
        if (ROBIN_HOOD_UNLIKELY(static_cast<uint64_t>(total) != total64)) {
            throwOverflowError();
        }
        return total;
#endif
    }
private:
    // Helper for operator== (map version): true when an entry with equal key AND equal
    // mapped value exists.
    template <typename Q = mapped_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<!std::is_void<Q>::value, bool>::type has(const value_type& e) const {
        ROBIN_HOOD_TRACE(this)
        auto it = find(e.first);
        return it != end() && it->second == e.second;
    }

    // Set version: only the key has to match.
    template <typename Q = mapped_type>
    ROBIN_HOOD(NODISCARD)
    typename std::enable_if<std::is_void<Q>::value, bool>::type has(const value_type& e) const {
        ROBIN_HOOD_TRACE(this)
        return find(e) != end();
    }
    // Shared implementation of rehash()/reserve(): doubles from InitialNumElements up
    // to the smallest power of two whose load limit fits max(c, mNumElements), then
    // rehashes when forced or when the table would actually grow.
    void reserve(size_t c, bool forceRehash) {
        ROBIN_HOOD_TRACE(this)
        auto const minElementsAllowed = (std::max)(c, mNumElements);
        auto newSize = InitialNumElements;
        while (calcMaxNumElementsAllowed(newSize) < minElementsAllowed && newSize != 0) {
            newSize *= 2;
        }
        // newSize == 0 means the doubling wrapped around: request can't fit in size_t.
        if (ROBIN_HOOD_UNLIKELY(newSize == 0)) {
            throwOverflowError();
        }

        ROBIN_HOOD_LOG("newSize > mMask + 1: " << newSize << " > " << mMask << " + 1")

        // only actually do anything when the new size is bigger than the old one. This prevents to
        // continuously allocate for each reserve() call.
        if (forceRehash || newSize > mMask + 1) {
            rehashPowerOfTwo(newSize);
        }
    }
    // reserves space for at least the specified number of elements.
    // only works if numBuckets is a power of two
    void rehashPowerOfTwo(size_t numBuckets) {
        ROBIN_HOOD_TRACE(this)

        Node* const oldKeyVals = mKeyVals;
        uint8_t const* const oldInfo = mInfo;

        const size_t oldMaxElementsWithBuffer = calcNumElementsWithBuffer(mMask + 1);

        // resize operation: move stuff
        init_data(numBuckets);
        if (oldMaxElementsWithBuffer > 1) {
            for (size_t i = 0; i < oldMaxElementsWithBuffer; ++i) {
                if (oldInfo[i] != 0) {
                    // re-insert into the new array; insert_move never compares keys.
                    insert_move(std::move(oldKeyVals[i]));
                    // destroy the node but DON'T destroy the data.
                    oldKeyVals[i].~Node();
                }
            }

            // this check is not necessary as it's guarded by the previous if, but it helps silence
            // g++'s overeager "attempt to free a non-heap object 'map'
            // [-Werror=free-nonheap-object]" warning.
            if (oldKeyVals != reinterpret_cast_no_cast_align_warning<Node*>(&mMask)) {
                // don't destroy old data: put it into the pool instead
                DataPool::addOrFree(oldKeyVals, calcNumBytesTotal(oldMaxElementsWithBuffer));
            }
        }
    }
    // Reports that the map can't grow any further: throws std::overflow_error when
    // exceptions are enabled, otherwise aborts. NOINLINE keeps this cold path out of
    // the callers' instruction stream.
    ROBIN_HOOD(NOINLINE) void throwOverflowError() const {
#if ROBIN_HOOD(HAS_EXCEPTIONS)
        throw std::overflow_error("robin_hood::map overflow");
#else
        abort();
#endif
    }
  1846. template <typename OtherKey, typename... Args>
  1847. std::pair<iterator, bool> try_emplace_impl(OtherKey&& key, Args&&... args) {
  1848. ROBIN_HOOD_TRACE(this)
  1849. auto it = find(key);
  1850. if (it == end()) {
  1851. return emplace(std::piecewise_construct,
  1852. std::forward_as_tuple(std::forward<OtherKey>(key)),
  1853. std::forward_as_tuple(std::forward<Args>(args)...));
  1854. }
  1855. return {it, false};
  1856. }
    // Shared implementation of all insert_or_assign overloads: assigns to the mapped
    // value when the key exists, otherwise emplaces a new element. Returns false in the
    // bool on assignment, matching std::unordered_map::insert_or_assign.
    template <typename OtherKey, typename Mapped>
    std::pair<iterator, bool> insert_or_assign_impl(OtherKey&& key, Mapped&& obj) {
        ROBIN_HOOD_TRACE(this)
        auto it = find(key);
        if (it == end()) {
            return emplace(std::forward<OtherKey>(key), std::forward<Mapped>(obj));
        }
        it->second = std::forward<Mapped>(obj);
        return {it, false};
    }
    // Allocates zeroed storage for max_elements buckets (plus the probing overflow
    // buffer and the sentinel) and resets all bookkeeping members.
    void init_data(size_t max_elements) {
        mNumElements = 0;
        mMask = max_elements - 1;
        mMaxNumElementsAllowed = calcMaxNumElementsAllowed(max_elements);

        auto const numElementsWithBuffer = calcNumElementsWithBuffer(max_elements);

        // calloc also zeroes everything
        auto const numBytesTotal = calcNumBytesTotal(numElementsWithBuffer);
        ROBIN_HOOD_LOG("std::calloc " << numBytesTotal << " = calcNumBytesTotal("
                                      << numElementsWithBuffer << ")")
        mKeyVals = reinterpret_cast<Node*>(
            detail::assertNotNull<std::bad_alloc>(std::calloc(1, numBytesTotal)));
        // the info bytes live directly behind the node array.
        mInfo = reinterpret_cast<uint8_t*>(mKeyVals + numElementsWithBuffer);

        // set sentinel
        mInfo[numElementsWithBuffer] = 1;

        mInfoInc = InitialInfoInc;
        mInfoHashShift = InitialInfoHashShift;
    }
    // Implementation of operator[]: returns the mapped value for key, inserting a node
    // with a value-initialized mapped_type (empty forward_as_tuple) if absent. The
    // outer while(true) restarts the probe sequence after increase_size() rehashes.
    template <typename Arg, typename Q = mapped_type>
    typename std::enable_if<!std::is_void<Q>::value, Q&>::type doCreateByKey(Arg&& key) {
        while (true) {
            size_t idx{};
            InfoType info{};
            keyToIdx(key, &idx, &info);
            nextWhileLess(&info, &idx);

            // while we potentially have a match. Can't do a do-while here because when mInfo is
            // 0 we don't want to skip forward
            while (info == mInfo[idx]) {
                if (WKeyEqual::operator()(key, mKeyVals[idx].getFirst())) {
                    // key already exists, do not insert.
                    return mKeyVals[idx].getSecond();
                }
                next(&info, &idx);
            }

            // unlikely that this evaluates to true
            if (ROBIN_HOOD_UNLIKELY(mNumElements >= mMaxNumElementsAllowed)) {
                increase_size();
                continue;
            }

            // key not found, so we are now exactly where we want to insert it.
            auto const insertion_idx = idx;
            auto const insertion_info = info;
            if (ROBIN_HOOD_UNLIKELY(insertion_info + mInfoInc > 0xFF)) {
                // the 8 bit info byte would overflow on a later probe: force a rehash
                // before any further insert.
                mMaxNumElementsAllowed = 0;
            }

            // find an empty spot
            while (0 != mInfo[idx]) {
                next(&info, &idx);
            }

            auto& l = mKeyVals[insertion_idx];
            if (idx == insertion_idx) {
                // put at empty spot. This forwards all arguments into the node where the object
                // is constructed exactly where it is needed.
                ::new (static_cast<void*>(&l))
                    Node(*this, std::piecewise_construct,
                         std::forward_as_tuple(std::forward<Arg>(key)), std::forward_as_tuple());
            } else {
                // robin hood displacement: shift the occupied run towards the empty
                // slot, then assign a fresh node into the freed position.
                shiftUp(idx, insertion_idx);
                l = Node(*this, std::piecewise_construct,
                         std::forward_as_tuple(std::forward<Arg>(key)), std::forward_as_tuple());
            }

            // mKeyVals[idx].getFirst() = std::move(key);
            mInfo[insertion_idx] = static_cast<uint8_t>(insertion_info);

            ++mNumElements;
            return mKeyVals[insertion_idx].getSecond();
        }
    }
    // This is exactly the same code as operator[], except for the return values:
    // an (iterator, inserted) pair instead of a reference to the mapped value.
    template <typename Arg>
    std::pair<iterator, bool> doInsert(Arg&& keyval) {
        while (true) {
            size_t idx{};
            InfoType info{};
            keyToIdx(getFirstConst(keyval), &idx, &info);
            nextWhileLess(&info, &idx);

            // while we potentially have a match
            while (info == mInfo[idx]) {
                if (WKeyEqual::operator()(getFirstConst(keyval), mKeyVals[idx].getFirst())) {
                    // key already exists, do NOT insert.
                    // see http://en.cppreference.com/w/cpp/container/unordered_map/insert
                    return std::make_pair<iterator, bool>(iterator(mKeyVals + idx, mInfo + idx),
                                                          false);
                }
                next(&info, &idx);
            }

            // unlikely that this evaluates to true
            if (ROBIN_HOOD_UNLIKELY(mNumElements >= mMaxNumElementsAllowed)) {
                increase_size();
                // retry the whole probe against the rehashed table
                continue;
            }

            // key not found, so we are now exactly where we want to insert it.
            auto const insertion_idx = idx;
            auto const insertion_info = info;
            if (ROBIN_HOOD_UNLIKELY(insertion_info + mInfoInc > 0xFF)) {
                // the 8 bit info byte would overflow on a later probe: force a rehash
                // before any further insert.
                mMaxNumElementsAllowed = 0;
            }

            // find an empty spot
            while (0 != mInfo[idx]) {
                next(&info, &idx);
            }

            auto& l = mKeyVals[insertion_idx];
            if (idx == insertion_idx) {
                ::new (static_cast<void*>(&l)) Node(*this, std::forward<Arg>(keyval));
            } else {
                // robin hood displacement: shift the occupied run towards the empty slot.
                shiftUp(idx, insertion_idx);
                l = Node(*this, std::forward<Arg>(keyval));
            }

            // put at empty spot
            mInfo[insertion_idx] = static_cast<uint8_t>(insertion_info);

            ++mNumElements;
            return std::make_pair(iterator(mKeyVals + insertion_idx, mInfo + insertion_idx), true);
        }
    }
    // Tries to free up distance bits without rehashing: halves mInfoInc and drops one
    // hash bit from every stored info byte. Returns false when mInfoInc is already at
    // its minimum usable value.
    bool try_increase_info() {
        ROBIN_HOOD_LOG("mInfoInc=" << mInfoInc << ", numElements=" << mNumElements
                                   << ", maxNumElementsAllowed="
                                   << calcMaxNumElementsAllowed(mMask + 1))
        if (mInfoInc <= 2) {
            // need to be > 2 so that shift works (otherwise undefined behavior!)
            return false;
        }
        // we got space left, try to make info smaller
        mInfoInc = static_cast<uint8_t>(mInfoInc >> 1U);

        // remove one bit of the hash, leaving more space for the distance info.
        // This is extremely fast because we can operate on 8 bytes at once.
        ++mInfoHashShift;
        auto const numElementsWithBuffer = calcNumElementsWithBuffer(mMask + 1);

        for (size_t i = 0; i < numElementsWithBuffer; i += 8) {
            // shift all 8 info bytes right by one in a single 64-bit operation; the
            // mask clears the bit that would leak in from the neighboring byte.
            auto val = unaligned_load<uint64_t>(mInfo + i);
            val = (val >> 1U) & UINT64_C(0x7f7f7f7f7f7f7f7f);
            std::memcpy(mInfo + i, &val, sizeof(val));
        }
        // update sentinel, which might have been cleared out!
        mInfo[numElementsWithBuffer] = 1;

        mMaxNumElementsAllowed = calcMaxNumElementsAllowed(mMask + 1);
        return true;
    }
    // Grows the table: allocates the initial array on first use, otherwise tries to
    // widen the info bytes, and finally doubles the bucket count.
    void increase_size() {
        // nothing allocated yet? just allocate InitialNumElements
        if (0 == mMask) {
            init_data(InitialNumElements);
            return;
        }

        auto const maxNumElementsAllowed = calcMaxNumElementsAllowed(mMask + 1);
        if (mNumElements < maxNumElementsAllowed && try_increase_info()) {
            return;
        }

        ROBIN_HOOD_LOG("mNumElements=" << mNumElements << ", maxNumElementsAllowed="
                                       << maxNumElementsAllowed << ", load="
                                       << (static_cast<double>(mNumElements) * 100.0 /
                                           (static_cast<double>(mMask) + 1)))
        // it seems we have a really bad hash function! don't try to resize again
        // (resizing wouldn't help — the table is far below its load limit and still
        // ran out of probe distance).
        if (mNumElements * 2 < calcMaxNumElementsAllowed(mMask + 1)) {
            throwOverflowError();
        }

        rehashPowerOfTwo((mMask + 1) * 2);
    }
    // Destroys all nodes and frees the heap buffer (if any). Used by the destructor
    // and the assignment operators. A never-allocated map (mMask == 0) owns nothing.
    void destroy() {
        if (0 == mMask) {
            // don't deallocate!
            return;
        }

        Destroyer<Self, IsFlat && std::is_trivially_destructible<Node>::value>{}
            .nodesDoNotDeallocate(*this);

        // This protection against not deleting mMask shouldn't be needed as it's sufficiently
        // protected with the 0==mMask check, but I have this anyways because g++ 7 otherwise
        // reports a compile error: attempt to free a non-heap object 'fm'
        // [-Werror=free-nonheap-object]
        if (mKeyVals != reinterpret_cast_no_cast_align_warning<Node*>(&mMask)) {
            ROBIN_HOOD_LOG("std::free")
            std::free(mKeyVals);
        }
    }
  2039. void init() noexcept {
  2040. mKeyVals = reinterpret_cast_no_cast_align_warning<Node*>(&mMask);
  2041. mInfo = reinterpret_cast<uint8_t*>(&mMask);
  2042. mNumElements = 0;
  2043. mMask = 0;
  2044. mMaxNumElementsAllowed = 0;
  2045. mInfoInc = InitialInfoInc;
  2046. mInfoHashShift = InitialInfoHashShift;
  2047. }
// members are sorted so no padding occurs
// (trailing comments give each member's size and the running offset on a
// typical 64-bit build)
// mKeyVals / mInfo alias &mMask while nothing is allocated (see init()/destroy())
Node* mKeyVals = reinterpret_cast_no_cast_align_warning<Node*>(&mMask); // 8 byte 8
uint8_t* mInfo = reinterpret_cast<uint8_t*>(&mMask); // 8 byte 16
size_t mNumElements = 0; // 8 byte 24
// mMask == table size - 1 (power of two); 0 means "nothing allocated yet"
size_t mMask = 0; // 8 byte 32
size_t mMaxNumElementsAllowed = 0; // 8 byte 40
InfoType mInfoInc = InitialInfoInc; // 4 byte 44
InfoType mInfoHashShift = InitialInfoHashShift; // 4 byte 48
// 16 byte 56 if NodeAllocator
  2057. };
  2058. } // namespace detail
// map
// The first template argument of detail::Table selects the storage strategy:
// true = flat (entries stored directly in the table array), false = node
// (entries in separately allocated nodes).
template <typename Key, typename T, typename Hash = hash<Key>,
          typename KeyEqual = std::equal_to<Key>, size_t MaxLoadFactor100 = 80>
using unordered_flat_map = detail::Table<true, MaxLoadFactor100, Key, T, Hash, KeyEqual>;

template <typename Key, typename T, typename Hash = hash<Key>,
          typename KeyEqual = std::equal_to<Key>, size_t MaxLoadFactor100 = 80>
using unordered_node_map = detail::Table<false, MaxLoadFactor100, Key, T, Hash, KeyEqual>;

// Default map: picks flat storage when the pair is small (<= 6 machine words)
// and nothrow-movable; otherwise falls back to node-based storage.
template <typename Key, typename T, typename Hash = hash<Key>,
          typename KeyEqual = std::equal_to<Key>, size_t MaxLoadFactor100 = 80>
using unordered_map =
    detail::Table<sizeof(robin_hood::pair<Key, T>) <= sizeof(size_t) * 6 &&
                      std::is_nothrow_move_constructible<robin_hood::pair<Key, T>>::value &&
                      std::is_nothrow_move_assignable<robin_hood::pair<Key, T>>::value,
                  MaxLoadFactor100, Key, T, Hash, KeyEqual>;

// set
// Sets reuse the same Table with the mapped type set to void.
template <typename Key, typename Hash = hash<Key>, typename KeyEqual = std::equal_to<Key>,
          size_t MaxLoadFactor100 = 80>
using unordered_flat_set = detail::Table<true, MaxLoadFactor100, Key, void, Hash, KeyEqual>;

template <typename Key, typename Hash = hash<Key>, typename KeyEqual = std::equal_to<Key>,
          size_t MaxLoadFactor100 = 80>
using unordered_node_set = detail::Table<false, MaxLoadFactor100, Key, void, Hash, KeyEqual>;

// Default set: same flat/node heuristic as unordered_map, applied to Key.
template <typename Key, typename Hash = hash<Key>, typename KeyEqual = std::equal_to<Key>,
          size_t MaxLoadFactor100 = 80>
using unordered_set = detail::Table<sizeof(Key) <= sizeof(size_t) * 6 &&
                                        std::is_nothrow_move_constructible<Key>::value &&
                                        std::is_nothrow_move_assignable<Key>::value,
                                    MaxLoadFactor100, Key, void, Hash, KeyEqual>;
  2086. } // namespace robin_hood
  2087. #endif