1 |
742 |
jeremybenn |
// Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-
|
2 |
|
|
|
3 |
|
|
// Copyright (C) 2010, 2011 Free Software Foundation, Inc.
|
4 |
|
|
//
|
5 |
|
|
// This file is part of the GNU ISO C++ Library. This library is free
|
6 |
|
|
// software; you can redistribute it and/or modify it under the
|
7 |
|
|
// terms of the GNU General Public License as published by the
|
8 |
|
|
// Free Software Foundation; either version 3, or (at your option)
|
9 |
|
|
// any later version.
|
10 |
|
|
|
11 |
|
|
// This library is distributed in the hope that it will be useful,
|
12 |
|
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
13 |
|
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
14 |
|
|
// GNU General Public License for more details.
|
15 |
|
|
|
16 |
|
|
// Under Section 7 of GPL version 3, you are granted additional
|
17 |
|
|
// permissions described in the GCC Runtime Library Exception, version
|
18 |
|
|
// 3.1, as published by the Free Software Foundation.
|
19 |
|
|
|
20 |
|
|
// You should have received a copy of the GNU General Public License and
|
21 |
|
|
// a copy of the GCC Runtime Library Exception along with this program;
|
22 |
|
|
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
|
23 |
|
|
// <http://www.gnu.org/licenses/>.
|
24 |
|
|
|
25 |
|
|
/** @file tr1/hashtable_policy.h
|
26 |
|
|
* This is an internal header file, included by other library headers.
|
27 |
|
|
* Do not attempt to use it directly.
|
28 |
|
|
* @headername{tr1/unordered_map, tr1/unordered_set}
|
29 |
|
|
*/
|
30 |
|
|
|
31 |
|
|
namespace std _GLIBCXX_VISIBILITY(default)
|
32 |
|
|
{
|
33 |
|
|
namespace tr1
|
34 |
|
|
{
|
35 |
|
|
namespace __detail
|
36 |
|
|
{
|
37 |
|
|
_GLIBCXX_BEGIN_NAMESPACE_VERSION
|
38 |
|
|
|
39 |
|
|
// Helper: distance(first, last) for forward (or stronger) iterators,
// or 0 for pure input iterators, where counting would consume the
// sequence.  Dispatch is by iterator-category tag.
template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator, _Iterator, std::input_iterator_tag)
  { return 0; }

template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __from, _Iterator __to,
		std::forward_iterator_tag)
  { return std::distance(__from, __to); }

// Entry point: select the overload above from the iterator's category.
template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __from, _Iterator __to)
  {
    typedef typename std::iterator_traits<_Iterator>::iterator_category
      _Cat;
    return __distance_fw(__from, __to, _Cat());
  }
|
60 |
|
|
|
61 |
|
|
// Auxiliary types used for all instantiations of _Hashtable: nodes
// and iterators.

// Nodes, used to wrap elements stored in the hash table.  A policy
// template parameter of class template _Hashtable controls whether
// nodes also store a hash code.  In some cases (e.g. strings) this
// may be a performance win.
template<typename _Value, bool __cache_hash_code>
  struct _Hash_node;

// Specialization for nodes that cache the element's hash code.
template<typename _Value>
  struct _Hash_node<_Value, true>
  {
    _Value _M_v;              // the stored element
    std::size_t _M_hash_code; // cached hash code for _M_v's key
    _Hash_node* _M_next;      // next node in the same bucket chain
  };

// Specialization for nodes that do not cache the hash code.
template<typename _Value>
  struct _Hash_node<_Value, false>
  {
    _Value _M_v;              // the stored element
    _Hash_node* _M_next;      // next node in the same bucket chain
  };
|
85 |
|
|
|
86 |
|
|
// Local iterators, used to iterate within a bucket but not between
// buckets.
template<typename _Value, bool __cache>
  struct _Node_iterator_base
  {
    // Wrap a node pointer; a null pointer acts as the end of a chain.
    _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
    : _M_cur(__p) { }

    // Advance to the next node in the current bucket's chain.
    // Precondition: _M_cur is non-null.
    void
    _M_incr()
    { _M_cur = _M_cur->_M_next; }

    _Hash_node<_Value, __cache>* _M_cur;
  };

// Two local iterators are equal iff they refer to the same node.
template<typename _Value, bool __cache>
  inline bool
  operator==(const _Node_iterator_base<_Value, __cache>& __x,
	     const _Node_iterator_base<_Value, __cache>& __y)
  { return __x._M_cur == __y._M_cur; }

template<typename _Value, bool __cache>
  inline bool
  operator!=(const _Node_iterator_base<_Value, __cache>& __x,
	     const _Node_iterator_base<_Value, __cache>& __y)
  { return __x._M_cur != __y._M_cur; }
112 |
|
|
|
113 |
|
|
// Mutable local (per-bucket) forward iterator.  When
// __constant_iterators is true, pointer and reference are const-
// qualified even for this non-const iterator.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Node_iterator
  : public _Node_iterator_base<_Value, __cache>
  {
    typedef _Value value_type;
    typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value*, _Value*>::__type
      pointer;
    typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value&, _Value&>::__type
      reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    // Default constructor: a null (singular/end-of-chain) iterator.
    _Node_iterator()
    : _Node_iterator_base<_Value, __cache>(0) { }

    explicit
    _Node_iterator(_Hash_node<_Value, __cache>* __p)
    : _Node_iterator_base<_Value, __cache>(__p) { }

    reference
    operator*() const
    { return this->_M_cur->_M_v; }

    // __addressof guards against a _Value that overloads unary &.
    pointer
    operator->() const
    { return std::__addressof(this->_M_cur->_M_v); }

    _Node_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Node_iterator
    operator++(int)
    {
      _Node_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
|
159 |
|
|
|
160 |
|
|
// Constant local (per-bucket) forward iterator; pointer and reference
// are always const-qualified.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Node_const_iterator
  : public _Node_iterator_base<_Value, __cache>
  {
    typedef _Value value_type;
    typedef const _Value* pointer;
    typedef const _Value& reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    // Default constructor: a null (singular/end-of-chain) iterator.
    _Node_const_iterator()
    : _Node_iterator_base<_Value, __cache>(0) { }

    explicit
    _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
    : _Node_iterator_base<_Value, __cache>(__p) { }

    // Implicit conversion from the mutable iterator type.
    _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
			 __cache>& __x)
    : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

    reference
    operator*() const
    { return this->_M_cur->_M_v; }

    // __addressof guards against a _Value that overloads unary &.
    pointer
    operator->() const
    { return std::__addressof(this->_M_cur->_M_v); }

    _Node_const_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Node_const_iterator
    operator++(int)
    {
      _Node_const_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
|
204 |
|
|
|
205 |
|
|
// Common state and stepping logic for the whole-table iterators:
// a node pointer plus a pointer into the bucket array, so iteration
// can cross bucket boundaries.
template<typename _Value, bool __cache>
  struct _Hashtable_iterator_base
  {
    _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
			     _Hash_node<_Value, __cache>** __bucket)
    : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

    // Step to the next element; when the current bucket's chain is
    // exhausted, advance to the next non-empty bucket.
    void
    _M_incr()
    {
      _M_cur_node = _M_cur_node->_M_next;
      if (!_M_cur_node)
	_M_incr_bucket();
    }

    void
    _M_incr_bucket();

    _Hash_node<_Value, __cache>* _M_cur_node;    // current element
    _Hash_node<_Value, __cache>** _M_cur_bucket; // bucket containing it
  };
|
226 |
|
|
|
227 |
|
|
// Global iterators, used for arbitrary iteration within a hash
// table.  Larger and more expensive than local iterators.
template<typename _Value, bool __cache>
  void
  _Hashtable_iterator_base<_Value, __cache>::
  _M_incr_bucket()
  {
    ++_M_cur_bucket;

    // This loop requires the bucket array to have a non-null sentinel
    // past the last real bucket; there is deliberately no bounds
    // check, so without the sentinel this would run off the array.
    while (!*_M_cur_bucket)
      ++_M_cur_bucket;
    _M_cur_node = *_M_cur_bucket;
  }
|
241 |
|
|
|
242 |
|
|
// Two global iterators are equal iff they refer to the same node;
// the bucket pointer does not participate in the comparison.
template<typename _Value, bool __cache>
  inline bool
  operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
	     const _Hashtable_iterator_base<_Value, __cache>& __y)
  { return __x._M_cur_node == __y._M_cur_node; }

template<typename _Value, bool __cache>
  inline bool
  operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
	     const _Hashtable_iterator_base<_Value, __cache>& __y)
  { return __x._M_cur_node != __y._M_cur_node; }
|
253 |
|
|
|
254 |
|
|
// Mutable whole-table forward iterator.  When __constant_iterators is
// true, pointer and reference are const-qualified even here.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Hashtable_iterator
  : public _Hashtable_iterator_base<_Value, __cache>
  {
    typedef _Value value_type;
    typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value*, _Value*>::__type
      pointer;
    typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
				    const _Value&, _Value&>::__type
      reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    // Default constructor: null node and null bucket pointer.
    _Hashtable_iterator()
    : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

    _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
			_Hash_node<_Value, __cache>** __b)
    : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

    // Start at the first node of bucket __b (which must hold the
    // desired node pointer, possibly null only for the sentinel).
    explicit
    _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
    : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

    reference
    operator*() const
    { return this->_M_cur_node->_M_v; }

    // __addressof guards against a _Value that overloads unary &.
    pointer
    operator->() const
    { return std::__addressof(this->_M_cur_node->_M_v); }

    _Hashtable_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Hashtable_iterator
    operator++(int)
    {
      _Hashtable_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
|
304 |
|
|
|
305 |
|
|
// Constant whole-table forward iterator; pointer and reference are
// always const-qualified.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Hashtable_const_iterator
  : public _Hashtable_iterator_base<_Value, __cache>
  {
    typedef _Value value_type;
    typedef const _Value* pointer;
    typedef const _Value& reference;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;

    // Default constructor: null node and null bucket pointer.
    _Hashtable_const_iterator()
    : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

    _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
			      _Hash_node<_Value, __cache>** __b)
    : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

    // Start at the first node of bucket __b.
    explicit
    _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
    : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

    // Implicit conversion from the mutable iterator type.
    _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
			      __constant_iterators, __cache>& __x)
    : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
						__x._M_cur_bucket) { }

    reference
    operator*() const
    { return this->_M_cur_node->_M_v; }

    // __addressof guards against a _Value that overloads unary &.
    pointer
    operator->() const
    { return std::__addressof(this->_M_cur_node->_M_v); }

    _Hashtable_const_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Hashtable_const_iterator
    operator++(int)
    {
      _Hashtable_const_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
|
354 |
|
|
|
355 |
|
|
|
356 |
|
|
// Many of class template _Hashtable's template parameters are policy
|
357 |
|
|
// classes. These are defaults for the policies.
|
358 |
|
|
|
359 |
|
|
// Default range hashing function: use division to fold a large number
// into the range [0, N).
struct _Mod_range_hashing
{
  typedef std::size_t first_argument_type;
  typedef std::size_t second_argument_type;
  typedef std::size_t result_type;

  // Map hash value __num into [0, __den); __den is the bucket count
  // and must be non-zero (modulo by zero is undefined).
  result_type
  operator()(first_argument_type __num, second_argument_type __den) const
  { return __num % __den; }
};
|
371 |
|
|
|
372 |
|
|
// Default ranged hash function H.  In principle it should be a
// function object composed from objects of type H1 and H2 such that
// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
// h1 and h2.  So instead we'll just use a tag to tell class template
// hashtable to do that composition.
// Empty tag type: carries no state and is never invoked directly.
struct _Default_ranged_hash { };
|
378 |
|
|
|
379 |
|
|
// Default value for rehash policy.  Bucket size is (usually) the
// smallest prime that keeps the load factor small enough.
struct _Prime_rehash_policy
{
  // __z is the maximum load factor (average elements per bucket).
  _Prime_rehash_policy(float __z = 1.0)
  : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

  float
  max_load_factor() const
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n.
  std::size_t
  _M_next_bkt(std::size_t __n) const;

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const;

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted.  Do we need to
  // increase bucket count?  If so, return make_pair(true, n), where n
  // is the new bucket count.  If not, return make_pair(false, 0).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		 std::size_t __n_ins) const;

  // Size of the precomputed prime table; larger on targets where
  // unsigned long is 64 bits wide.
  enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

  float _M_max_load_factor;   // load-factor ceiling
  float _M_growth_factor;     // minimum growth multiplier when rehashing
  // Cached element count at which the next rehash becomes necessary;
  // mutable because the const member functions above maintain it.
  mutable std::size_t _M_next_resize;
};
|
412 |
|
|
|
413 |
|
|
extern const unsigned long __prime_list[];
|
414 |
|
|
|
415 |
|
|
// XXX This is a hack.  There's no good reason for any of
// _Prime_rehash_policy's member functions to be inline.

// Return a prime no smaller than n.
inline std::size_t
_Prime_rehash_policy::
_M_next_bkt(std::size_t __n) const
{
  // Binary-search the precomputed prime table for the first
  // prime >= __n.
  const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
					      + _S_n_primes, __n);
  // Cache the element count at which this bucket count exceeds the
  // maximum load factor, so later insertions can avoid the
  // floating-point computation.
  _M_next_resize =
    static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
  return *__p;
}
|
429 |
|
|
|
430 |
|
|
// Return the smallest prime p such that alpha p >= n, where alpha
// is the load factor.
inline std::size_t
_Prime_rehash_policy::
_M_bkt_for_elements(std::size_t __n) const
{
  // Minimum bucket count that keeps __n elements within the
  // maximum load factor.
  const float __min_bkts = __n / _M_max_load_factor;
  const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
					      + _S_n_primes, __min_bkts);
  // Refresh the cached rehash threshold for the chosen bucket count.
  _M_next_resize =
    static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
  return *__p;
}
|
443 |
|
|
|
444 |
|
|
// Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
// If p > __n_bkt, return make_pair(true, p); otherwise return
// make_pair(false, 0).  In principle this isn't very different from
// _M_bkt_for_elements.

// The only tricky part is that we're caching the element count at
// which we need to rehash, so we don't have to do a floating-point
// multiply for every insertion.

inline std::pair<bool, std::size_t>
_Prime_rehash_policy::
_M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
	       std::size_t __n_ins) const
{
  // Fast path: below the cached threshold nothing needs to change.
  if (__n_elt + __n_ins > _M_next_resize)
    {
      // Minimum bucket count that keeps the load factor within bounds.
      float __min_bkts = ((float(__n_ins) + float(__n_elt))
			  / _M_max_load_factor);
      if (__min_bkts > __n_bkt)
	{
	  // Grow by at least _M_growth_factor to amortize the cost of
	  // repeated rehashes, then round up to a prime.
	  __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
	  const unsigned long* __p =
	    std::lower_bound(__prime_list, __prime_list + _S_n_primes,
			     __min_bkts);
	  _M_next_resize = static_cast<std::size_t>
	    (__builtin_ceil(*__p * _M_max_load_factor));
	  return std::make_pair(true, *__p);
	}
      else
	{
	  // Current bucket count still suffices; cache the real
	  // threshold so the fast path is taken next time.
	  _M_next_resize = static_cast<std::size_t>
	    (__builtin_ceil(__n_bkt * _M_max_load_factor));
	  return std::make_pair(false, 0);
	}
    }
  else
    return std::make_pair(false, 0);
}
|
482 |
|
|
|
483 |
|
|
// Base classes for std::tr1::_Hashtable. We define these base
|
484 |
|
|
// classes because in some cases we want to do different things
|
485 |
|
|
// depending on the value of a policy class. In some cases the
|
486 |
|
|
// policy class affects which member functions and nested typedefs
|
487 |
|
|
// are defined; we handle that by specializing base class templates.
|
488 |
|
|
// Several of the base class templates need to access other members
|
489 |
|
|
// of class template _Hashtable, so we use the "curiously recurring
|
490 |
|
|
// template pattern" for them.
|
491 |
|
|
|
492 |
|
|
// class template _Map_base.  If the hashtable has a value type of the
// form pair<T1, T2> and a key extraction policy that returns the
// first part of the pair, the hashtable gets a mapped_type typedef.
// If it satisfies those criteria and also has unique keys, then it
// also gets an operator[].
template<typename _Key, typename _Value, typename _Ex, bool __unique,
	 typename _Hashtable>
  struct _Map_base { };

// Pair values extracted with _Select1st but non-unique keys
// (multimap-style): expose only mapped_type.
template<typename _Key, typename _Pair, typename _Hashtable>
  struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
  {
    typedef typename _Pair::second_type mapped_type;
  };

// Unique keys (map-style): also provide operator[].
template<typename _Key, typename _Pair, typename _Hashtable>
  struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
  {
    typedef typename _Pair::second_type mapped_type;

    // Return a reference to the mapped value for __k, inserting a
    // value-initialized element first if the key is absent.
    mapped_type&
    operator[](const _Key& __k);
  };
|
515 |
|
|
|
516 |
|
|
// Find-or-insert implementation of operator[] for the unique-keys
// specialization above.
template<typename _Key, typename _Pair, typename _Hashtable>
  typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
		     true, _Hashtable>::mapped_type&
  _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
  operator[](const _Key& __k)
  {
    // Curiously-recurring template pattern: the derived _Hashtable
    // supplies the hashing and lookup machinery.
    _Hashtable* __h = static_cast<_Hashtable*>(this);
    typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
    std::size_t __n = __h->_M_bucket_index(__k, __code,
					   __h->_M_bucket_count);

    typename _Hashtable::_Node* __p =
      __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
    if (!__p)
      // Key absent: insert a pair with a value-initialized mapped
      // value into the bucket already computed above.
      return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
				   __n, __code)->second;
    return (__p->_M_v).second;
  }
|
534 |
|
|
|
535 |
|
|
// class template _Rehash_base. Give hashtable the max_load_factor
|
536 |
|
|
// functions iff the rehash policy is _Prime_rehash_policy.
|
537 |
|
|
template<typename _RehashPolicy, typename _Hashtable>
|
538 |
|
|
struct _Rehash_base { };
|
539 |
|
|
|
540 |
|
|
template<typename _Hashtable>
|
541 |
|
|
struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
|
542 |
|
|
{
|
543 |
|
|
float
|
544 |
|
|
max_load_factor() const
|
545 |
|
|
{
|
546 |
|
|
const _Hashtable* __this = static_cast<const _Hashtable*>(this);
|
547 |
|
|
return __this->__rehash_policy().max_load_factor();
|
548 |
|
|
}
|
549 |
|
|
|
550 |
|
|
void
|
551 |
|
|
max_load_factor(float __z)
|
552 |
|
|
{
|
553 |
|
|
_Hashtable* __this = static_cast<_Hashtable*>(this);
|
554 |
|
|
__this->__rehash_policy(_Prime_rehash_policy(__z));
|
555 |
|
|
}
|
556 |
|
|
};
|
557 |
|
|
|
558 |
|
|
// Class template _Hash_code_base.  Encapsulates two policy issues that
// aren't quite orthogonal.
//   (1) the difference between using a ranged hash function and using
//   the combination of a hash function and a range-hashing function.
//   In the former case we don't have such things as hash codes, so
//   we have a dummy type as placeholder.
//   (2) Whether or not we cache hash codes.  Caching hash codes is
//   meaningless if we have a ranged hash function.
// We also put the key extraction and equality comparison function
// objects here, for convenience.

// Primary template: unused except as a hook for specializations.
template<typename _Key, typename _Value,
	 typename _ExtractKey, typename _Equal,
	 typename _H1, typename _H2, typename _Hash,
	 bool __cache_hash_code>
  struct _Hash_code_base;
|
575 |
|
|
|
576 |
|
|
// Specialization: ranged hash function, no caching hash codes.  H1
// and H2 are provided but ignored.  We define a dummy hash code type.
template<typename _Key, typename _Value,
	 typename _ExtractKey, typename _Equal,
	 typename _H1, typename _H2, typename _Hash>
  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			 _Hash, false>
  {
  protected:
    // H1 and H2 arguments are accepted for interface uniformity but
    // discarded; only the ranged hash __h is kept.
    _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		    const _H1&, const _H2&, const _Hash& __h)
    : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

    // Placeholder: with a ranged hash there is no separate hash-code
    // step, so the "code" is a dummy and always 0.
    typedef void* _Hash_code_type;

    _Hash_code_type
    _M_hash_code(const _Key& __key) const
    { return 0; }

    // Bucket index comes straight from the ranged hash of the key.
    std::size_t
    _M_bucket_index(const _Key& __k, _Hash_code_type,
		    std::size_t __n) const
    { return _M_ranged_hash(__k, __n); }

    std::size_t
    _M_bucket_index(const _Hash_node<_Value, false>* __p,
		    std::size_t __n) const
    { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

    // No cached code to pre-filter with: compare keys directly.
    bool
    _M_compare(const _Key& __k, _Hash_code_type,
	       _Hash_node<_Value, false>* __n) const
    { return _M_eq(__k, _M_extract(__n->_M_v)); }

    // Nodes carry no cached hash code, so storing/copying is a no-op.
    void
    _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
    { }

    void
    _M_copy_code(_Hash_node<_Value, false>*,
		 const _Hash_node<_Value, false>*) const
    { }

    void
    _M_swap(_Hash_code_base& __x)
    {
      std::swap(_M_extract, __x._M_extract);
      std::swap(_M_eq, __x._M_eq);
      std::swap(_M_ranged_hash, __x._M_ranged_hash);
    }

  protected:
    _ExtractKey _M_extract;   // key-from-value extractor
    _Equal _M_eq;             // key equality predicate
    _Hash _M_ranged_hash;     // h(k, N) -> [0, N)
  };
|
632 |
|
|
|
633 |
|
|
|
634 |
|
|
// No specialization for ranged hash function while caching hash codes.
// That combination is meaningless, and trying to do it is an error.

// Specialization: ranged hash function, cache hash codes.  This
// combination is meaningless, so we provide only a declaration
// and no definition: instantiating it fails at compile time.
template<typename _Key, typename _Value,
	 typename _ExtractKey, typename _Equal,
	 typename _H1, typename _H2, typename _Hash>
  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			 _Hash, true>;
|
646 |
|
|
|
647 |
|
|
// Specialization: hash function and range-hashing function, no
// caching of hash codes.  H is provided but ignored.  Provides
// typedef and accessor required by TR1.
template<typename _Key, typename _Value,
	 typename _ExtractKey, typename _Equal,
	 typename _H1, typename _H2>
  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			 _Default_ranged_hash, false>
  {
    typedef _H1 hasher;

    hasher
    hash_function() const
    { return _M_h1; }

  protected:
    _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		    const _H1& __h1, const _H2& __h2,
		    const _Default_ranged_hash&)
    : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

    // Real hash codes: _H1 hashes the key, _H2 folds the code into
    // the bucket range.
    typedef std::size_t _Hash_code_type;

    _Hash_code_type
    _M_hash_code(const _Key& __k) const
    { return _M_h1(__k); }

    std::size_t
    _M_bucket_index(const _Key&, _Hash_code_type __c,
		    std::size_t __n) const
    { return _M_h2(__c, __n); }

    // No cached code in the node: recompute the hash from the key.
    std::size_t
    _M_bucket_index(const _Hash_node<_Value, false>* __p,
		    std::size_t __n) const
    { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

    bool
    _M_compare(const _Key& __k, _Hash_code_type,
	       _Hash_node<_Value, false>* __n) const
    { return _M_eq(__k, _M_extract(__n->_M_v)); }

    // Nodes carry no cached hash code, so storing/copying is a no-op.
    void
    _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
    { }

    void
    _M_copy_code(_Hash_node<_Value, false>*,
		 const _Hash_node<_Value, false>*) const
    { }

    void
    _M_swap(_Hash_code_base& __x)
    {
      std::swap(_M_extract, __x._M_extract);
      std::swap(_M_eq, __x._M_eq);
      std::swap(_M_h1, __x._M_h1);
      std::swap(_M_h2, __x._M_h2);
    }

  protected:
    _ExtractKey _M_extract;   // key-from-value extractor
    _Equal _M_eq;             // key equality predicate
    _H1 _M_h1;                // hash function: key -> code
    _H2 _M_h2;                // range hash: (code, N) -> [0, N)
  };
|
713 |
|
|
|
714 |
|
|
// Specialization: hash function and range-hashing function,
// caching hash codes.  H is provided but ignored.  Provides
// typedef and accessor required by TR1.
template<typename _Key, typename _Value,
	 typename _ExtractKey, typename _Equal,
	 typename _H1, typename _H2>
  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
			 _Default_ranged_hash, true>
  {
    typedef _H1 hasher;

    hasher
    hash_function() const
    { return _M_h1; }

  protected:
    _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
		    const _H1& __h1, const _H2& __h2,
		    const _Default_ranged_hash&)
    : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

    typedef std::size_t _Hash_code_type;

    _Hash_code_type
    _M_hash_code(const _Key& __k) const
    { return _M_h1(__k); }

    std::size_t
    _M_bucket_index(const _Key&, _Hash_code_type __c,
		    std::size_t __n) const
    { return _M_h2(__c, __n); }

    // Use the code cached in the node; the key is never re-hashed.
    std::size_t
    _M_bucket_index(const _Hash_node<_Value, true>* __p,
		    std::size_t __n) const
    { return _M_h2(__p->_M_hash_code, __n); }

    // Cheap integer pre-filter on the cached code before invoking the
    // (potentially expensive) equality predicate.
    bool
    _M_compare(const _Key& __k, _Hash_code_type __c,
	       _Hash_node<_Value, true>* __n) const
    { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

    // Record the hash code in a newly built node.
    void
    _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
    { __n->_M_hash_code = __c; }

    // Propagate the cached code when cloning a node.
    void
    _M_copy_code(_Hash_node<_Value, true>* __to,
		 const _Hash_node<_Value, true>* __from) const
    { __to->_M_hash_code = __from->_M_hash_code; }

    void
    _M_swap(_Hash_code_base& __x)
    {
      std::swap(_M_extract, __x._M_extract);
      std::swap(_M_eq, __x._M_eq);
      std::swap(_M_h1, __x._M_h1);
      std::swap(_M_h2, __x._M_h2);
    }

  protected:
    _ExtractKey _M_extract;   // key-from-value extractor
    _Equal _M_eq;             // key equality predicate
    _H1 _M_h1;                // hash function: key -> code
    _H2 _M_h2;                // range hash: (code, N) -> [0, N)
  };
|
780 |
|
|
_GLIBCXX_END_NAMESPACE_VERSION
|
781 |
|
|
} // namespace __detail
|
782 |
|
|
}
|
783 |
|
|
}
|