OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [tags/] [gnu-src/] [gcc-4.5.1/] [gcc-4.5.1-or32-1.0rc4/] [libstdc++-v3/] [include/] [bits/] [hashtable_policy.h] - Blame information for rev 519

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 424 jeremybenn
// Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
 
3
// Copyright (C) 2010 Free Software Foundation, Inc.
4
//
5
// This file is part of the GNU ISO C++ Library.  This library is free
6
// software; you can redistribute it and/or modify it under the
7
// terms of the GNU General Public License as published by the
8
// Free Software Foundation; either version 3, or (at your option)
9
// any later version.
10
 
11
// This library is distributed in the hope that it will be useful,
12
// but WITHOUT ANY WARRANTY; without even the implied warranty of
13
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
// GNU General Public License for more details.
15
 
16
// Under Section 7 of GPL version 3, you are granted additional
17
// permissions described in the GCC Runtime Library Exception, version
18
// 3.1, as published by the Free Software Foundation.
19
 
20
// You should have received a copy of the GNU General Public License and
21
// a copy of the GCC Runtime Library Exception along with this program;
22
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
23
// <http://www.gnu.org/licenses/>.
24
 
25
/** @file bits/hashtable_policy.h
26
 *  This is an internal header file, included by other library headers.
27
 *  You should not attempt to use it directly.
28
 */
29
 
30
#ifndef _HASHTABLE_POLICY_H
31
#define _HASHTABLE_POLICY_H 1
32
 
33
namespace std
34
{
35
namespace __detail
36
{
37
  // Tag-dispatched distance helper: forward (or stronger) iterators
  // report their true distance; single-pass input iterators report 0,
  // because walking them to count would consume the sequence.
  template<class _Iter>
    inline typename std::iterator_traits<_Iter>::difference_type
    __distance_fw(_Iter, _Iter, std::input_iterator_tag)
    { return 0; }

  template<class _Iter>
    inline typename std::iterator_traits<_Iter>::difference_type
    __distance_fw(_Iter __from, _Iter __to, std::forward_iterator_tag)
    { return std::distance(__from, __to); }

  // Entry point: dispatch on the iterator's category tag.
  template<class _Iter>
    inline typename std::iterator_traits<_Iter>::difference_type
    __distance_fw(_Iter __from, _Iter __to)
    {
      typedef typename std::iterator_traits<_Iter>::iterator_category _Cat;
      return __distance_fw(__from, __to, _Cat());
    }
58
 
59
  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code. In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  // Specialization for nodes that cache the element's hash code,
  // trading one std::size_t per node for cheaper rehash/compare.
  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;          // the stored element
      std::size_t  _M_hash_code;  // cached hash code for _M_v's key
      _Hash_node*  _M_next;       // next node in the same bucket chain

      // Construct _M_v in place from __args; the hash code and next
      // pointer are value-initialized (zero) and filled in later by
      // the owning hashtable.
      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_hash_code(), _M_next() { }
    };

  // Specialization for nodes that do not cache the hash code.
  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;     // the stored element
      _Hash_node*  _M_next;  // next node in the same bucket chain

      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_next() { }
    };
93
 
94
  // Local iterators, used to iterate within a bucket but not between
  // buckets.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      // Advance to the next node of the current bucket chain.
      // Precondition: _M_cur is non-null.
      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>*  _M_cur;  // current node (null == end)
    };

  // Two local iterators are equal iff they refer to the same node.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }
120
 
121
  template<typename _Value, bool __constant_iterators, bool __cache>
122
    struct _Node_iterator
123
    : public _Node_iterator_base<_Value, __cache>
124
    {
125
      typedef _Value                                   value_type;
126
      typedef typename std::conditional<__constant_iterators,
127
                                        const _Value*, _Value*>::type
128
                                                       pointer;
129
      typedef typename std::conditional<__constant_iterators,
130
                                        const _Value&, _Value&>::type
131
                                                       reference;
132
      typedef std::ptrdiff_t                           difference_type;
133
      typedef std::forward_iterator_tag                iterator_category;
134
 
135
      _Node_iterator()
136
      : _Node_iterator_base<_Value, __cache>(0) { }
137
 
138
      explicit
139
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
140
      : _Node_iterator_base<_Value, __cache>(__p) { }
141
 
142
      reference
143
      operator*() const
144
      { return this->_M_cur->_M_v; }
145
 
146
      pointer
147
      operator->() const
148
      { return &this->_M_cur->_M_v; }
149
 
150
      _Node_iterator&
151
      operator++()
152
      {
153
        this->_M_incr();
154
        return *this;
155
      }
156
 
157
      _Node_iterator
158
      operator++(int)
159
      {
160
        _Node_iterator __tmp(*this);
161
        this->_M_incr();
162
        return __tmp;
163
      }
164
    };
165
 
166
  template<typename _Value, bool __constant_iterators, bool __cache>
167
    struct _Node_const_iterator
168
    : public _Node_iterator_base<_Value, __cache>
169
    {
170
      typedef _Value                                   value_type;
171
      typedef const _Value*                            pointer;
172
      typedef const _Value&                            reference;
173
      typedef std::ptrdiff_t                           difference_type;
174
      typedef std::forward_iterator_tag                iterator_category;
175
 
176
      _Node_const_iterator()
177
      : _Node_iterator_base<_Value, __cache>(0) { }
178
 
179
      explicit
180
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
181
      : _Node_iterator_base<_Value, __cache>(__p) { }
182
 
183
      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
184
                           __cache>& __x)
185
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }
186
 
187
      reference
188
      operator*() const
189
      { return this->_M_cur->_M_v; }
190
 
191
      pointer
192
      operator->() const
193
      { return &this->_M_cur->_M_v; }
194
 
195
      _Node_const_iterator&
196
      operator++()
197
      {
198
        this->_M_incr();
199
        return *this;
200
      }
201
 
202
      _Node_const_iterator
203
      operator++(int)
204
      {
205
        _Node_const_iterator __tmp(*this);
206
        this->_M_incr();
207
        return __tmp;
208
      }
209
    };
210
 
211
  // Base of the whole-table iterators: tracks both the current node
  // and the bucket slot that node lives in, so iteration can hop to
  // the next non-empty bucket when a chain is exhausted.
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      // Step to the next element, moving to the next non-empty bucket
      // when the current chain runs out.
      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;    // current element
      _Hash_node<_Value, __cache>**  _M_cur_bucket;  // its bucket slot
    };
232
 
233
  // Global iterators, used for arbitrary iteration within a hash
  // table.  Larger and more expensive than local iterators.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel
      // past its last real bucket; otherwise it would run off the end.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }

  // Equality compares only the node pointer: distinct nodes imply
  // distinct buckets, and end iterators share the sentinel node.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }
259
 
260
  template<typename _Value, bool __constant_iterators, bool __cache>
261
    struct _Hashtable_iterator
262
    : public _Hashtable_iterator_base<_Value, __cache>
263
    {
264
      typedef _Value                                   value_type;
265
      typedef typename std::conditional<__constant_iterators,
266
                                        const _Value*, _Value*>::type
267
                                                       pointer;
268
      typedef typename std::conditional<__constant_iterators,
269
                                        const _Value&, _Value&>::type
270
                                                       reference;
271
      typedef std::ptrdiff_t                           difference_type;
272
      typedef std::forward_iterator_tag                iterator_category;
273
 
274
      _Hashtable_iterator()
275
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }
276
 
277
      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
278
                          _Hash_node<_Value, __cache>** __b)
279
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }
280
 
281
      explicit
282
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
283
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }
284
 
285
      reference
286
      operator*() const
287
      { return this->_M_cur_node->_M_v; }
288
 
289
      pointer
290
      operator->() const
291
      { return &this->_M_cur_node->_M_v; }
292
 
293
      _Hashtable_iterator&
294
      operator++()
295
      {
296
        this->_M_incr();
297
        return *this;
298
      }
299
 
300
      _Hashtable_iterator
301
      operator++(int)
302
      {
303
        _Hashtable_iterator __tmp(*this);
304
        this->_M_incr();
305
        return __tmp;
306
      }
307
    };
308
 
309
  template<typename _Value, bool __constant_iterators, bool __cache>
310
    struct _Hashtable_const_iterator
311
    : public _Hashtable_iterator_base<_Value, __cache>
312
    {
313
      typedef _Value                                   value_type;
314
      typedef const _Value*                            pointer;
315
      typedef const _Value&                            reference;
316
      typedef std::ptrdiff_t                           difference_type;
317
      typedef std::forward_iterator_tag                iterator_category;
318
 
319
      _Hashtable_const_iterator()
320
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }
321
 
322
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
323
                                _Hash_node<_Value, __cache>** __b)
324
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }
325
 
326
      explicit
327
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
328
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }
329
 
330
      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
331
                                __constant_iterators, __cache>& __x)
332
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
333
                                                  __x._M_cur_bucket) { }
334
 
335
      reference
336
      operator*() const
337
      { return this->_M_cur_node->_M_v; }
338
 
339
      pointer
340
      operator->() const
341
      { return &this->_M_cur_node->_M_v; }
342
 
343
      _Hashtable_const_iterator&
344
      operator++()
345
      {
346
        this->_M_incr();
347
        return *this;
348
      }
349
 
350
      _Hashtable_const_iterator
351
      operator++(int)
352
      {
353
        _Hashtable_const_iterator __tmp(*this);
354
        this->_M_incr();
355
        return __tmp;
356
      }
357
    };
358
 
359
 
360
  // Many of class template _Hashtable's template parameters are policy
361
  // classes.  These are defaults for the policies.
362
 
363
  // Default range-hashing function: fold an arbitrary hash value into
  // the bucket range [0, N) by taking the remainder of division by N.
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    // Map __hash into [0, __nbkt) via modulo; __nbkt must be non-zero.
    result_type
    operator()(first_argument_type __hash, second_argument_type __nbkt) const
    { return __hash % __nbkt; }
  };
375
 
376
  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition itself.  (Empty tag type: no
  // state, never actually invoked.)
  struct _Default_ranged_hash { };
382
 
383
  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    // __z is the maximum load factor (average elements per bucket)
    // tolerated before a rehash is required.
    _Prime_rehash_policy(float __z = 1.0)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    // Number of entries in __prime_list: 64-bit unsigned long targets
    // get 48 extra primes beyond the 32-bit range.
    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float                _M_max_load_factor;
    float                _M_growth_factor;   // bucket-count growth multiplier
    // Cached element count at which the next rehash becomes necessary;
    // mutable because the const query members refresh it, avoiding a
    // floating-point multiply on every insertion.
    mutable std::size_t  _M_next_resize;
  };
416
 
417
  // Sorted table of candidate prime bucket counts; defined elsewhere
  // in the library, searched with std::lower_bound below.
  extern const unsigned long __prime_list[];

  // XXX This is a hack.  There's no good reason for any of
  // _Prime_rehash_policy's member functions to be inline.
421
 
422
  // Return a prime no smaller than n.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    // Binary-search the sorted prime table for the first prime >= __n.
    // NOTE(review): if __n exceeds the largest listed prime, __p is the
    // past-the-end pointer and *__p below is out of bounds -- presumably
    // callers never request such counts; confirm before reusing.
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __n);
    // Cache the element count at which the next rehash will be due.
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
433
 
434
  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    // Minimum bucket count that keeps the load factor within bounds.
    const float __min_bkts = __n / _M_max_load_factor;
    // lower_bound compares the unsigned long primes against the float
    // threshold, yielding the first prime >= __min_bkts.
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __min_bkts);
    // Refresh the cached rehash threshold for the chosen bucket count.
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
447
 
448
  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    // Slow path only once the cached threshold is crossed; otherwise
    // the final else below answers "no rehash" with no float math.
    if (__n_elt + __n_ins > _M_next_resize)
      {
        // Bucket count needed to keep the new total within the load factor.
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            // Grow by at least _M_growth_factor to amortize rehash cost,
            // then round up to a prime from the table.
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            const unsigned long* __p =
              std::lower_bound(__prime_list, __prime_list + _S_n_primes,
                               __min_bkts);
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(*__p * _M_max_load_factor));
            return std::make_pair(true, *__p);
          }
        else
          {
            // Current bucket count still suffices; refresh the cached
            // threshold so future insertions take the fast path again.
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
486
 
487
  // Base classes for std::_Hashtable.  We define these base classes
  // because in some cases we want to do different things depending
  // on the value of a policy class.  In some cases the policy class
  // affects which member functions and nested typedefs are defined;
  // we handle that by specializing base class templates.  Several of
  // the base class templates need to access other members of class
  // template _Hashtable, so we use the "curiously recurring template
  // pattern" for them.

  // class template _Map_base.  If the hashtable has a value type of
  // the form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };

  // Non-unique keys (multimap flavour): mapped_type only -- indexing
  // is ill-defined when a key may map to several values.
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };
510
 
511
  // Unique-keys map flavour: mapped_type plus operator[] and at().
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      // Inserts a default-constructed mapped value if __k is absent.
      mapped_type&
      operator[](const _Key& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      mapped_type&
      at(const _Key& __k);

      const mapped_type&
      at(const _Key& __k) const;
    };
527
 
528
  // operator[]: find __k, inserting a (key, default value) pair when
  // absent, and return a reference to the mapped value.
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      // CRTP downcast: _Hashtable derives from this base.
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        // Key absent: insert into bucket __n and hand back the freshly
        // inserted mapped value.
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
546
 
547
  // at(): same lookup as operator[], but a missing key throws
  // std::out_of_range instead of inserting.
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }
564
 
565
  // Const flavour of at(): identical lookup, throws when absent.
  template<typename _Key, typename _Pair, typename _Hashtable>
    const typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                             true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k) const
    {
      const _Hashtable* __h = static_cast<const _Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }
582
 
583
  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions and reserve iff the rehash policy is _Prime_rehash_policy.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };

  template<typename _Hashtable>
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
    {
      float
      max_load_factor() const
      {
        // CRTP downcast, as with _Map_base.
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
        return __this->__rehash_policy().max_load_factor();
      }

      // Setting the load factor installs a whole new policy object.
      void
      max_load_factor(float __z)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->__rehash_policy(_Prime_rehash_policy(__z));
      }

      // Grow the table so at least __n elements fit without exceeding
      // the maximum load factor.
      void
      reserve(std::size_t __n)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->rehash(__builtin_ceil(__n / max_load_factor()));
      }
    };
612
 
613
  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.

  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;
630
 
631
  // Specialization: ranged hash function, no caching hash codes.  H1
632
  // and H2 are provided but ignored.  We define a dummy hash code type.
633
  template<typename _Key, typename _Value,
634
           typename _ExtractKey, typename _Equal,
635
           typename _H1, typename _H2, typename _Hash>
636
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
637
                           _Hash, false>
638
    {
639
    protected:
640
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
641
                      const _H1&, const _H2&, const _Hash& __h)
642
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }
643
 
644
      typedef void* _Hash_code_type;
645
 
646
      _Hash_code_type
647
      _M_hash_code(const _Key& __key) const
648
      { return 0; }
649
 
650
      std::size_t
651
      _M_bucket_index(const _Key& __k, _Hash_code_type,
652
                      std::size_t __n) const
653
      { return _M_ranged_hash(__k, __n); }
654
 
655
      std::size_t
656
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
657
                      std::size_t __n) const
658
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }
659
 
660
      bool
661
      _M_compare(const _Key& __k, _Hash_code_type,
662
                 _Hash_node<_Value, false>* __n) const
663
      { return _M_eq(__k, _M_extract(__n->_M_v)); }
664
 
665
      void
666
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
667
      { }
668
 
669
      void
670
      _M_copy_code(_Hash_node<_Value, false>*,
671
                   const _Hash_node<_Value, false>*) const
672
      { }
673
 
674
      void
675
      _M_swap(_Hash_code_base& __x)
676
      {
677
        std::swap(_M_extract, __x._M_extract);
678
        std::swap(_M_eq, __x._M_eq);
679
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
680
      }
681
 
682
    protected:
683
      _ExtractKey  _M_extract;
684
      _Equal       _M_eq;
685
      _Hash        _M_ranged_hash;
686
    };
687
 
688
 
689
  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.

  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition: instantiating it is a (deliberate) link/compile
  // error.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;
701
 
702
  // Specialization: hash function and range-hashing function, no
703
  // caching of hash codes.  H is provided but ignored.  Provides
704
  // typedef and accessor required by TR1.  
705
  template<typename _Key, typename _Value,
706
           typename _ExtractKey, typename _Equal,
707
           typename _H1, typename _H2>
708
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
709
                           _Default_ranged_hash, false>
710
    {
711
      typedef _H1 hasher;
712
 
713
      hasher
714
      hash_function() const
715
      { return _M_h1; }
716
 
717
    protected:
718
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
719
                      const _H1& __h1, const _H2& __h2,
720
                      const _Default_ranged_hash&)
721
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }
722
 
723
      typedef std::size_t _Hash_code_type;
724
 
725
      _Hash_code_type
726
      _M_hash_code(const _Key& __k) const
727
      { return _M_h1(__k); }
728
 
729
      std::size_t
730
      _M_bucket_index(const _Key&, _Hash_code_type __c,
731
                      std::size_t __n) const
732
      { return _M_h2(__c, __n); }
733
 
734
      std::size_t
735
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
736
                      std::size_t __n) const
737
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }
738
 
739
      bool
740
      _M_compare(const _Key& __k, _Hash_code_type,
741
                 _Hash_node<_Value, false>* __n) const
742
      { return _M_eq(__k, _M_extract(__n->_M_v)); }
743
 
744
      void
745
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
746
      { }
747
 
748
      void
749
      _M_copy_code(_Hash_node<_Value, false>*,
750
                   const _Hash_node<_Value, false>*) const
751
      { }
752
 
753
      void
754
      _M_swap(_Hash_code_base& __x)
755
      {
756
        std::swap(_M_extract, __x._M_extract);
757
        std::swap(_M_eq, __x._M_eq);
758
        std::swap(_M_h1, __x._M_h1);
759
        std::swap(_M_h2, __x._M_h2);
760
      }
761
 
762
    protected:
763
      _ExtractKey  _M_extract;
764
      _Equal       _M_eq;
765
      _H1          _M_h1;
766
      _H2          _M_h2;
767
    };
768
 
769
  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      // The cached code on the node makes bucket re-computation cheap
      // (no call into _M_h1).
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      // Compare cached codes first: a cheap filter before the
      // potentially expensive key equality check.
      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;  // key-from-value extractor
      _Equal       _M_eq;       // key equality predicate
      _H1          _M_h1;       // key -> hash code
      _H2          _M_h2;       // (code, N) -> bucket index
    };
835
 
836
 
837
  // Class template _Equality_base.  This is for implementing equality
  // comparison for unordered containers, per N3068, by John Lakos and
  // Pablo Halpern.  Algorithmically, we follow closely the reference
  // implementations therein.
  //
  // Primary template: declared only.  The two partial specializations
  // below select the algorithm on __unique_keys — key-lookup comparison
  // for unique-key containers, equal-range permutation comparison for
  // multi-key containers.
  template<typename _ExtractKey, bool __unique_keys,
	   typename _Hashtable>
    struct _Equality_base;
844
 
845
  // Unique-keys specialization (unordered_set / unordered_map): each key
  // occurs at most once, so equality reduces to a per-element lookup in
  // the other container.
  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, true, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;
    };
850
 
851
  template<typename _ExtractKey, typename _Hashtable>
852
    bool
853
    _Equality_base<_ExtractKey, true, _Hashtable>::
854
    _M_equal(const _Hashtable& __other) const
855
    {
856
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);
857
 
858
      if (__this->size() != __other.size())
859
        return false;
860
 
861
      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
862
        {
863
          const auto __ity = __other.find(_ExtractKey()(*__itx));
864
          if (__ity == __other.end() || *__ity != *__itx)
865
            return false;
866
        }
867
      return true;
868
    }
869
 
870
  // Multi-keys specialization (unordered_multiset / unordered_multimap):
  // a key may occur several times, so corresponding equal ranges must be
  // compared as unordered sequences via _S_is_permutation.
  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, false, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;

    private:
      // Return true iff [__first1, __last1) is a permutation of the
      // equal-length sequence starting at the third argument.
      template<typename _Uiterator>
	static bool
	_S_is_permutation(_Uiterator, _Uiterator, _Uiterator);
    };
880
 
881
  // See std::is_permutation in N3068.
  //
  // Precondition: the second sequence has the same length as
  // [__first1, __last1).  Quadratic in the worst case, but cheap for the
  // common case where the two sequences share a long common prefix.
  template<typename _ExtractKey, typename _Hashtable>
    template<typename _Uiterator>
      bool
      _Equality_base<_ExtractKey, false, _Hashtable>::
      _S_is_permutation(_Uiterator __first1, _Uiterator __last1,
			_Uiterator __first2)
      {
	// Skip the longest common prefix; those positions trivially
	// correspond.
	for (; __first1 != __last1; ++__first1, ++__first2)
	  if (!(*__first1 == *__first2))
	    break;

	if (__first1 == __last1)
	  return true;

	// Materialize the end of the (equal-length) second remainder so
	// it can be scanned for occurrence counts.
	_Uiterator __last2 = __first2;
	std::advance(__last2, std::distance(__first1, __last1));

	for (_Uiterator __it1 = __first1; __it1 != __last1; ++__it1)
	  {
	    // Look for an earlier occurrence of *__it1 in the first
	    // remainder; if found, this value was already counted.
	    _Uiterator __tmp =  __first1;
	    while (__tmp != __it1 && !(*__tmp == *__it1))
	      ++__tmp;

	    // We've seen this one before.
	    if (__tmp != __it1)
	      continue;

	    // Count occurrences of *__it1 in the second remainder.
	    std::ptrdiff_t __n2 = 0;
	    for (__tmp = __first2; __tmp != __last2; ++__tmp)
	      if (*__tmp == *__it1)
		++__n2;

	    if (!__n2)
	      return false;

	    // Count occurrences in the first remainder from __it1 on
	    // (earlier duplicates were skipped by the continue above).
	    std::ptrdiff_t __n1 = 0;
	    for (__tmp = __it1; __tmp != __last1; ++__tmp)
	      if (*__tmp == *__it1)
		++__n1;

	    if (__n1 != __n2)
	      return false;
	  }
	return true;
      }
927
 
928
  template<typename _ExtractKey, typename _Hashtable>
929
    bool
930
    _Equality_base<_ExtractKey, false, _Hashtable>::
931
    _M_equal(const _Hashtable& __other) const
932
    {
933
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);
934
 
935
      if (__this->size() != __other.size())
936
        return false;
937
 
938
      for (auto __itx = __this->begin(); __itx != __this->end();)
939
        {
940
          const auto __xrange = __this->equal_range(_ExtractKey()(*__itx));
941
          const auto __yrange = __other.equal_range(_ExtractKey()(*__itx));
942
 
943
          if (std::distance(__xrange.first, __xrange.second)
944
              != std::distance(__yrange.first, __yrange.second))
945
            return false;
946
 
947
          if (!_S_is_permutation(__xrange.first,
948
                                 __xrange.second,
949
                                 __yrange.first))
950
            return false;
951
 
952
          __itx = __xrange.second;
953
        }
954
      return true;
955
    }
956
} // namespace __detail
957
}
958
 
959
#endif // _HASHTABLE_POLICY_H

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.