OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [tags/] [gnu-src/] [gcc-4.5.1/] [gcc-4.5.1-or32-1.0rc4/] [libstdc++-v3/] [include/] [tr1/] [hashtable_policy.h] - Blame information for rev 519

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 424 jeremybenn
// Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-
2
 
3
// Copyright (C) 2010 Free Software Foundation, Inc.
4
//
5
// This file is part of the GNU ISO C++ Library.  This library is free
6
// software; you can redistribute it and/or modify it under the
7
// terms of the GNU General Public License as published by the
8
// Free Software Foundation; either version 3, or (at your option)
9
// any later version.
10
 
11
// This library is distributed in the hope that it will be useful,
12
// but WITHOUT ANY WARRANTY; without even the implied warranty of
13
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
// GNU General Public License for more details.
15
 
16
// Under Section 7 of GPL version 3, you are granted additional
17
// permissions described in the GCC Runtime Library Exception, version
18
// 3.1, as published by the Free Software Foundation.
19
 
20
// You should have received a copy of the GNU General Public License and
21
// a copy of the GCC Runtime Library Exception along with this program;
22
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
23
// <http://www.gnu.org/licenses/>.
24
 
25
/** @file tr1/hashtable_policy.h
26
 *  This is an internal header file, included by other library headers.
27
 *  You should not attempt to use it directly.
28
 */
29
 
30
namespace std
31
{
32
namespace tr1
33
{
34
namespace __detail
35
{
36
  // Helper function: return distance(first, last) for forward
37
  // iterators, or 0 for input iterators.
38
  // Overload selected for plain input iterators: the range cannot be
  // measured without consuming it, so report a distance of zero.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::input_iterator_tag)
    {
      (void) __first;
      (void) __last;
      return 0;
    }
43
 
44
  // Overload selected for forward (or stronger) iterators: the range can
  // be traversed without side effects, so delegate to std::distance.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::forward_iterator_tag)
    {
      typedef typename std::iterator_traits<_Iterator>::difference_type
        _Dist;
      const _Dist __d = std::distance(__first, __last);
      return __d;
    }
49
 
50
  // Dispatcher: picks one of the tag overloads above based on the
  // iterator's category (forward iterators give the true distance,
  // input iterators give 0).
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      return __distance_fw(__first, __last,
                           typename std::iterator_traits<_Iterator>::
                           iterator_category());
    }
57
 
58
  // Auxiliary types used for all instantiations of _Hashtable: nodes
59
  // and iterators.
60
 
61
  // Nodes, used to wrap elements stored in the hash table.  A policy
62
  // template parameter of class template _Hashtable controls whether
63
  // nodes also store a hash code. In some cases (e.g. strings) this
64
  // may be a performance win.
65
  // Declared only; always used through the two partial specializations
  // below, which either cache the hash code alongside the value or not.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;
67
 
68
  // Node specialization that caches the element's hash code, avoiding
  // recomputation during rehash and lookup.
  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;          // stored element
      std::size_t  _M_hash_code;  // cached hash of _M_v's key
      _Hash_node*  _M_next;       // next node in the same bucket chain
    };
75
 
76
  // Node specialization without a cached hash code; smaller nodes, but
  // the hash must be recomputed whenever it is needed.
  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;     // stored element
      _Hash_node*  _M_next;  // next node in the same bucket chain
    };
82
 
83
  // Local iterators, used to iterate within a bucket but not between
84
  // buckets.
85
  // Common state and increment logic shared by the local (per-bucket)
  // iterator and const_iterator below.  Holds only the current node;
  // advancing past the end of a bucket yields a null _M_cur.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      // Step to the next node in the current bucket's chain.
      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>*  _M_cur;
    };
97
 
98
  // Two local iterators are equal iff they designate the same node.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }
103
 
104
  // Negation of the node-identity comparison above.
  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }
109
 
110
  // Mutable local (per-bucket) forward iterator.  When the container's
  // __constant_iterators policy is true (e.g. for sets), pointer and
  // reference degrade to const so elements cannot be modified in place.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      // const _Value* / const _Value& when constant iterators are required.
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value*, _Value*>::__type
                                                       pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value&, _Value&>::__type
                                                       reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default construction yields a null (past-the-end-of-bucket) iterator.
      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur->_M_v; }

      _Node_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int)
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
156
 
157
  // Constant local (per-bucket) forward iterator; always exposes const
  // access regardless of the __constant_iterators policy.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef const _Value*                            pointer;
      typedef const _Value&                            reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default construction yields a null (past-the-end-of-bucket) iterator.
      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      // Implicit conversion from the mutable iterator.
      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur->_M_v; }

      _Node_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int)
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
201
 
202
  // Common state and increment logic for the whole-table iterators
  // below.  Tracks both the current node and the current bucket so that
  // falling off the end of a bucket's chain advances to the next
  // non-empty bucket.
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      // Advance to the next element; move to the next non-empty bucket
      // when the current chain is exhausted.
      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      // Out-of-line: find the next non-empty bucket (defined below).
      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;
      _Hash_node<_Value, __cache>**  _M_cur_bucket;
    };
223
 
224
  // Global iterators, used for arbitrary iteration within a hash
225
  // table.  Larger and more expensive than local iterators.
226
  // Move _M_cur_bucket forward to the next non-empty bucket and point
  // _M_cur_node at its first element.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel
      // past its last real bucket; otherwise it would run off the end
      // when no further non-empty bucket exists.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }
238
 
239
  // Two table iterators are equal iff they designate the same node;
  // the bucket pointer need not be compared.
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }
244
 
245
  // Negation of the node-identity comparison above.
  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }
250
 
251
  // Mutable whole-table forward iterator.  When the container's
  // __constant_iterators policy is true, pointer and reference degrade
  // to const so elements cannot be modified in place.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      // const _Value* / const _Value& when constant iterators are required.
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value*, _Value*>::__type
                                                       pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value&, _Value&>::__type
                                                       reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default construction yields a null iterator.
      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
                          _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      // Start at the first node of bucket __b.
      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur_node->_M_v; }

      _Hashtable_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_iterator
      operator++(int)
      {
        _Hashtable_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
301
 
302
  // Constant whole-table forward iterator; always exposes const access
  // regardless of the __constant_iterators policy.
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                   value_type;
      typedef const _Value*                            pointer;
      typedef const _Value&                            reference;
      typedef std::ptrdiff_t                           difference_type;
      typedef std::forward_iterator_tag                iterator_category;

      // Default construction yields a null iterator.
      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
                                _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      // Start at the first node of bucket __b.
      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      // Implicit conversion from the mutable iterator.
      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
                                __constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
                                                  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return &this->_M_cur_node->_M_v; }

      _Hashtable_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_const_iterator
      operator++(int)
      {
        _Hashtable_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
351
 
352
 
353
  // Many of class template _Hashtable's template parameters are policy
354
  // classes.  These are defaults for the policies.
355
 
356
  // Default range hashing function: use division to fold a large number
357
  // into the range [0, N).
358
  // Default range-hashing policy: fold an arbitrary hash value into the
  // range [0, N) by taking the remainder of division by the bucket count.
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    result_type
    operator()(first_argument_type __num, second_argument_type __den) const
    {
      const result_type __folded = __num % __den;
      return __folded;
    }
  };
368
 
369
  // Default ranged hash function H.  In principle it should be a
370
  // function object composed from objects of type H1 and H2 such that
371
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
372
  // h1 and h2.  So instead we'll just use a tag to tell class template
373
  // hashtable to do that composition.
374
  // Tag type only: tells _Hashtable to compose h(k, N) = h2(h1(k), N) itself.
  struct _Default_ranged_hash { };
375
 
376
  // Default value for rehash policy.  Bucket size is (usually) the
377
  // smallest prime that keeps the load factor small enough.
378
  // Default rehash policy: bucket count is (usually) the smallest prime
  // that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    // __z is the maximum load factor.  Use a float literal: the original
    // `1.0` was a double silently narrowed to float.
    _Prime_rehash_policy(float __z = 1.0f)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than __n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for __n elements.
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    // Number of usable entries in the prime table; the extra 48 primes
    // are only meaningful when unsigned long is 64 bits wide.
    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float                _M_max_load_factor;
    float                _M_growth_factor;
    // Cached element count at which the next rehash becomes necessary;
    // mutable because const member functions above refresh this cache.
    mutable std::size_t  _M_next_resize;
  };
409
 
410
  // Sorted table of primes used for bucket sizing; defined elsewhere in
  // the library, with at least _S_n_primes entries.
  extern const unsigned long __prime_list[];
411
 
412
  // XXX This is a hack.  There's no good reason for any of
413
  // _Prime_rehash_policy's member functions to be inline.  
414
 
415
  // Return a prime no smaller than n.
416
  // Return a prime no smaller than __n.  As a side effect, refreshes
  // the cached _M_next_resize threshold for the chosen bucket count.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    // __prime_list is sorted, so lower_bound finds the first prime >= __n.
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __n);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
426
 
427
  // Return the smallest prime p such that alpha p >= n, where alpha
428
  // is the load factor.
429
  // Return the smallest prime p such that alpha p >= __n, where alpha
  // is the load factor.  Also refreshes the cached _M_next_resize
  // threshold for the chosen bucket count.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    const float __min_bkts = __n / _M_max_load_factor;
    // Note: compares unsigned long table entries against a float bound.
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __min_bkts);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
440
 
441
  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
442
  // If p > __n_bkt, return make_pair(true, p); otherwise return
443
  // make_pair(false, 0).  In principle this isn't very different from 
444
  // _M_bkt_for_elements.
445
 
446
  // The only tricky part is that we're caching the element count at
447
  // which we need to rehash, so we don't have to do a floating-point
448
  // multiply for every insertion.
449
 
450
  // Decide whether inserting __n_ins elements into a table of __n_bkt
  // buckets holding __n_elt elements requires growing.  Returns
  // (true, new bucket count) or (false, 0).  The cached _M_next_resize
  // lets the common no-rehash case skip all floating-point work.
  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        // Minimum bucket count to keep the load factor within bounds.
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            // Grow by at least _M_growth_factor to avoid frequent rehashes.
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            const unsigned long* __p =
              std::lower_bound(__prime_list, __prime_list + _S_n_primes,
                               __min_bkts);
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(*__p * _M_max_load_factor));
            return std::make_pair(true, *__p);
          }
        else
          {
            // Current bucket count still suffices; just refresh the cache.
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
479
 
480
  // Base classes for std::tr1::_Hashtable.  We define these base
481
  // classes because in some cases we want to do different things
482
  // depending on the value of a policy class.  In some cases the
483
  // policy class affects which member functions and nested typedefs
484
  // are defined; we handle that by specializing base class templates.
485
  // Several of the base class templates need to access other members
486
  // of class template _Hashtable, so we use the "curiously recurring
487
  // template pattern" for them.
488
 
489
  // class template _Map_base.  If the hashtable has a value type of the
490
  // form pair<T1, T2> and a key extraction policy that returns the
491
  // first part of the pair, the hashtable gets a mapped_type typedef.
492
  // If it satisfies those criteria and also has unique keys, then it
493
  // also gets an operator[].  
494
  // Primary template: empty base for containers that are not maps
  // (no mapped_type, no operator[]).
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };
497
 
498
  // Map with non-unique keys (multimap-like): provides mapped_type
  // but no operator[].
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };
503
 
504
  // Map with unique keys: provides mapped_type and operator[]
  // (defined out of line below).
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);
    };
512
 
513
  // operator[]: find the element with key __k, default-inserting a
  // value-initialized mapped_type if absent, and return a reference to
  // the mapped value.  Uses the CRTP downcast to reach the derived
  // _Hashtable's buckets and hashing machinery.
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      // CRTP: this object actually is the derived _Hashtable.
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        // Key absent: insert a value-initialized mapped value.
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
531
 
532
  // class template _Rehash_base.  Give hashtable the max_load_factor
533
  // functions iff the rehash policy is _Prime_rehash_policy.
534
  // Primary template: no max_load_factor members unless the rehash
  // policy is _Prime_rehash_policy (see specialization below).
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };
536
 
537
  template<typename _Hashtable>
538
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
539
    {
540
      float
541
      max_load_factor() const
542
      {
543
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
544
        return __this->__rehash_policy().max_load_factor();
545
      }
546
 
547
      void
548
      max_load_factor(float __z)
549
      {
550
        _Hashtable* __this = static_cast<_Hashtable*>(this);
551
        __this->__rehash_policy(_Prime_rehash_policy(__z));
552
      }
553
    };
554
 
555
  // Class template _Hash_code_base.  Encapsulates two policy issues that
556
  // aren't quite orthogonal.
557
  //   (1) the difference between using a ranged hash function and using
558
  //       the combination of a hash function and a range-hashing function.
559
  //       In the former case we don't have such things as hash codes, so
560
  //       we have a dummy type as placeholder.
561
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
562
  //       meaningless if we have a ranged hash function.
563
  // We also put the key extraction and equality comparison function 
564
  // objects here, for convenience.
565
 
566
  // Primary template: unused except as a hook for specializations.  
567
  // Primary template: unused except as a hook for the specializations
  // below that pick between a ranged hash function and an H1/H2 pair,
  // with or without hash-code caching.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;
572
 
573
  // Specialization: ranged hash function, no caching hash codes.  H1
574
  // and H2 are provided but ignored.  We define a dummy hash code type.
575
  // Specialization: ranged hash function, no caching of hash codes.
  // H1 and H2 are accepted but ignored; since there is no separate hash
  // value, the "hash code" is a dummy void* that is never inspected.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, false>
    {
    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      // Placeholder type: a ranged hash has no usable hash code.
      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      // Bucket index comes straight from the ranged hash h(k, N).
      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
                      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      // Key equality only; no hash code available to pre-filter.
      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      // Nothing to store or copy: nodes carry no hash code.
      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _Hash        _M_ranged_hash;
    };
629
 
630
 
631
  // No specialization for ranged hash function while caching hash codes.
632
  // That combination is meaningless, and trying to do it is an error.
633
 
634
 
635
  // Specialization: ranged hash function, cache hash codes.  This
636
  // combination is meaningless, so we provide only a declaration
637
  // and no definition.  
638
  // Ranged hash function with cached hash codes: a meaningless
  // combination, so only this declaration exists — any attempt to
  // instantiate it is a compile-time error.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;
643
 
644
  // Specialization: hash function and range-hashing function, no
645
  // caching of hash codes.  H is provided but ignored.  Provides
646
  // typedef and accessor required by TR1.  
647
  // Specialization: hash function H1 plus range-hashing function H2,
  // no caching of hash codes.  The _Default_ranged_hash tag is accepted
  // but ignored.  Provides the hasher typedef and hash_function()
  // accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, false>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      // Composition: bucket index is h2(h1(k), N), here applied to an
      // already-computed code __c.
      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      // No cached code in the node: recompute h1 from the stored key.
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      // Key equality only; the code is not cached, so it cannot be used
      // to pre-filter candidates.
      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      // Nothing to store or copy: nodes carry no hash code.
      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
710
 
711
  // Specialization: hash function and range-hashing function, 
712
  // caching hash codes.  H is provided but ignored.  Provides
713
  // typedef and accessor required by TR1.
714
  // Specialization: hash function H1 plus range-hashing function H2,
  // with hash codes cached in the nodes.  The _Default_ranged_hash tag
  // is accepted but ignored.  Provides the hasher typedef and
  // hash_function() accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      // Use the node's cached code; no rehash of the stored key needed.
      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      // Cheap integer comparison first; the (possibly expensive) key
      // equality runs only when the cached codes match.
      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
777
} // namespace __detail
778
}
779
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.