// This file is part of the uSTL library, an STL implementation.
//
// Copyright (c) 2005-2009 by Mike Sharov <msharov@users.sourceforge.net>
// This file is free software, distributed under the MIT License.

#ifndef UALGOBASE_H_683A0BE77546133C4CE0E3622CFAA2EB
#define UALGOBASE_H_683A0BE77546133C4CE0E3622CFAA2EB

#include "uutility.h"
#include <string.h>

namespace ustl {

/// Assigns the contents of a to b and the contents of b to a.
/// This is used as a primitive operation by many other algorithms.
/// \ingroup SwapAlgorithms
///
template <typename Assignable>
inline void swap (Assignable& a, Assignable& b)
{
    Assignable tmp = a;
    a = b;
    b = tmp;
}
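
// A minimal usage sketch of swap:
//     int a (1), b (2);
//     swap (a, b);    // afterwards a == 2 and b == 1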

/// Equivalent to swap (*a, *b)
/// \ingroup SwapAlgorithms
///
template <typename Iterator>
inline void iter_swap (Iterator a, Iterator b)
{
    swap (*a, *b);
}

/// Copy copies elements from the range [first, last) to the range
/// [result, result + (last - first)). That is, it performs the assignments
/// *result = *first, *(result + 1) = *(first + 1), and so on. Generally,
/// for every integer n from 0 up to (but not including) last - first, copy
/// performs the assignment *(result + n) = *(first + n). Assignments are
/// performed in forward order, i.e. in order of increasing n.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy (InputIterator first, InputIterator last, OutputIterator result)
{
    for (; first != last; ++result, ++first)
        *result = *first;
    return (result);
}
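
// Usage sketch; the destination must already have room for the elements:
//     int src[3] = { 1, 2, 3 }, dst[3];
//     copy (src, src + 3, dst);    // dst is now { 1, 2, 3 }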

/// Copy_n copies elements from the range [first, first + n) to the range
/// [result, result + n). That is, it performs the assignments
/// *result = *first, *(result + 1) = *(first + 1), and so on. Generally,
/// for every integer i from 0 up to (but not including) n, copy_n performs
/// the assignment *(result + i) = *(first + i). Assignments are performed
/// in forward order, i.e. in order of increasing i.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy_n (InputIterator first, size_t count, OutputIterator result)
{
    for (; count; --count, ++result, ++first)
        *result = *first;
    return (result);
}
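
// Usage sketch; equivalent to copy (src, src + 3, dst) in the previous sketch:
//     copy_n (src, 3, dst);    // copies src[0..2] into dst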

/// \brief Copy_backward copies elements from the range [first, last) to the
/// range [result - (last - first), result).
/// \ingroup MutatingAlgorithms
/// Copies elements starting at last, decrementing both last and result.
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy_backward (InputIterator first, InputIterator last, OutputIterator result)
{
    while (first != last)
        *--result = *--last;
    return (result);
}
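
// Because assignments run from last toward first, copy_backward is safe for
// shifting a range right within the same buffer:
//     int v[5] = { 1, 2, 3, 4, 5 };
//     copy_backward (v, v + 4, v + 5);    // v is now { 1, 1, 2, 3, 4 }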

/// For_each applies the function object f to each element in the range
/// [first, last); f's return value, if any, is ignored. Applications are
/// performed in forward order, i.e. from first to last. For_each returns
/// the function object after it has been applied to each element.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename UnaryFunction>
inline UnaryFunction for_each (InputIterator first, InputIterator last, UnaryFunction f)
{
    for (; first != last; ++first)
        f (*first);
    return (f);
}
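
// Usage sketch with a function object:
//     struct Print { void operator() (int v) const { printf ("%d ", v); } };
//     int v[3] = { 1, 2, 3 };
//     for_each (v, v + 3, Print());    // prints "1 2 3 "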

/// Fill assigns the value value to every element in the range [first, last).
/// That is, for every iterator i in [first, last),
/// it performs the assignment *i = value.
/// \ingroup GeneratorAlgorithms
///
template <typename ForwardIterator, typename T>
inline void fill (ForwardIterator first, ForwardIterator last, const T& value)
{
    for (; first != last; ++first)
        *first = value;
}
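
// Usage sketch:
//     int v[4];
//     fill (v, v + 4, 7);    // v is now { 7, 7, 7, 7 }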

/// Fill_n assigns the value value to every element in the range
/// [first, first+count). That is, for every iterator i in [first, first+count),
/// it performs the assignment *i = value. The return value is first + count.
/// \ingroup GeneratorAlgorithms
///
template <typename OutputIterator, typename T>
inline OutputIterator fill_n (OutputIterator first, size_t count, const T& value)
{
    for (; count; --count, ++first)
        *first = value;
    return (first);
}
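
// Usage sketch; fill_n also returns the end of the filled range:
//     int v[4];
//     int* e = fill_n (v, 4, 7);    // v == { 7, 7, 7, 7 }, e == v + 4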

#if CPU_HAS_MMX
extern "C" void copy_n_fast (const void* src, size_t count, void* dest) throw();
#else
inline void copy_n_fast (const void* src, size_t count, void* dest) throw()
    { memcpy (dest, src, count); }
#endif
#if __i386__ || __x86_64__
extern "C" void copy_backward_fast (const void* first, const void* last, void* result) throw();
#else
inline void copy_backward_fast (const void* first, const void* last, void* result) throw()
{
    const size_t nBytes (distance (first, last));
    memmove (advance (result, -nBytes), first, nBytes);
}
#endif
extern "C" void fill_n8_fast (uint8_t* dest, size_t count, uint8_t v) throw();
extern "C" void fill_n16_fast (uint16_t* dest, size_t count, uint16_t v) throw();
extern "C" void fill_n32_fast (uint32_t* dest, size_t count, uint32_t v) throw();
extern "C" void rotate_fast (void* first, void* middle, void* last) throw();
 
#if __GNUC__ >= 4
/// \brief Computes the number of 1 bits in a number.
/// \ingroup ConditionAlgorithms
inline size_t popcount (uint32_t v)     { return (__builtin_popcount (v)); }
#if HAVE_INT64_T
inline size_t popcount (uint64_t v)     { return (__builtin_popcountll (v)); }
#endif
#else
size_t popcount (uint32_t v);
#if HAVE_INT64_T
size_t popcount (uint64_t v);
#endif  // HAVE_INT64_T
#endif  // __GNUC__
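
// Usage sketch:
//     popcount (uint32_t(0x0F0F));    // returns 8: eight bits are set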

//----------------------------------------------------------------------
// Optimized versions for standard types
//----------------------------------------------------------------------

#if WANT_UNROLLED_COPY

template <typename T>
inline T* unrolled_copy (const T* first, size_t count, T* result)
{
    copy_n_fast (first, count * sizeof(T), result);
    return (advance (result, count));
}

template <>
inline uint8_t* copy_backward (const uint8_t* first, const uint8_t* last, uint8_t* result)
{
    copy_backward_fast (first, last, result);
    return (result);
}

template <typename T>
inline T* unrolled_fill (T* result, size_t count, T value)
{
    for (; count; --count, ++result)
        *result = value;
    return (result);
}
template <> inline uint8_t* unrolled_fill (uint8_t* result, size_t count, uint8_t value)
    { fill_n8_fast (result, count, value); return (advance (result, count)); }
template <> inline uint16_t* unrolled_fill (uint16_t* result, size_t count, uint16_t value)
    { fill_n16_fast (result, count, value); return (advance (result, count)); }
template <> inline uint32_t* unrolled_fill (uint32_t* result, size_t count, uint32_t value)
    { fill_n32_fast (result, count, value); return (advance (result, count)); }
template <> inline float* unrolled_fill (float* result, size_t count, float value)
    { fill_n32_fast ((uint32_t*) result, count, *noalias_cast<uint32_t*>(&value)); return (advance (result, count)); }
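
// The float specialization relies on sizeof(float) == sizeof(uint32_t):
// noalias_cast (from uutility.h) reinterprets the value's bit pattern as a
// uint32_t without tripping strict-aliasing rules, so fill_n32_fast can
// replicate it directly.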

#if CPU_HAS_MMX
#define UNROLLED_COPY_SPECIALIZATION(type)                                              \
template <> inline type* copy (const type* first, const type* last, type* result)      \
{ return (unrolled_copy (first, distance (first, last), result)); }                     \
template <> inline type* copy_n (const type* first, size_t count, type* result)         \
{ return (unrolled_copy (first, count, result)); }
#define UNROLLED_FILL_SPECIALIZATION(type)                                              \
template <> inline void fill (type* first, type* last, const type& value)               \
{ unrolled_fill (first, distance (first, last), value); }                               \
template <> inline type* fill_n (type* first, size_t count, const type& value)          \
{ return (unrolled_fill (first, count, value)); }
UNROLLED_COPY_SPECIALIZATION(uint8_t)
UNROLLED_FILL_SPECIALIZATION(uint8_t)
UNROLLED_COPY_SPECIALIZATION(uint16_t)
UNROLLED_FILL_SPECIALIZATION(uint16_t)
UNROLLED_COPY_SPECIALIZATION(uint32_t)
UNROLLED_FILL_SPECIALIZATION(uint32_t)
UNROLLED_COPY_SPECIALIZATION(float)
UNROLLED_FILL_SPECIALIZATION(float)
#undef UNROLLED_FILL_SPECIALIZATION
#undef UNROLLED_COPY_SPECIALIZATION
#endif // CPU_HAS_MMX
#endif // WANT_UNROLLED_COPY
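
// With these specializations in place, copy(), copy_n(), fill(), and fill_n()
// on uint8_t/uint16_t/uint32_t/float buffers resolve to the unrolled *_fast
// routines above instead of the generic element-by-element loops.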

// Specializations for void* and char*, aliasing the above optimized versions.
//
// All these need duplication with const and non-const arguments, since
// otherwise the compiler will default to the unoptimized version for
// pointers not const in the caller's context, such as local variables.
// These are all inline, but they sure slow down compilation... :(
//
#define COPY_ALIAS_FUNC(ctype, type, alias_type)                        \
template <> inline type* copy (ctype* first, ctype* last, type* result) \
{ return ((type*) copy ((const alias_type*) first, (const alias_type*) last, (alias_type*) result)); }
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_ALIAS_FUNC(const char, char, uint8_t)
COPY_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_ALIAS_FUNC(const int8_t, int8_t, uint8_t)
COPY_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_ALIAS_FUNC(const int16_t, int16_t, uint16_t)
COPY_ALIAS_FUNC(int16_t, int16_t, uint16_t)
COPY_ALIAS_FUNC(uint16_t, uint16_t, uint16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
COPY_ALIAS_FUNC(const int32_t, int32_t, uint32_t)
COPY_ALIAS_FUNC(int32_t, int32_t, uint32_t)
COPY_ALIAS_FUNC(uint32_t, uint32_t, uint32_t)
#endif
#endif
COPY_ALIAS_FUNC(const void, void, uint8_t)
COPY_ALIAS_FUNC(void, void, uint8_t)
#undef COPY_ALIAS_FUNC
#define COPY_BACKWARD_ALIAS_FUNC(ctype, type, alias_type)                               \
template <> inline type* copy_backward (ctype* first, ctype* last, type* result)        \
{ return ((type*) copy_backward ((const alias_type*) first, (const alias_type*) last, (alias_type*) result)); }
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_BACKWARD_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_BACKWARD_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(uint16_t, uint16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const uint16_t, uint16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(int16_t, int16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const int16_t, int16_t, uint8_t)
#endif
COPY_BACKWARD_ALIAS_FUNC(void, void, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const void, void, uint8_t)
#undef COPY_BACKWARD_ALIAS_FUNC
#define FILL_ALIAS_FUNC(type, alias_type, v_type)                               \
template <> inline void fill (type* first, type* last, const v_type& value)     \
{ fill ((alias_type*) first, (alias_type*) last, (const alias_type&) value); }
FILL_ALIAS_FUNC(void, uint8_t, char)
FILL_ALIAS_FUNC(void, uint8_t, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
FILL_ALIAS_FUNC(char, uint8_t, char)
FILL_ALIAS_FUNC(char, uint8_t, uint8_t)
#endif
FILL_ALIAS_FUNC(int8_t, uint8_t, int8_t)
FILL_ALIAS_FUNC(int16_t, uint16_t, int16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
FILL_ALIAS_FUNC(int32_t, uint32_t, int32_t)
#endif
#endif
#undef FILL_ALIAS_FUNC
#define COPY_N_ALIAS_FUNC(ctype, type, alias_type)                              \
template <> inline type* copy_n (ctype* first, size_t count, type* result)      \
{ return ((type*) copy_n ((const alias_type*) first, count, (alias_type*) result)); }
COPY_N_ALIAS_FUNC(const void, void, uint8_t)
COPY_N_ALIAS_FUNC(void, void, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_N_ALIAS_FUNC(const char, char, uint8_t)
COPY_N_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_N_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_N_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_N_ALIAS_FUNC(const int8_t, int8_t, uint8_t)
COPY_N_ALIAS_FUNC(int16_t, int16_t, uint16_t)
COPY_N_ALIAS_FUNC(uint16_t, uint16_t, uint16_t)
COPY_N_ALIAS_FUNC(const int16_t, int16_t, uint16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
COPY_N_ALIAS_FUNC(int32_t, int32_t, uint32_t)
COPY_N_ALIAS_FUNC(uint32_t, uint32_t, uint32_t)
COPY_N_ALIAS_FUNC(const int32_t, int32_t, uint32_t)
#endif
#endif
#undef COPY_N_ALIAS_FUNC
#define FILL_N_ALIAS_FUNC(type, alias_type, v_type)                             \
template <> inline type* fill_n (type* first, size_t n, const v_type& value)    \
{ return ((type*) fill_n ((alias_type*) first, n, (const alias_type&) value)); }
FILL_N_ALIAS_FUNC(void, uint8_t, char)
FILL_N_ALIAS_FUNC(void, uint8_t, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
FILL_N_ALIAS_FUNC(char, uint8_t, char)
FILL_N_ALIAS_FUNC(char, uint8_t, uint8_t)
#endif
FILL_N_ALIAS_FUNC(int8_t, uint8_t, int8_t)
FILL_N_ALIAS_FUNC(int16_t, uint16_t, int16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
FILL_N_ALIAS_FUNC(int32_t, uint32_t, int32_t)
#endif
#endif
#undef FILL_N_ALIAS_FUNC
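
// Net effect of the alias functions: operations on char, void, and the signed
// integer types forward to the unsigned specializations above, so, for example
// (when HAVE_THREE_CHAR_TYPES and WANT_UNROLLED_COPY are set):
//     char s[16], d[16];
//     copy (s, s + 16, d);    // takes the same fast path as the uint8_t copy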

extern const char _FmtPrtChr[2][8];

} // namespace ustl

#endif
