/* This file is part of the program psim.

   Copyright (C) 1994-1995, Andrew Cagney <cagney@highland.com.au>

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

   */


#ifndef _BITS_H_
#define _BITS_H_


/* bit manipulation routines:

   Bit numbering: The bits are numbered according to the PowerPC
   convention - the left most (or most significant) is bit 0 while the
   right most (least significant) is bit 31.

   Size convention: Each macro comes in three forms - <MACRO>32 which
   operates on a 32bit quantity (bits are numbered 0..31); <MACRO>64
   which operates on a 64bit quantity (bits are numbered 0..63); and
   <MACRO> which operates using the bit size of the target
   architecture (bits are still numbered 0..63), with 32bit
   architectures ignoring the first 32 bits and treating bit 32 as the
   most significant.

   BIT*(POS): Quantity with just bit POS set.

   MASK*(FIRST, LAST): Create a constant bit mask of the specified
   size with bits [FIRST .. LAST] set.

   MASKED*(VALUE, FIRST, LAST): Masks out all but bits [FIRST
   .. LAST].

   EXTRACTED*(VALUE, FIRST, LAST): Masks out bits [FIRST .. LAST] but
   also right shifts the masked value so that bit LAST becomes the
   least significant (right most).

   SHUFFLED*(VALUE, OLD, NEW): Mask then move a single bit from OLD
   to NEW.

   MOVED*(VALUE, OLD_FIRST, OLD_LAST, NEW_FIRST, NEW_LAST): Moves
   things around so that bits OLD_FIRST..OLD_LAST are masked then
   moved to NEW_FIRST..NEW_LAST.

   INSERTED*(VALUE, FIRST, LAST): Takes VALUE and `inserts' the (LAST
   - FIRST + 1) least significant bits into bit positions [ FIRST
   .. LAST ].  This is almost the complement to EXTRACTED.

   IEA_MASKED(SHOULD_MASK, ADDR): Convert the address to the target's
   natural size.  If in 32bit mode, discard the high 32 bits.

   EXTENDED(VALUE): Convert VALUE (32 bits of it) to the target's
   natural size.  If in 64bit mode, sign extend the value.

   ALIGN_*(VALUE): Round the value upwards so that it is aligned.

   FLOOR_*(VALUE): Truncate the value so that it is aligned.

   ROTL*(VALUE, NR_BITS): Return the value rotated left by NR_BITS.

   */
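
/* A worked example of the conventions above (illustrative only, not
   part of the original header).  For the 32bit word 0x12345678, bits
   0..7 are the most significant byte:

     MASK32(0, 7)                   == 0xff000000
     MASKED32(0x12345678, 0, 7)     == 0x12000000
     EXTRACTED32(0x12345678, 0, 7)  == 0x00000012
     INSERTED32(0x00000012, 0, 7)   == 0x12000000
   */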

#define _MAKE_SHIFT(WIDTH, pos) ((WIDTH) - 1 - (pos))


/* MakeBit */
#define _BITn(WIDTH, pos) (((natural##WIDTH)(1)) \
                           << _MAKE_SHIFT(WIDTH, pos))

#define BIT4(POS)  (1 << _MAKE_SHIFT(4, POS))
#define BIT5(POS)  (1 << _MAKE_SHIFT(5, POS))
#define BIT8(POS)  (1 << _MAKE_SHIFT(8, POS))
#define BIT10(POS) (1 << _MAKE_SHIFT(10, POS))
#define BIT32(POS) _BITn(32, POS)
#define BIT64(POS) _BITn(64, POS)

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define BIT(POS)   BIT64(POS)
#else
#define BIT(POS)   (((POS) < 32) ? 0 : _BITn(32, (POS)-32))
#endif
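
/* Examples (illustrative): bit 0 is the most significant bit of the
   quantity, so

     BIT32(0)  == 0x80000000      BIT64(0)  == 0x8000000000000000
     BIT32(31) == 0x00000001      BIT64(63) == 0x0000000000000001

   On a 32bit target, BIT(POS) is 0 for POS 0..31 and maps POS 32..63
   onto BIT32(POS - 32), so BIT(32) == 0x80000000.  */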

/* multi bit mask */
#define _MASKn(WIDTH, START, STOP) \
  (((((unsigned##WIDTH)0) - 1) \
    >> (WIDTH - ((STOP) - (START) + 1))) \
   << (WIDTH - 1 - (STOP)))

#define MASK32(START, STOP)   _MASKn(32, START, STOP)
#define MASK64(START, STOP)   _MASKn(64, START, STOP)

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define MASK(START, STOP) \
  (((START) <= (STOP)) \
   ? _MASKn(64, START, STOP) \
   : (_MASKn(64, 0, STOP) \
      | _MASKn(64, START, 63)))
#else
#define MASK(START, STOP) \
  (((START) <= (STOP)) \
   ? (((STOP) < 32) \
      ? 0 \
      : _MASKn(32, \
               (START) < 32 ? 0 : (START) - 32, \
               (STOP) - 32)) \
   : (_MASKn(32, \
             (START) < 32 ? 0 : (START) - 32, \
             31) \
      | (((STOP) < 32) \
         ? 0 \
         : _MASKn(32, \
                  0, \
                  (STOP) - 32))))
#endif
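
/* Examples (illustrative): MASK32(16, 23) == 0x0000ff00 and
   MASK64(0, 3) == 0xf000000000000000.  When START > STOP the MASK()
   form wraps around; on a 64bit target MASK(62, 1) ==
   0xc000000000000003 (the two most and two least significant bits).  */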

/* mask the required bits, leaving them in place */

INLINE_BITS\
(unsigned32) MASKED32
(unsigned32 word,
 unsigned start,
 unsigned stop);

INLINE_BITS\
(unsigned64) MASKED64
(unsigned64 word,
 unsigned start,
 unsigned stop);

INLINE_BITS\
(unsigned_word) MASKED
(unsigned_word word,
 unsigned start,
 unsigned stop);
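
/* Example (illustrative, per the description above - the selected bits
   are kept in place and everything else is cleared):

     MASKED32(0x12345678, 8, 15) == 0x00340000  */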

/* extract the required bits aligning them with the lsb */
#define _EXTRACTEDn(WIDTH, WORD, START, STOP) \
  ((((natural##WIDTH)(WORD)) >> (WIDTH - (STOP) - 1)) \
   & _MASKn(WIDTH, WIDTH-1+(START)-(STOP), WIDTH-1))

/* #define EXTRACTED10(WORD, START, STOP) _EXTRACTEDn(10, WORD, START, STOP) */
#define EXTRACTED32(WORD, START, STOP) _EXTRACTEDn(32, WORD, START, STOP)
#define EXTRACTED64(WORD, START, STOP) _EXTRACTEDn(64, WORD, START, STOP)

INLINE_BITS\
(unsigned_word) EXTRACTED
(unsigned_word val,
 unsigned start,
 unsigned stop);
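
/* Examples (illustrative): the selected field is right aligned, so

     EXTRACTED32(0x12345678, 8, 15)  == 0x00000034
     EXTRACTED64(0x12345678, 56, 63) == 0x0000000000000078  */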

/* move a single bit around */
/* NB: the weirdness (N>O?N-O:0) is to stop a warning from GCC */
#define _SHUFFLEDn(N, WORD, OLD, NEW) \
  ((OLD) < (NEW) \
   ? (((unsigned##N)(WORD) \
       >> (((NEW) > (OLD)) ? ((NEW) - (OLD)) : 0)) \
      & MASK32((NEW), (NEW))) \
   : (((unsigned##N)(WORD) \
       << (((OLD) > (NEW)) ? ((OLD) - (NEW)) : 0)) \
      & MASK32((NEW), (NEW))))

#define SHUFFLED32(WORD, OLD, NEW) _SHUFFLEDn(32, WORD, OLD, NEW)
#define SHUFFLED64(WORD, OLD, NEW) _SHUFFLEDn(64, WORD, OLD, NEW)

#define SHUFFLED(WORD, OLD, NEW) _SHUFFLEDn(_word, WORD, OLD, NEW)
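
/* Example (illustrative): SHUFFLED* masks a single bit and moves it,
   so SHUFFLED32(0x80000000, 0, 31) == 0x00000001 and
   SHUFFLED32(0x00000001, 31, 0) == 0x80000000.  */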

/* move a group of bits around */
#define _INSERTEDn(N, WORD, START, STOP) \
  (((natural##N)(WORD) << _MAKE_SHIFT(N, STOP)) & _MASKn(N, START, STOP))

#define INSERTED32(WORD, START, STOP) _INSERTEDn(32, WORD, START, STOP)
#define INSERTED64(WORD, START, STOP) _INSERTEDn(64, WORD, START, STOP)

INLINE_BITS\
(unsigned_word) INSERTED
(unsigned_word val,
 unsigned start,
 unsigned stop);
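
/* Example (illustrative): INSERTED places the least significant bits
   of VALUE into the field [FIRST .. LAST], so

     INSERTED32(0x000000ab, 16, 23) == 0x0000ab00

   and EXTRACTED32(INSERTED32(x, 16, 23), 16, 23) == (x & 0xff).  */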

/* depending on MODE return a 64bit or 32bit (sign extended) value */
#if (WITH_TARGET_WORD_BITSIZE == 64)
#define EXTENDED(X)     ((signed64)(signed32)(X))
#else
#define EXTENDED(X)     (X)
#endif
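
/* Example (illustrative): on a 64bit target EXTENDED(0x80000000) ==
   0xffffffff80000000 (the 32bit value sign extended); on a 32bit
   target the value is returned unchanged.  */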

/* memory alignment macros */
#define _ALIGNa(A,X)  (((X) + ((A) - 1)) & ~((A) - 1))
#define _FLOORa(A,X)  ((X) & ~((A) - 1))

#define ALIGN_8(X)    _ALIGNa(8, X)
#define ALIGN_16(X)   _ALIGNa(16, X)

#define ALIGN_PAGE(X) _ALIGNa(0x1000, X)
#define FLOOR_PAGE(X) ((X) & ~(0x1000 - 1))
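
/* Examples (illustrative): ALIGN_8(13) == 16, ALIGN_16(32) == 32,
   ALIGN_PAGE(0x1234) == 0x2000 and FLOOR_PAGE(0x1234) == 0x1000.
   The alignment A must be a power of two for _ALIGNa/_FLOORa to
   work.  */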

/* bit blitting macros */
#define BLIT32(V, POS, BIT) \
  do { \
    if (BIT) \
      V |= BIT32(POS); \
    else \
      V &= ~BIT32(POS); \
  } while (0)
#define MBLIT32(V, LO, HI, VAL) \
  do { \
    (V) = (((V) & ~MASK32((LO), (HI))) \
           | INSERTED32(VAL, LO, HI)); \
  } while (0)
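
/* Example (illustrative): given unsigned32 v = 0x12345678,
   MBLIT32(v, 8, 15, 0xab) leaves v == 0x12ab5678, while
   BLIT32(v, 0, 1) sets the most significant bit of v.  */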

/* some rotate functions to make things easier

   NOTE: These are functions, not macros, as the latter tickle bugs in
   gcc-2.6.3 */

#define _ROTLn(N, VAL, SHIFT) \
  (((VAL) << (SHIFT)) | ((VAL) >> ((N)-(SHIFT))))

INLINE_BITS\
(unsigned32) ROTL32
(unsigned32 val,
 long shift);

INLINE_BITS\
(unsigned64) ROTL64
(unsigned64 val,
 long shift);
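
/* Example (illustrative, assuming the left-rotate semantics suggested
   by _ROTLn above): ROTL32(0x80000001, 1) == 0x00000003.  */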

#if (BITS_INLINE & INCLUDE_MODULE)
#include "bits.c"
#endif

#endif /* _BITS_H_ */