1 |
12 |
jlechner |
/* Allocate registers within a basic block, for GNU compiler.
|
2 |
|
|
Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
|
3 |
|
|
1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
|
4 |
|
|
|
5 |
|
|
This file is part of GCC.
|
6 |
|
|
|
7 |
|
|
GCC is free software; you can redistribute it and/or modify it under
|
8 |
|
|
the terms of the GNU General Public License as published by the Free
|
9 |
|
|
Software Foundation; either version 2, or (at your option) any later
|
10 |
|
|
version.
|
11 |
|
|
|
12 |
|
|
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
13 |
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
14 |
|
|
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
15 |
|
|
for more details.
|
16 |
|
|
|
17 |
|
|
You should have received a copy of the GNU General Public License
|
18 |
|
|
along with GCC; see the file COPYING. If not, write to the Free
|
19 |
|
|
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
20 |
|
|
02110-1301, USA. */
|
21 |
|
|
|
22 |
|
|
/* Allocation of hard register numbers to pseudo registers is done in
|
23 |
|
|
two passes. In this pass we consider only regs that are born and
|
24 |
|
|
die once within one basic block. We do this one basic block at a
|
25 |
|
|
time. Then the next pass allocates the registers that remain.
|
26 |
|
|
Two passes are used because this pass uses methods that work only
|
27 |
|
|
on linear code, but that do a better job than the general methods
|
28 |
|
|
used in global_alloc, and more quickly too.
|
29 |
|
|
|
30 |
|
|
The assignments made are recorded in the vector reg_renumber
|
31 |
|
|
whose space is allocated here. The rtl code itself is not altered.
|
32 |
|
|
|
33 |
|
|
We assign each instruction in the basic block a number
|
34 |
|
|
which is its order from the beginning of the block.
|
35 |
|
|
Then we can represent the lifetime of a pseudo register with
|
36 |
|
|
a pair of numbers, and check for conflicts easily.
|
37 |
|
|
We can record the availability of hard registers with a
|
38 |
|
|
HARD_REG_SET for each instruction. The HARD_REG_SET
|
39 |
|
|
contains 0 or 1 for each hard reg.
|
40 |
|
|
|
41 |
|
|
To avoid register shuffling, we tie registers together when one
|
42 |
|
|
dies by being copied into another, or dies in an instruction that
|
43 |
|
|
does arithmetic to produce another. The tied registers are
|
44 |
|
|
allocated as one. Registers with different reg class preferences
|
45 |
|
|
can never be tied unless the class preferred by one is a subclass
|
46 |
|
|
of the one preferred by the other.
|
47 |
|
|
|
48 |
|
|
Tying is represented with "quantity numbers".
|
49 |
|
|
A non-tied register is given a new quantity number.
|
50 |
|
|
Tied registers have the same quantity number.
|
51 |
|
|
|
52 |
|
|
We have provision to exempt registers, even when they are contained
|
53 |
|
|
within the block, that can be tied to others that are not contained in it.
|
54 |
|
|
This is so that global_alloc could process them both and tie them then.
|
55 |
|
|
But this is currently disabled since tying in global_alloc is not
|
56 |
|
|
yet implemented. */
|
57 |
|
|
|
58 |
|
|
/* Pseudos allocated here can be reallocated by global.c if the hard register
|
59 |
|
|
is used as a spill register. Currently we don't allocate such pseudos
|
60 |
|
|
here if their preferred class is likely to be used by spills. */
|
61 |
|
|
|
62 |
|
|
#include "config.h"
|
63 |
|
|
#include "system.h"
|
64 |
|
|
#include "coretypes.h"
|
65 |
|
|
#include "tm.h"
|
66 |
|
|
#include "hard-reg-set.h"
|
67 |
|
|
#include "rtl.h"
|
68 |
|
|
#include "tm_p.h"
|
69 |
|
|
#include "flags.h"
|
70 |
|
|
#include "regs.h"
|
71 |
|
|
#include "function.h"
|
72 |
|
|
#include "insn-config.h"
|
73 |
|
|
#include "insn-attr.h"
|
74 |
|
|
#include "recog.h"
|
75 |
|
|
#include "output.h"
|
76 |
|
|
#include "toplev.h"
|
77 |
|
|
#include "except.h"
|
78 |
|
|
#include "integrate.h"
|
79 |
|
|
#include "reload.h"
|
80 |
|
|
#include "ggc.h"
|
81 |
|
|
#include "timevar.h"
|
82 |
|
|
#include "tree-pass.h"
|
83 |
|
|
|
84 |
|
|
/* Next quantity number available for allocation. */
|
85 |
|
|
|
86 |
|
|
static int next_qty;
|
87 |
|
|
|
88 |
|
|
/* Information we maintain about each quantity.  A "quantity" is one or
   more pseudo registers tied together; it is the unit for which a hard
   register is sought.  */
struct qty
{
  /* The number of refs to quantity Q.  */

  int n_refs;

  /* The frequency of uses of quantity Q.  */

  int freq;

  /* Insn number (counting from head of basic block)
     where quantity Q was born.  -1 if birth has not been recorded.  */

  int birth;

  /* Insn number (counting from head of basic block)
     where given quantity died.  Due to the way tying is done,
     and the fact that we consider in this pass only regs that die but once,
     a quantity can die only once.  Each quantity's life span
     is a set of consecutive insns.  -1 if death has not been recorded.  */

  int death;

  /* Number of words needed to hold the data in given quantity.
     This depends on its machine mode.  It is used for these purposes:
     1. It is used in computing the relative importance of qtys,
	which determines the order in which we look for regs for them.
     2. It is used in rules that prevent tying several registers of
	different sizes in a way that is geometrically impossible
	(see combine_regs).  */

  int size;

  /* Number of times a reg tied to given qty lives across a CALL_INSN.  */

  int n_calls_crossed;

  /* Number of times a reg tied to given qty lives across a CALL_INSN
     that might throw.  */

  int n_throwing_calls_crossed;

  /* The register number of one pseudo register whose reg_qty value is Q.
     This register should be the head of the chain
     maintained in reg_next_in_qty.  */

  int first_reg;

  /* Reg class contained in (smaller than) the preferred classes of all
     the pseudo regs that are tied in given quantity.
     This is the preferred class for allocating that quantity.  */

  enum reg_class min_class;

  /* Register class within which we allocate given qty if we can't get
     its preferred class.  */

  enum reg_class alternate_class;

  /* This holds the mode of the registers that are tied to given qty,
     or VOIDmode if registers with differing modes are tied together.  */

  enum machine_mode mode;

  /* The hard reg number chosen for given quantity,
     or -1 if none was found.  */

  short phys_reg;
};
|
158 |
|
|
|
159 |
|
|
static struct qty *qty;
|
160 |
|
|
|
161 |
|
|
/* These fields are kept separately to speedup their clearing. */
|
162 |
|
|
|
163 |
|
|
/* We maintain two hard register sets that indicate suggested hard registers
|
164 |
|
|
for each quantity. The first, phys_copy_sugg, contains hard registers
|
165 |
|
|
that are tied to the quantity by a simple copy. The second contains all
|
166 |
|
|
hard registers that are tied to the quantity via an arithmetic operation.
|
167 |
|
|
|
168 |
|
|
The former register set is given priority for allocation. This tends to
|
169 |
|
|
eliminate copy insns. */
|
170 |
|
|
|
171 |
|
|
/* Element Q is a set of hard registers that are suggested for quantity Q by
|
172 |
|
|
copy insns. */
|
173 |
|
|
|
174 |
|
|
static HARD_REG_SET *qty_phys_copy_sugg;
|
175 |
|
|
|
176 |
|
|
/* Element Q is a set of hard registers that are suggested for quantity Q by
|
177 |
|
|
arithmetic insns. */
|
178 |
|
|
|
179 |
|
|
static HARD_REG_SET *qty_phys_sugg;
|
180 |
|
|
|
181 |
|
|
/* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
|
182 |
|
|
|
183 |
|
|
static short *qty_phys_num_copy_sugg;
|
184 |
|
|
|
185 |
|
|
/* Element Q is the number of suggested registers in qty_phys_sugg. */
|
186 |
|
|
|
187 |
|
|
static short *qty_phys_num_sugg;
|
188 |
|
|
|
189 |
|
|
/* If (REG N) has been assigned a quantity number, is a register number
|
190 |
|
|
of another register assigned the same quantity number, or -1 for the
|
191 |
|
|
end of the chain. qty->first_reg point to the head of this chain. */
|
192 |
|
|
|
193 |
|
|
static int *reg_next_in_qty;
|
194 |
|
|
|
195 |
|
|
/* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
|
196 |
|
|
if it is >= 0,
|
197 |
|
|
of -1 if this register cannot be allocated by local-alloc,
|
198 |
|
|
or -2 if not known yet.
|
199 |
|
|
|
200 |
|
|
Note that if we see a use or death of pseudo register N with
|
201 |
|
|
reg_qty[N] == -2, register N must be local to the current block. If
|
202 |
|
|
it were used in more than one block, we would have reg_qty[N] == -1.
|
203 |
|
|
This relies on the fact that if reg_basic_block[N] is >= 0, register N
|
204 |
|
|
will not appear in any other block. We save a considerable number of
|
205 |
|
|
tests by exploiting this.
|
206 |
|
|
|
207 |
|
|
If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
|
208 |
|
|
be referenced. */
|
209 |
|
|
|
210 |
|
|
static int *reg_qty;
|
211 |
|
|
|
212 |
|
|
/* The offset (in words) of register N within its quantity.
|
213 |
|
|
This can be nonzero if register N is SImode, and has been tied
|
214 |
|
|
to a subreg of a DImode register. */
|
215 |
|
|
|
216 |
|
|
static char *reg_offset;
|
217 |
|
|
|
218 |
|
|
/* Vector of substitutions of register numbers,
|
219 |
|
|
used to map pseudo regs into hardware regs.
|
220 |
|
|
This is set up as a result of register allocation.
|
221 |
|
|
Element N is the hard reg assigned to pseudo reg N,
|
222 |
|
|
or is -1 if no hard reg was assigned.
|
223 |
|
|
If N is a hard reg number, element N is N. */
|
224 |
|
|
|
225 |
|
|
short *reg_renumber;
|
226 |
|
|
|
227 |
|
|
/* Set of hard registers live at the current point in the scan
|
228 |
|
|
of the instructions in a basic block. */
|
229 |
|
|
|
230 |
|
|
static HARD_REG_SET regs_live;
|
231 |
|
|
|
232 |
|
|
/* Each set of hard registers indicates registers live at a particular
|
233 |
|
|
point in the basic block. For N even, regs_live_at[N] says which
|
234 |
|
|
hard registers are needed *after* insn N/2 (i.e., they may not
|
235 |
|
|
conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1.
|
236 |
|
|
|
237 |
|
|
If an object is to conflict with the inputs of insn J but not the
|
238 |
|
|
outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
|
239 |
|
|
if it is to conflict with the outputs of insn J but not the inputs of
|
240 |
|
|
insn J + 1, it is said to die at index J*2 + 1. */
|
241 |
|
|
|
242 |
|
|
static HARD_REG_SET *regs_live_at;
|
243 |
|
|
|
244 |
|
|
/* Communicate local vars `insn_number' and `insn'
|
245 |
|
|
from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
|
246 |
|
|
static int this_insn_number;
|
247 |
|
|
static rtx this_insn;
|
248 |
|
|
|
249 |
|
|
/* Per-pseudo-register record built by update_equiv_regs describing a
   register's single known equivalent value, if any.  */
struct equivalence
{
  /* Set when an attempt should be made to replace a register
     with the associated src_p entry.  */

  char replace;

  /* Set when a REG_EQUIV note is found or created.  Use to
     keep track of what memory accesses might be created later,
     e.g. by reload.  */

  rtx replacement;

  /* NOTE(review): presumably points at the source operand of the single
     initializing insn, used when performing the replacement — confirm in
     update_equiv_regs (its body is not fully visible here).  */

  rtx *src_p;

  /* Loop depth is used to recognize equivalences which appear
     to be present within the same loop (or in an inner loop).  */

  int loop_depth;

  /* The list of each instruction which initializes this register.  */

  rtx init_insns;

  /* Nonzero if this had a preexisting REG_EQUIV note.  */

  int is_arg_equivalence;
};
|
277 |
|
|
|
278 |
|
|
/* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
|
279 |
|
|
structure for that register. */
|
280 |
|
|
|
281 |
|
|
static struct equivalence *reg_equiv;
|
282 |
|
|
|
283 |
|
|
/* Nonzero if we recorded an equivalence for a LABEL_REF. */
|
284 |
|
|
static int recorded_label_ref;
|
285 |
|
|
|
286 |
|
|
static void alloc_qty (int, enum machine_mode, int, int);
|
287 |
|
|
static void validate_equiv_mem_from_store (rtx, rtx, void *);
|
288 |
|
|
static int validate_equiv_mem (rtx, rtx, rtx);
|
289 |
|
|
static int equiv_init_varies_p (rtx);
|
290 |
|
|
static int equiv_init_movable_p (rtx, int);
|
291 |
|
|
static int contains_replace_regs (rtx);
|
292 |
|
|
static int memref_referenced_p (rtx, rtx);
|
293 |
|
|
static int memref_used_between_p (rtx, rtx, rtx);
|
294 |
|
|
static void update_equiv_regs (void);
|
295 |
|
|
static void no_equiv (rtx, rtx, void *);
|
296 |
|
|
static void block_alloc (int);
|
297 |
|
|
static int qty_sugg_compare (int, int);
|
298 |
|
|
static int qty_sugg_compare_1 (const void *, const void *);
|
299 |
|
|
static int qty_compare (int, int);
|
300 |
|
|
static int qty_compare_1 (const void *, const void *);
|
301 |
|
|
static int combine_regs (rtx, rtx, int, int, rtx, int);
|
302 |
|
|
static int reg_meets_class_p (int, enum reg_class);
|
303 |
|
|
static void update_qty_class (int, int);
|
304 |
|
|
static void reg_is_set (rtx, rtx, void *);
|
305 |
|
|
static void reg_is_born (rtx, int);
|
306 |
|
|
static void wipe_dead_reg (rtx, int);
|
307 |
|
|
static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
|
308 |
|
|
int, int);
|
309 |
|
|
static void mark_life (int, enum machine_mode, int);
|
310 |
|
|
static void post_mark_life (int, enum machine_mode, int, int, int);
|
311 |
|
|
static int no_conflict_p (rtx, rtx, rtx);
|
312 |
|
|
static int requires_inout (const char *);
|
313 |
|
|
|
314 |
|
|
/* Allocate a new quantity (new within current basic block)
   for register number REGNO which is born at index BIRTH
   within the block.  MODE and SIZE are info on reg REGNO.  */

static void
alloc_qty (int regno, enum machine_mode mode, int size, int birth)
{
  int qtyno = next_qty++;

  /* Make REGNO the sole (for now) member of the new quantity's chain.  */
  reg_qty[regno] = qtyno;
  reg_offset[regno] = 0;
  reg_next_in_qty[regno] = -1;

  /* Seed the quantity's attributes from REGNO's recorded statistics and
     preferences.  */
  qty[qtyno].first_reg = regno;
  qty[qtyno].size = size;
  qty[qtyno].mode = mode;
  qty[qtyno].birth = birth;
  qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
  qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
  qty[qtyno].min_class = reg_preferred_class (regno);
  qty[qtyno].alternate_class = reg_alternate_class (regno);
  qty[qtyno].n_refs = REG_N_REFS (regno);
  qty[qtyno].freq = REG_FREQ (regno);
}
|
338 |
|
|
|
339 |
|
|
/* Main entry point of this file.  Performs local register allocation,
   one basic block at a time.  Returns nonzero (recorded_label_ref) if an
   equivalence for a LABEL_REF was recorded, meaning the jump optimizer
   needs to be rerun.  */

int
local_alloc (void)
{
  int i;
  int max_qty;
  basic_block b;

  /* We need to keep track of whether or not we recorded a LABEL_REF so
     that we know if the jump optimizer needs to be rerun.  */
  recorded_label_ref = 0;

  /* Leaf functions and non-leaf functions have different needs.
     If defined, let the machine say what kind of ordering we
     should use.  */
#ifdef ORDER_REGS_FOR_LOCAL_ALLOC
  ORDER_REGS_FOR_LOCAL_ALLOC;
#endif

  /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
     registers.  */
  update_equiv_regs ();

  /* This sets the maximum number of quantities we can have.  Quantity
     numbers start at zero and we can have one for each pseudo.  */
  max_qty = (max_regno - FIRST_PSEUDO_REGISTER);

  /* Allocate vectors of temporary data.
     See the declarations of these variables, above,
     for what they mean.  */

  qty = xmalloc (max_qty * sizeof (struct qty));
  qty_phys_copy_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
  qty_phys_num_copy_sugg = xmalloc (max_qty * sizeof (short));
  qty_phys_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
  qty_phys_num_sugg = xmalloc (max_qty * sizeof (short));

  reg_qty = xmalloc (max_regno * sizeof (int));
  reg_offset = xmalloc (max_regno * sizeof (char));
  reg_next_in_qty = xmalloc (max_regno * sizeof (int));

  /* Determine which pseudo-registers can be allocated by local-alloc.
     In general, these are the registers used only in a single block and
     which only die once.

     We need not be concerned with which block actually uses the register
     since we will never see it outside that block.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      /* -2 means "local candidate, quantity not assigned yet";
	 -1 means "not allocatable by local-alloc".  */
      if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
	reg_qty[i] = -2;
      else
	reg_qty[i] = -1;
    }

  /* Force loop below to initialize entire quantity array.  */
  next_qty = max_qty;

  /* Allocate each block's local registers, block by block.  */

  FOR_EACH_BB (b)
    {
      /* NEXT_QTY indicates which elements of the `qty_...'
	 vectors might need to be initialized because they were used
	 for the previous block; it is set to the entire array before
	 block 0.  Initialize those, with explicit loop if there are few,
	 else with bzero and bcopy.  Do not initialize vectors that are
	 explicit set by `alloc_qty'.  */

      if (next_qty < 6)
	{
	  for (i = 0; i < next_qty; i++)
	    {
	      CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
	      qty_phys_num_copy_sugg[i] = 0;
	      CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
	      qty_phys_num_sugg[i] = 0;
	    }
	}
      else
	{
#define CLEAR(vector)  \
  memset ((vector), 0, (sizeof (*(vector))) * next_qty);

	  CLEAR (qty_phys_copy_sugg);
	  CLEAR (qty_phys_num_copy_sugg);
	  CLEAR (qty_phys_sugg);
	  CLEAR (qty_phys_num_sugg);
	}

      next_qty = 0;

      block_alloc (b->index);
    }

  /* Release all per-pass temporary vectors.  */
  free (qty);
  free (qty_phys_copy_sugg);
  free (qty_phys_num_copy_sugg);
  free (qty_phys_sugg);
  free (qty_phys_num_sugg);

  free (reg_qty);
  free (reg_offset);
  free (reg_next_in_qty);

  return recorded_label_ref;
}
|
448 |
|
|
|
449 |
|
|
/* Used for communication between the following two functions: contains
|
450 |
|
|
a MEM that we wish to ensure remains unchanged. */
|
451 |
|
|
static rtx equiv_mem;
|
452 |
|
|
|
453 |
|
|
/* Set nonzero if EQUIV_MEM is modified. */
|
454 |
|
|
static int equiv_mem_modified;
|
455 |
|
|
|
456 |
|
|
/* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
|
457 |
|
|
Called via note_stores. */
|
458 |
|
|
|
459 |
|
|
static void
|
460 |
|
|
validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
|
461 |
|
|
void *data ATTRIBUTE_UNUSED)
|
462 |
|
|
{
|
463 |
|
|
if ((REG_P (dest)
|
464 |
|
|
&& reg_overlap_mentioned_p (dest, equiv_mem))
|
465 |
|
|
|| (MEM_P (dest)
|
466 |
|
|
&& true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
|
467 |
|
|
equiv_mem_modified = 1;
|
468 |
|
|
}
|
469 |
|
|
|
470 |
|
|
/* Verify that no store between START and the death of REG invalidates
   MEMREF.  MEMREF is invalidated by modifying a register used in MEMREF,
   by storing into an overlapping memory location, or with a non-const
   CALL_INSN.

   Return 1 if MEMREF remains valid.  */

static int
validate_equiv_mem (rtx start, rtx reg, rtx memref)
{
  rtx insn;
  rtx note;

  /* Communicate with validate_equiv_mem_from_store through these
     file-scope variables.  */
  equiv_mem = memref;
  equiv_mem_modified = 0;

  /* If the memory reference has side effects or is volatile, it isn't a
     valid equivalence.  */
  if (side_effects_p (memref))
    return 0;

  for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      /* REG dies here, and MEMREF survived every insn up to this point:
	 the equivalence is valid.  */
      if (find_reg_note (insn, REG_DEAD, reg))
	return 1;

      /* A call may clobber any memory that isn't read-only, unless the
	 call is known const or pure.  */
      if (CALL_P (insn) && ! MEM_READONLY_P (memref)
	  && ! CONST_OR_PURE_CALL_P (insn))
	return 0;

      /* Set equiv_mem_modified if this insn stores into MEMREF or into a
	 register MEMREF mentions.  */
      note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);

      /* If a register mentioned in MEMREF is modified via an
	 auto-increment, we lose the equivalence.  Do the same if one
	 dies; although we could extend the life, it doesn't seem worth
	 the trouble.  */

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	if ((REG_NOTE_KIND (note) == REG_INC
	     || REG_NOTE_KIND (note) == REG_DEAD)
	    && REG_P (XEXP (note, 0))
	    && reg_overlap_mentioned_p (XEXP (note, 0), memref))
	  return 0;
    }

  /* Either MEMREF was modified, or we ran off the insn chain without
     seeing REG's death: be conservative.  */
  return 0;
}
|
520 |
|
|
|
521 |
|
|
/* Returns zero if X is known to be invariant. */
|
522 |
|
|
|
523 |
|
|
static int
|
524 |
|
|
equiv_init_varies_p (rtx x)
|
525 |
|
|
{
|
526 |
|
|
RTX_CODE code = GET_CODE (x);
|
527 |
|
|
int i;
|
528 |
|
|
const char *fmt;
|
529 |
|
|
|
530 |
|
|
switch (code)
|
531 |
|
|
{
|
532 |
|
|
case MEM:
|
533 |
|
|
return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
|
534 |
|
|
|
535 |
|
|
case CONST:
|
536 |
|
|
case CONST_INT:
|
537 |
|
|
case CONST_DOUBLE:
|
538 |
|
|
case CONST_VECTOR:
|
539 |
|
|
case SYMBOL_REF:
|
540 |
|
|
case LABEL_REF:
|
541 |
|
|
return 0;
|
542 |
|
|
|
543 |
|
|
case REG:
|
544 |
|
|
return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
|
545 |
|
|
|
546 |
|
|
case ASM_OPERANDS:
|
547 |
|
|
if (MEM_VOLATILE_P (x))
|
548 |
|
|
return 1;
|
549 |
|
|
|
550 |
|
|
/* Fall through. */
|
551 |
|
|
|
552 |
|
|
default:
|
553 |
|
|
break;
|
554 |
|
|
}
|
555 |
|
|
|
556 |
|
|
fmt = GET_RTX_FORMAT (code);
|
557 |
|
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
558 |
|
|
if (fmt[i] == 'e')
|
559 |
|
|
{
|
560 |
|
|
if (equiv_init_varies_p (XEXP (x, i)))
|
561 |
|
|
return 1;
|
562 |
|
|
}
|
563 |
|
|
else if (fmt[i] == 'E')
|
564 |
|
|
{
|
565 |
|
|
int j;
|
566 |
|
|
for (j = 0; j < XVECLEN (x, i); j++)
|
567 |
|
|
if (equiv_init_varies_p (XVECEXP (x, i, j)))
|
568 |
|
|
return 1;
|
569 |
|
|
}
|
570 |
|
|
|
571 |
|
|
return 0;
|
572 |
|
|
}
|
573 |
|
|
|
574 |
|
|
/* Returns nonzero if X (used to initialize register REGNO) is movable.
|
575 |
|
|
X is only movable if the registers it uses have equivalent initializations
|
576 |
|
|
which appear to be within the same loop (or in an inner loop) and movable
|
577 |
|
|
or if they are not candidates for local_alloc and don't vary. */
|
578 |
|
|
|
579 |
|
|
static int
|
580 |
|
|
equiv_init_movable_p (rtx x, int regno)
|
581 |
|
|
{
|
582 |
|
|
int i, j;
|
583 |
|
|
const char *fmt;
|
584 |
|
|
enum rtx_code code = GET_CODE (x);
|
585 |
|
|
|
586 |
|
|
switch (code)
|
587 |
|
|
{
|
588 |
|
|
case SET:
|
589 |
|
|
return equiv_init_movable_p (SET_SRC (x), regno);
|
590 |
|
|
|
591 |
|
|
case CC0:
|
592 |
|
|
case CLOBBER:
|
593 |
|
|
return 0;
|
594 |
|
|
|
595 |
|
|
case PRE_INC:
|
596 |
|
|
case PRE_DEC:
|
597 |
|
|
case POST_INC:
|
598 |
|
|
case POST_DEC:
|
599 |
|
|
case PRE_MODIFY:
|
600 |
|
|
case POST_MODIFY:
|
601 |
|
|
return 0;
|
602 |
|
|
|
603 |
|
|
case REG:
|
604 |
|
|
return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
|
605 |
|
|
&& reg_equiv[REGNO (x)].replace)
|
606 |
|
|
|| (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
|
607 |
|
|
|
608 |
|
|
case UNSPEC_VOLATILE:
|
609 |
|
|
return 0;
|
610 |
|
|
|
611 |
|
|
case ASM_OPERANDS:
|
612 |
|
|
if (MEM_VOLATILE_P (x))
|
613 |
|
|
return 0;
|
614 |
|
|
|
615 |
|
|
/* Fall through. */
|
616 |
|
|
|
617 |
|
|
default:
|
618 |
|
|
break;
|
619 |
|
|
}
|
620 |
|
|
|
621 |
|
|
fmt = GET_RTX_FORMAT (code);
|
622 |
|
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
623 |
|
|
switch (fmt[i])
|
624 |
|
|
{
|
625 |
|
|
case 'e':
|
626 |
|
|
if (! equiv_init_movable_p (XEXP (x, i), regno))
|
627 |
|
|
return 0;
|
628 |
|
|
break;
|
629 |
|
|
case 'E':
|
630 |
|
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
631 |
|
|
if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
|
632 |
|
|
return 0;
|
633 |
|
|
break;
|
634 |
|
|
}
|
635 |
|
|
|
636 |
|
|
return 1;
|
637 |
|
|
}
|
638 |
|
|
|
639 |
|
|
/* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
|
640 |
|
|
|
641 |
|
|
static int
|
642 |
|
|
contains_replace_regs (rtx x)
|
643 |
|
|
{
|
644 |
|
|
int i, j;
|
645 |
|
|
const char *fmt;
|
646 |
|
|
enum rtx_code code = GET_CODE (x);
|
647 |
|
|
|
648 |
|
|
switch (code)
|
649 |
|
|
{
|
650 |
|
|
case CONST_INT:
|
651 |
|
|
case CONST:
|
652 |
|
|
case LABEL_REF:
|
653 |
|
|
case SYMBOL_REF:
|
654 |
|
|
case CONST_DOUBLE:
|
655 |
|
|
case CONST_VECTOR:
|
656 |
|
|
case PC:
|
657 |
|
|
case CC0:
|
658 |
|
|
case HIGH:
|
659 |
|
|
return 0;
|
660 |
|
|
|
661 |
|
|
case REG:
|
662 |
|
|
return reg_equiv[REGNO (x)].replace;
|
663 |
|
|
|
664 |
|
|
default:
|
665 |
|
|
break;
|
666 |
|
|
}
|
667 |
|
|
|
668 |
|
|
fmt = GET_RTX_FORMAT (code);
|
669 |
|
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
670 |
|
|
switch (fmt[i])
|
671 |
|
|
{
|
672 |
|
|
case 'e':
|
673 |
|
|
if (contains_replace_regs (XEXP (x, i)))
|
674 |
|
|
return 1;
|
675 |
|
|
break;
|
676 |
|
|
case 'E':
|
677 |
|
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
678 |
|
|
if (contains_replace_regs (XVECEXP (x, i, j)))
|
679 |
|
|
return 1;
|
680 |
|
|
break;
|
681 |
|
|
}
|
682 |
|
|
|
683 |
|
|
return 0;
|
684 |
|
|
}
|
685 |
|
|
|
686 |
|
|
/* TRUE if X references a memory location that would be affected by a store
   to MEMREF.  */

static int
memref_referenced_p (rtx memref, rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case HIGH:
    case LO_SUM:
      /* Leaf codes that cannot reference memory.  */
      return 0;

    case REG:
      /* A register counts if its recorded equivalent value would be
	 affected by the store.  */
      return (reg_equiv[REGNO (x)].replacement
	      && memref_referenced_p (memref,
				      reg_equiv[REGNO (x)].replacement));

    case MEM:
      if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
	return 1;
      break;

    case SET:
      /* If we are setting a MEM, it doesn't count (its address does), but any
	 other SET_DEST that has a MEM in it is referencing the MEM.  */
      if (MEM_P (SET_DEST (x)))
	{
	  if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
	    return 1;
	}
      else if (memref_referenced_p (memref, SET_DEST (x)))
	return 1;

      return memref_referenced_p (memref, SET_SRC (x));

    default:
      break;
    }

  /* Recurse over the operands of any other code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    switch (fmt[i])
      {
      case 'e':
	if (memref_referenced_p (memref, XEXP (x, i)))
	  return 1;
	break;
      case 'E':
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (memref_referenced_p (memref, XVECEXP (x, i, j)))
	    return 1;
	break;
      }

  return 0;
}
|
754 |
|
|
|
755 |
|
|
/* TRUE if some insn in the range (START, END] references a memory location
|
756 |
|
|
that would be affected by a store to MEMREF. */
|
757 |
|
|
|
758 |
|
|
static int
|
759 |
|
|
memref_used_between_p (rtx memref, rtx start, rtx end)
|
760 |
|
|
{
|
761 |
|
|
rtx insn;
|
762 |
|
|
|
763 |
|
|
for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
|
764 |
|
|
insn = NEXT_INSN (insn))
|
765 |
|
|
{
|
766 |
|
|
if (!INSN_P (insn))
|
767 |
|
|
continue;
|
768 |
|
|
|
769 |
|
|
if (memref_referenced_p (memref, PATTERN (insn)))
|
770 |
|
|
return 1;
|
771 |
|
|
|
772 |
|
|
/* Nonconst functions may access memory. */
|
773 |
|
|
if (CALL_P (insn)
|
774 |
|
|
&& (! CONST_OR_PURE_CALL_P (insn)
|
775 |
|
|
|| pure_call_p (insn)))
|
776 |
|
|
return 1;
|
777 |
|
|
}
|
778 |
|
|
|
779 |
|
|
return 0;
|
780 |
|
|
}
|
781 |
|
|
|
782 |
|
|
/* Find registers that are equivalent to a single value throughout the
|
783 |
|
|
compilation (either because they can be referenced in memory or are set once
|
784 |
|
|
from a single constant). Lower their priority for a register.
|
785 |
|
|
|
786 |
|
|
If such a register is only referenced once, try substituting its value
|
787 |
|
|
into the using insn. If it succeeds, we can eliminate the register
|
788 |
|
|
completely.
|
789 |
|
|
|
790 |
|
|
Initialize the REG_EQUIV_INIT array of initializing insns. */
|
791 |
|
|
|
792 |
|
|
static void
update_equiv_regs (void)
{
  rtx insn;
  basic_block bb;
  int loop_depth;
  /* Set of pseudos whose defining insns were deleted or moved; their
     liveness info must be scrubbed from every basic block at the end.  */
  regset_head cleared_regs;
  int clear_regnos = 0;

  reg_equiv = xcalloc (max_regno, sizeof *reg_equiv);
  INIT_REG_SET (&cleared_regs);
  reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
  reg_equiv_init_size = max_regno;

  init_alias_analysis ();

  /* Pass 1: Scan the insns and find which registers have equivalences.
     Do this in a separate scan of the insns because (due to
     -fcse-follow-jumps) a register can be set below its use.  */
  FOR_EACH_BB (bb)
    {
      loop_depth = bb->loop_depth;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  rtx note;
	  rtx set;
	  rtx dest, src;
	  int regno;

	  if (! INSN_P (insn))
	    continue;

	  /* An auto-increment side effect is an extra modification of the
	     register, so it can have no single equivalent value.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_INC)
	      no_equiv (XEXP (note, 0), note, NULL);

	  set = single_set (insn);

	  /* If this insn contains more (or less) than a single SET,
	     only mark all destinations as having no known equivalence.  */
	  if (set == 0)
	    {
	      note_stores (PATTERN (insn), no_equiv, NULL);
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    {
	      int i;

	      /* single_set found the interesting SET; invalidate every
		 other destination in the PARALLEL.  */
	      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
		{
		  rtx part = XVECEXP (PATTERN (insn), 0, i);
		  if (part != set)
		    note_stores (part, no_equiv, NULL);
		}
	    }

	  dest = SET_DEST (set);
	  src = SET_SRC (set);

	  /* See if this is setting up the equivalence between an argument
	     register and its stack slot.  */
	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note)
	    {
	      gcc_assert (REG_P (dest));
	      regno = REGNO (dest);

	      /* Note that we don't want to clear reg_equiv_init even if there
		 are multiple sets of this register.  */
	      reg_equiv[regno].is_arg_equivalence = 1;

	      /* Record for reload that this is an equivalencing insn.  */
	      if (rtx_equal_p (src, XEXP (note, 0)))
		reg_equiv_init[regno]
		  = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);

	      /* Continue normally in case this is a candidate for
		 replacements.  */
	    }

	  if (!optimize)
	    continue;

	  /* We only handle the case of a pseudo register being set
	     once, or always to the same value.  */
	  /* ??? The mn10200 port breaks if we add equivalences for
	     values that need an ADDRESS_REGS register and set them equivalent
	     to a MEM of a pseudo.  The actual problem is in the over-conservative
	     handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
	     calculate_needs, but we traditionally work around this problem
	     here by rejecting equivalences when the destination is in a register
	     that's likely spilled.  This is fragile, of course, since the
	     preferred class of a pseudo depends on all instructions that set
	     or use it.  */

	  if (!REG_P (dest)
	      || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
	      || reg_equiv[regno].init_insns == const0_rtx
	      || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
		  && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
	    {
	      /* This might be setting a SUBREG of a pseudo, a pseudo that is
		 also set somewhere else to a constant.  */
	      note_stores (set, no_equiv, NULL);
	      continue;
	    }

	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

	  /* cse sometimes generates function invariants, but doesn't put a
	     REG_EQUAL note on the insn.  Since this note would be redundant,
	     there's no point creating it earlier than here.  */
	  if (! note && ! rtx_varies_p (src, 0))
	    note = set_unique_reg_note (insn, REG_EQUAL, src);

	  /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
	     since it represents a function call */
	  if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
	    note = NULL_RTX;

	  /* Multiple sets are only OK if every set agrees with a known
	     invariant replacement value; otherwise drop the equivalence.  */
	  if (REG_N_SETS (regno) != 1
	      && (! note
		  || rtx_varies_p (XEXP (note, 0), 0)
		  || (reg_equiv[regno].replacement
		      && ! rtx_equal_p (XEXP (note, 0),
					reg_equiv[regno].replacement))))
	    {
	      no_equiv (dest, set, NULL);
	      continue;
	    }
	  /* Record this insn as initializing this register.  */
	  reg_equiv[regno].init_insns
	    = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);

	  /* If this register is known to be equal to a constant, record that
	     it is always equivalent to the constant.  */
	  if (note && ! rtx_varies_p (XEXP (note, 0), 0))
	    PUT_MODE (note, (enum machine_mode) REG_EQUIV);

	  /* If this insn introduces a "constant" register, decrease the priority
	     of that register.  Record this insn if the register is only used once
	     more and the equivalence value is the same as our source.

	     The latter condition is checked for two reasons:  First, it is an
	     indication that it may be more efficient to actually emit the insn
	     as written (if no registers are available, reload will substitute
	     the equivalence).  Secondly, it avoids problems with any registers
	     dying in this insn whose death notes would be missed.

	     If we don't have a REG_EQUIV note, see if this insn is loading
	     a register used only in one basic block from a MEM.  If so, and the
	     MEM remains unchanged for the life of the register, add a REG_EQUIV
	     note.  */

	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);

	  if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
	      && MEM_P (SET_SRC (set))
	      && validate_equiv_mem (insn, dest, SET_SRC (set)))
	    REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
							 REG_NOTES (insn));

	  if (note)
	    {
	      int regno = REGNO (dest);
	      rtx x = XEXP (note, 0);

	      /* If we haven't done so, record for reload that this is an
		 equivalencing insn.  */
	      if (!reg_equiv[regno].is_arg_equivalence
		  && (!MEM_P (x) || rtx_equal_p (src, x)))
		reg_equiv_init[regno]
		  = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);

	      /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
		 We might end up substituting the LABEL_REF for uses of the
		 pseudo here or later.  That kind of transformation may turn an
		 indirect jump into a direct jump, in which case we must rerun the
		 jump optimizer to ensure that the JUMP_LABEL fields are valid.  */
	      if (GET_CODE (x) == LABEL_REF
		  || (GET_CODE (x) == CONST
		      && GET_CODE (XEXP (x, 0)) == PLUS
		      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
		recorded_label_ref = 1;

	      reg_equiv[regno].replacement = x;
	      reg_equiv[regno].src_p = &SET_SRC (set);
	      reg_equiv[regno].loop_depth = loop_depth;

	      /* Don't mess with things live during setjmp.  */
	      if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
		{
		  /* Note that the statement below does not affect the priority
		     in local-alloc!  */
		  REG_LIVE_LENGTH (regno) *= 2;

		  /* If the register is referenced exactly twice, meaning it is
		     set once and used once, indicate that the reference may be
		     replaced by the equivalence we computed above.  Do this
		     even if the register is only used in one block so that
		     dependencies can be handled where the last register is
		     used in a different block (i.e. HIGH / LO_SUM sequences)
		     and to reduce the number of registers alive across
		     calls.  */

		  if (REG_N_REFS (regno) == 2
		      && (rtx_equal_p (x, src)
			  || ! equiv_init_varies_p (src))
		      && NONJUMP_INSN_P (insn)
		      && equiv_init_movable_p (PATTERN (insn), regno))
		    reg_equiv[regno].replace = 1;
		}
	    }
	}
    }

  if (!optimize)
    goto out;

  /* Pass 2: gather additional equivalences with memory.  This needs
     to be done after we know which registers we are going to replace.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx set, src, dest;
      unsigned regno;

      if (! INSN_P (insn))
	continue;

      set = single_set (insn);
      if (! set)
	continue;

      dest = SET_DEST (set);
      src = SET_SRC (set);

      /* If this sets a MEM to the contents of a REG that is only used
	 in a single basic block, see if the register is always equivalent
	 to that memory location and if moving the store from INSN to the
	 insn that set REG is safe.  If so, put a REG_EQUIV note on the
	 initializing insn.

	 Don't add a REG_EQUIV note if the insn already has one.  The existing
	 REG_EQUIV is likely more useful than the one we are adding.

	 If one of the regs in the address has reg_equiv[REGNO].replace set,
	 then we can't add this REG_EQUIV note.  The reg_equiv[REGNO].replace
	 optimization may move the set of this register immediately before
	 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
	 the mention in the REG_EQUIV note would be to an uninitialized
	 pseudo.  */

      if (MEM_P (dest) && REG_P (src)
	  && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
	  && REG_BASIC_BLOCK (regno) >= 0
	  && REG_N_SETS (regno) == 1
	  && reg_equiv[regno].init_insns != 0
	  && reg_equiv[regno].init_insns != const0_rtx
	  && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
			      REG_EQUIV, NULL_RTX)
	  && ! contains_replace_regs (XEXP (dest, 0)))
	{
	  rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
	  if (validate_equiv_mem (init_insn, src, dest)
	      && ! memref_used_between_p (dest, init_insn, insn))
	    {
	      REG_NOTES (init_insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, dest,
				     REG_NOTES (init_insn));
	      /* This insn makes the equivalence, not the one initializing
		 the register.  */
	      reg_equiv_init[regno]
		= gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
	    }
	}
    }

  /* Pass 3: scan all regs killed in an insn to see if any of them are
     registers only used that once.  If so, see if we can replace the
     reference with the equivalent form.  If we can, delete the
     initializing reference and this register will go away.  If we
     can't replace the reference, and the initializing reference is
     within the same loop (or in an inner loop), then move the register
     initialization just before the use, so that they are in the same
     basic block.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      loop_depth = bb->loop_depth;
      for (insn = BB_END (bb);
	   insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx link;

	  if (! INSN_P (insn))
	    continue;

	  /* Don't substitute into a non-local goto, this confuses CFG.  */
	  if (JUMP_P (insn)
	      && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    continue;

	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    {
	      if (REG_NOTE_KIND (link) == REG_DEAD
		  /* Make sure this insn still refers to the register.  */
		  && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
		{
		  int regno = REGNO (XEXP (link, 0));
		  rtx equiv_insn;

		  /* Only substitute/move when marked replaceable and the
		     use is at least as deeply nested as the initializer,
		     so we never move an init into a deeper loop.  */
		  if (! reg_equiv[regno].replace
		      || reg_equiv[regno].loop_depth < loop_depth)
		    continue;

		  /* reg_equiv[REGNO].replace gets set only when
		     REG_N_REFS[REGNO] is 2, i.e. the register is set
		     once and used once.  (If it were only set, but not used,
		     flow would have deleted the setting insns.)  Hence
		     there can only be one insn in reg_equiv[REGNO].init_insns.  */
		  gcc_assert (reg_equiv[regno].init_insns
			      && !XEXP (reg_equiv[regno].init_insns, 1));
		  equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);

		  /* We may not move instructions that can throw, since
		     that changes basic block boundaries and we are not
		     prepared to adjust the CFG to match.  */
		  if (can_throw_internal (equiv_insn))
		    continue;

		  if (asm_noperands (PATTERN (equiv_insn)) < 0
		      && validate_replace_rtx (regno_reg_rtx[regno],
					       *(reg_equiv[regno].src_p), insn))
		    {
		      rtx equiv_link;
		      rtx last_link;
		      rtx note;

		      /* Find the last note.  */
		      for (last_link = link; XEXP (last_link, 1);
			   last_link = XEXP (last_link, 1))
			;

		      /* Append the REG_DEAD notes from equiv_insn.  */
		      equiv_link = REG_NOTES (equiv_insn);
		      while (equiv_link)
			{
			  note = equiv_link;
			  equiv_link = XEXP (equiv_link, 1);
			  if (REG_NOTE_KIND (note) == REG_DEAD)
			    {
			      remove_note (equiv_insn, note);
			      XEXP (last_link, 1) = note;
			      XEXP (note, 1) = NULL_RTX;
			      last_link = note;
			    }
			}

		      remove_death (regno, insn);
		      REG_N_REFS (regno) = 0;
		      REG_FREQ (regno) = 0;
		      delete_insn (equiv_insn);

		      reg_equiv[regno].init_insns
			= XEXP (reg_equiv[regno].init_insns, 1);

		      /* Remember to clear REGNO from all basic block's live
			 info.  */
		      SET_REGNO_REG_SET (&cleared_regs, regno);
		      clear_regnos++;
		      reg_equiv_init[regno] = NULL_RTX;
		    }
		  /* Move the initialization of the register to just before
		     INSN.  Update the flow information.  */
		  else if (PREV_INSN (insn) != equiv_insn)
		    {
		      rtx new_insn;

		      new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
		      REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
		      REG_NOTES (equiv_insn) = 0;

		      /* Make sure this insn is recognized before
			 reload begins, otherwise
			 eliminate_regs_in_insn will die.  */
		      INSN_CODE (new_insn) = INSN_CODE (equiv_insn);

		      delete_insn (equiv_insn);

		      XEXP (reg_equiv[regno].init_insns, 0) = new_insn;

		      /* Set and use are now adjacent in this block; update
			 the register's statistics to match.  */
		      REG_BASIC_BLOCK (regno) = bb->index;
		      REG_N_CALLS_CROSSED (regno) = 0;
		      REG_N_THROWING_CALLS_CROSSED (regno) = 0;
		      REG_LIVE_LENGTH (regno) = 2;

		      if (insn == BB_HEAD (bb))
			BB_HEAD (bb) = PREV_INSN (insn);

		      /* Remember to clear REGNO from all basic block's live
			 info.  */
		      SET_REGNO_REG_SET (&cleared_regs, regno);
		      clear_regnos++;
		      reg_equiv_init[regno]
			= gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
		    }
		}
	    }
	}
    }

  /* Clear all dead REGNOs from all basic block's live info.  */
  if (clear_regnos)
    {
      unsigned j;

      /* For many cleared regs, one set operation per block is cheaper;
	 for a few, iterate the regs and clear each bit individually.
	 (The threshold of 8 is a heuristic.)  */
      if (clear_regnos > 8)
	{
	  FOR_EACH_BB (bb)
	    {
	      AND_COMPL_REG_SET (bb->il.rtl->global_live_at_start,
				 &cleared_regs);
	      AND_COMPL_REG_SET (bb->il.rtl->global_live_at_end,
				 &cleared_regs);
	    }
	}
      else
	{
	  reg_set_iterator rsi;
	  EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j, rsi)
	    {
	      FOR_EACH_BB (bb)
		{
		  CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start, j);
		  CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_end, j);
		}
	    }
	}
    }

 out:
  /* Clean up.  */
  end_alias_analysis ();
  CLEAR_REG_SET (&cleared_regs);
  free (reg_equiv);
}
|
1243 |
|
|
|
1244 |
|
|
/* Mark REG as having no known equivalence.
|
1245 |
|
|
Some instructions might have been processed before and furnished
|
1246 |
|
|
with REG_EQUIV notes for this register; these notes will have to be
|
1247 |
|
|
removed.
|
1248 |
|
|
STORE is the piece of RTL that does the non-constant / conflicting
|
1249 |
|
|
assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
|
1250 |
|
|
but needs to be there because this function is called from note_stores. */
|
1251 |
|
|
static void
|
1252 |
|
|
no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
|
1253 |
|
|
{
|
1254 |
|
|
int regno;
|
1255 |
|
|
rtx list;
|
1256 |
|
|
|
1257 |
|
|
if (!REG_P (reg))
|
1258 |
|
|
return;
|
1259 |
|
|
regno = REGNO (reg);
|
1260 |
|
|
list = reg_equiv[regno].init_insns;
|
1261 |
|
|
if (list == const0_rtx)
|
1262 |
|
|
return;
|
1263 |
|
|
reg_equiv[regno].init_insns = const0_rtx;
|
1264 |
|
|
reg_equiv[regno].replacement = NULL_RTX;
|
1265 |
|
|
/* This doesn't matter for equivalences made for argument registers, we
|
1266 |
|
|
should keep their initialization insns. */
|
1267 |
|
|
if (reg_equiv[regno].is_arg_equivalence)
|
1268 |
|
|
return;
|
1269 |
|
|
reg_equiv_init[regno] = NULL_RTX;
|
1270 |
|
|
for (; list; list = XEXP (list, 1))
|
1271 |
|
|
{
|
1272 |
|
|
rtx insn = XEXP (list, 0);
|
1273 |
|
|
remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
|
1274 |
|
|
}
|
1275 |
|
|
}
|
1276 |
|
|
|
1277 |
|
|
/* Allocate hard regs to the pseudo regs used only within block number B.
|
1278 |
|
|
Only the pseudos that die but once can be handled. */
|
1279 |
|
|
|
1280 |
|
|
static void
|
1281 |
|
|
block_alloc (int b)
|
1282 |
|
|
{
|
1283 |
|
|
int i, q;
|
1284 |
|
|
rtx insn;
|
1285 |
|
|
rtx note, hard_reg;
|
1286 |
|
|
int insn_number = 0;
|
1287 |
|
|
int insn_count = 0;
|
1288 |
|
|
int max_uid = get_max_uid ();
|
1289 |
|
|
int *qty_order;
|
1290 |
|
|
int no_conflict_combined_regno = -1;
|
1291 |
|
|
|
1292 |
|
|
/* Count the instructions in the basic block. */
|
1293 |
|
|
|
1294 |
|
|
insn = BB_END (BASIC_BLOCK (b));
|
1295 |
|
|
while (1)
|
1296 |
|
|
{
|
1297 |
|
|
if (!NOTE_P (insn))
|
1298 |
|
|
{
|
1299 |
|
|
++insn_count;
|
1300 |
|
|
gcc_assert (insn_count <= max_uid);
|
1301 |
|
|
}
|
1302 |
|
|
if (insn == BB_HEAD (BASIC_BLOCK (b)))
|
1303 |
|
|
break;
|
1304 |
|
|
insn = PREV_INSN (insn);
|
1305 |
|
|
}
|
1306 |
|
|
|
1307 |
|
|
/* +2 to leave room for a post_mark_life at the last insn and for
|
1308 |
|
|
the birth of a CLOBBER in the first insn. */
|
1309 |
|
|
regs_live_at = xcalloc ((2 * insn_count + 2), sizeof (HARD_REG_SET));
|
1310 |
|
|
|
1311 |
|
|
/* Initialize table of hardware registers currently live. */
|
1312 |
|
|
|
1313 |
|
|
REG_SET_TO_HARD_REG_SET (regs_live,
|
1314 |
|
|
BASIC_BLOCK (b)->il.rtl->global_live_at_start);
|
1315 |
|
|
|
1316 |
|
|
/* This loop scans the instructions of the basic block
|
1317 |
|
|
and assigns quantities to registers.
|
1318 |
|
|
It computes which registers to tie. */
|
1319 |
|
|
|
1320 |
|
|
insn = BB_HEAD (BASIC_BLOCK (b));
|
1321 |
|
|
while (1)
|
1322 |
|
|
{
|
1323 |
|
|
if (!NOTE_P (insn))
|
1324 |
|
|
insn_number++;
|
1325 |
|
|
|
1326 |
|
|
if (INSN_P (insn))
|
1327 |
|
|
{
|
1328 |
|
|
rtx link, set;
|
1329 |
|
|
int win = 0;
|
1330 |
|
|
rtx r0, r1 = NULL_RTX;
|
1331 |
|
|
int combined_regno = -1;
|
1332 |
|
|
int i;
|
1333 |
|
|
|
1334 |
|
|
this_insn_number = insn_number;
|
1335 |
|
|
this_insn = insn;
|
1336 |
|
|
|
1337 |
|
|
extract_insn (insn);
|
1338 |
|
|
which_alternative = -1;
|
1339 |
|
|
|
1340 |
|
|
/* Is this insn suitable for tying two registers?
|
1341 |
|
|
If so, try doing that.
|
1342 |
|
|
Suitable insns are those with at least two operands and where
|
1343 |
|
|
operand 0 is an output that is a register that is not
|
1344 |
|
|
earlyclobber.
|
1345 |
|
|
|
1346 |
|
|
We can tie operand 0 with some operand that dies in this insn.
|
1347 |
|
|
First look for operands that are required to be in the same
|
1348 |
|
|
register as operand 0. If we find such, only try tying that
|
1349 |
|
|
operand or one that can be put into that operand if the
|
1350 |
|
|
operation is commutative. If we don't find an operand
|
1351 |
|
|
that is required to be in the same register as operand 0,
|
1352 |
|
|
we can tie with any operand.
|
1353 |
|
|
|
1354 |
|
|
Subregs in place of regs are also ok.
|
1355 |
|
|
|
1356 |
|
|
If tying is done, WIN is set nonzero. */
|
1357 |
|
|
|
1358 |
|
|
if (optimize
|
1359 |
|
|
&& recog_data.n_operands > 1
|
1360 |
|
|
&& recog_data.constraints[0][0] == '='
|
1361 |
|
|
&& recog_data.constraints[0][1] != '&')
|
1362 |
|
|
{
|
1363 |
|
|
/* If non-negative, is an operand that must match operand 0. */
|
1364 |
|
|
int must_match_0 = -1;
|
1365 |
|
|
/* Counts number of alternatives that require a match with
|
1366 |
|
|
operand 0. */
|
1367 |
|
|
int n_matching_alts = 0;
|
1368 |
|
|
|
1369 |
|
|
for (i = 1; i < recog_data.n_operands; i++)
|
1370 |
|
|
{
|
1371 |
|
|
const char *p = recog_data.constraints[i];
|
1372 |
|
|
int this_match = requires_inout (p);
|
1373 |
|
|
|
1374 |
|
|
n_matching_alts += this_match;
|
1375 |
|
|
if (this_match == recog_data.n_alternatives)
|
1376 |
|
|
must_match_0 = i;
|
1377 |
|
|
}
|
1378 |
|
|
|
1379 |
|
|
r0 = recog_data.operand[0];
|
1380 |
|
|
for (i = 1; i < recog_data.n_operands; i++)
|
1381 |
|
|
{
|
1382 |
|
|
/* Skip this operand if we found an operand that
|
1383 |
|
|
must match operand 0 and this operand isn't it
|
1384 |
|
|
and can't be made to be it by commutativity. */
|
1385 |
|
|
|
1386 |
|
|
if (must_match_0 >= 0 && i != must_match_0
|
1387 |
|
|
&& ! (i == must_match_0 + 1
|
1388 |
|
|
&& recog_data.constraints[i-1][0] == '%')
|
1389 |
|
|
&& ! (i == must_match_0 - 1
|
1390 |
|
|
&& recog_data.constraints[i][0] == '%'))
|
1391 |
|
|
continue;
|
1392 |
|
|
|
1393 |
|
|
/* Likewise if each alternative has some operand that
|
1394 |
|
|
must match operand zero. In that case, skip any
|
1395 |
|
|
operand that doesn't list operand 0 since we know that
|
1396 |
|
|
the operand always conflicts with operand 0. We
|
1397 |
|
|
ignore commutativity in this case to keep things simple. */
|
1398 |
|
|
if (n_matching_alts == recog_data.n_alternatives
|
1399 |
|
|
&& 0 == requires_inout (recog_data.constraints[i]))
|
1400 |
|
|
continue;
|
1401 |
|
|
|
1402 |
|
|
r1 = recog_data.operand[i];
|
1403 |
|
|
|
1404 |
|
|
/* If the operand is an address, find a register in it.
|
1405 |
|
|
There may be more than one register, but we only try one
|
1406 |
|
|
of them. */
|
1407 |
|
|
if (recog_data.constraints[i][0] == 'p'
|
1408 |
|
|
|| EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
|
1409 |
|
|
recog_data.constraints[i]))
|
1410 |
|
|
while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
|
1411 |
|
|
r1 = XEXP (r1, 0);
|
1412 |
|
|
|
1413 |
|
|
/* Avoid making a call-saved register unnecessarily
|
1414 |
|
|
clobbered. */
|
1415 |
|
|
hard_reg = get_hard_reg_initial_reg (cfun, r1);
|
1416 |
|
|
if (hard_reg != NULL_RTX)
|
1417 |
|
|
{
|
1418 |
|
|
if (REG_P (hard_reg)
|
1419 |
|
|
&& REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
|
1420 |
|
|
&& !call_used_regs[REGNO (hard_reg)])
|
1421 |
|
|
continue;
|
1422 |
|
|
}
|
1423 |
|
|
|
1424 |
|
|
if (REG_P (r0) || GET_CODE (r0) == SUBREG)
|
1425 |
|
|
{
|
1426 |
|
|
/* We have two priorities for hard register preferences.
|
1427 |
|
|
If we have a move insn or an insn whose first input
|
1428 |
|
|
can only be in the same register as the output, give
|
1429 |
|
|
priority to an equivalence found from that insn. */
|
1430 |
|
|
int may_save_copy
|
1431 |
|
|
= (r1 == recog_data.operand[i] && must_match_0 >= 0);
|
1432 |
|
|
|
1433 |
|
|
if (REG_P (r1) || GET_CODE (r1) == SUBREG)
|
1434 |
|
|
win = combine_regs (r1, r0, may_save_copy,
|
1435 |
|
|
insn_number, insn, 0);
|
1436 |
|
|
}
|
1437 |
|
|
if (win)
|
1438 |
|
|
break;
|
1439 |
|
|
}
|
1440 |
|
|
}
|
1441 |
|
|
|
1442 |
|
|
/* Recognize an insn sequence with an ultimate result
|
1443 |
|
|
which can safely overlap one of the inputs.
|
1444 |
|
|
The sequence begins with a CLOBBER of its result,
|
1445 |
|
|
and ends with an insn that copies the result to itself
|
1446 |
|
|
and has a REG_EQUAL note for an equivalent formula.
|
1447 |
|
|
That note indicates what the inputs are.
|
1448 |
|
|
The result and the input can overlap if each insn in
|
1449 |
|
|
the sequence either doesn't mention the input
|
1450 |
|
|
or has a REG_NO_CONFLICT note to inhibit the conflict.
|
1451 |
|
|
|
1452 |
|
|
We do the combining test at the CLOBBER so that the
|
1453 |
|
|
destination register won't have had a quantity number
|
1454 |
|
|
assigned, since that would prevent combining. */
|
1455 |
|
|
|
1456 |
|
|
if (optimize
|
1457 |
|
|
&& GET_CODE (PATTERN (insn)) == CLOBBER
|
1458 |
|
|
&& (r0 = XEXP (PATTERN (insn), 0),
|
1459 |
|
|
REG_P (r0))
|
1460 |
|
|
&& (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
|
1461 |
|
|
&& XEXP (link, 0) != 0
|
1462 |
|
|
&& NONJUMP_INSN_P (XEXP (link, 0))
|
1463 |
|
|
&& (set = single_set (XEXP (link, 0))) != 0
|
1464 |
|
|
&& SET_DEST (set) == r0 && SET_SRC (set) == r0
|
1465 |
|
|
&& (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
|
1466 |
|
|
NULL_RTX)) != 0)
|
1467 |
|
|
{
|
1468 |
|
|
if (r1 = XEXP (note, 0), REG_P (r1)
|
1469 |
|
|
/* Check that we have such a sequence. */
|
1470 |
|
|
&& no_conflict_p (insn, r0, r1))
|
1471 |
|
|
win = combine_regs (r1, r0, 1, insn_number, insn, 1);
|
1472 |
|
|
else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
|
1473 |
|
|
&& (r1 = XEXP (XEXP (note, 0), 0),
|
1474 |
|
|
REG_P (r1) || GET_CODE (r1) == SUBREG)
|
1475 |
|
|
&& no_conflict_p (insn, r0, r1))
|
1476 |
|
|
win = combine_regs (r1, r0, 0, insn_number, insn, 1);
|
1477 |
|
|
|
1478 |
|
|
/* Here we care if the operation to be computed is
|
1479 |
|
|
commutative. */
|
1480 |
|
|
else if (COMMUTATIVE_P (XEXP (note, 0))
|
1481 |
|
|
&& (r1 = XEXP (XEXP (note, 0), 1),
|
1482 |
|
|
(REG_P (r1) || GET_CODE (r1) == SUBREG))
|
1483 |
|
|
&& no_conflict_p (insn, r0, r1))
|
1484 |
|
|
win = combine_regs (r1, r0, 0, insn_number, insn, 1);
|
1485 |
|
|
|
1486 |
|
|
/* If we did combine something, show the register number
|
1487 |
|
|
in question so that we know to ignore its death. */
|
1488 |
|
|
if (win)
|
1489 |
|
|
no_conflict_combined_regno = REGNO (r1);
|
1490 |
|
|
}
|
1491 |
|
|
|
1492 |
|
|
/* If registers were just tied, set COMBINED_REGNO
|
1493 |
|
|
to the number of the register used in this insn
|
1494 |
|
|
that was tied to the register set in this insn.
|
1495 |
|
|
This register's qty should not be "killed". */
|
1496 |
|
|
|
1497 |
|
|
if (win)
|
1498 |
|
|
{
|
1499 |
|
|
while (GET_CODE (r1) == SUBREG)
|
1500 |
|
|
r1 = SUBREG_REG (r1);
|
1501 |
|
|
combined_regno = REGNO (r1);
|
1502 |
|
|
}
|
1503 |
|
|
|
1504 |
|
|
/* Mark the death of everything that dies in this instruction,
|
1505 |
|
|
except for anything that was just combined. */
|
1506 |
|
|
|
1507 |
|
|
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
|
1508 |
|
|
if (REG_NOTE_KIND (link) == REG_DEAD
|
1509 |
|
|
&& REG_P (XEXP (link, 0))
|
1510 |
|
|
&& combined_regno != (int) REGNO (XEXP (link, 0))
|
1511 |
|
|
&& (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
|
1512 |
|
|
|| ! find_reg_note (insn, REG_NO_CONFLICT,
|
1513 |
|
|
XEXP (link, 0))))
|
1514 |
|
|
wipe_dead_reg (XEXP (link, 0), 0);
|
1515 |
|
|
|
1516 |
|
|
/* Allocate qty numbers for all registers local to this block
|
1517 |
|
|
that are born (set) in this instruction.
|
1518 |
|
|
A pseudo that already has a qty is not changed. */
|
1519 |
|
|
|
1520 |
|
|
note_stores (PATTERN (insn), reg_is_set, NULL);
|
1521 |
|
|
|
1522 |
|
|
/* If anything is set in this insn and then unused, mark it as dying
|
1523 |
|
|
after this insn, so it will conflict with our outputs. This
|
1524 |
|
|
can't match with something that combined, and it doesn't matter
|
1525 |
|
|
if it did. Do this after the calls to reg_is_set since these
|
1526 |
|
|
die after, not during, the current insn. */
|
1527 |
|
|
|
1528 |
|
|
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
|
1529 |
|
|
if (REG_NOTE_KIND (link) == REG_UNUSED
|
1530 |
|
|
&& REG_P (XEXP (link, 0)))
|
1531 |
|
|
wipe_dead_reg (XEXP (link, 0), 1);
|
1532 |
|
|
|
1533 |
|
|
/* If this is an insn that has a REG_RETVAL note pointing at a
|
1534 |
|
|
CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
|
1535 |
|
|
block, so clear any register number that combined within it. */
|
1536 |
|
|
if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
|
1537 |
|
|
&& NONJUMP_INSN_P (XEXP (note, 0))
|
1538 |
|
|
&& GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
|
1539 |
|
|
no_conflict_combined_regno = -1;
|
1540 |
|
|
}
|
1541 |
|
|
|
1542 |
|
|
/* Set the registers live after INSN_NUMBER. Note that we never
|
1543 |
|
|
record the registers live before the block's first insn, since no
|
1544 |
|
|
pseudos we care about are live before that insn. */
|
1545 |
|
|
|
1546 |
|
|
IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
|
1547 |
|
|
IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
|
1548 |
|
|
|
1549 |
|
|
if (insn == BB_END (BASIC_BLOCK (b)))
|
1550 |
|
|
break;
|
1551 |
|
|
|
1552 |
|
|
insn = NEXT_INSN (insn);
|
1553 |
|
|
}
|
1554 |
|
|
|
1555 |
|
|
/* Now every register that is local to this basic block
|
1556 |
|
|
should have been given a quantity, or else -1 meaning ignore it.
|
1557 |
|
|
Every quantity should have a known birth and death.
|
1558 |
|
|
|
1559 |
|
|
Order the qtys so we assign them registers in order of the
|
1560 |
|
|
number of suggested registers they need so we allocate those with
|
1561 |
|
|
the most restrictive needs first. */
|
1562 |
|
|
|
1563 |
|
|
qty_order = xmalloc (next_qty * sizeof (int));
|
1564 |
|
|
for (i = 0; i < next_qty; i++)
|
1565 |
|
|
qty_order[i] = i;
|
1566 |
|
|
|
1567 |
|
|
#define EXCHANGE(I1, I2) \
|
1568 |
|
|
{ i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
|
1569 |
|
|
|
1570 |
|
|
switch (next_qty)
|
1571 |
|
|
{
|
1572 |
|
|
case 3:
|
1573 |
|
|
/* Make qty_order[2] be the one to allocate last. */
|
1574 |
|
|
if (qty_sugg_compare (0, 1) > 0)
|
1575 |
|
|
EXCHANGE (0, 1);
|
1576 |
|
|
if (qty_sugg_compare (1, 2) > 0)
|
1577 |
|
|
EXCHANGE (2, 1);
|
1578 |
|
|
|
1579 |
|
|
/* ... Fall through ... */
|
1580 |
|
|
case 2:
|
1581 |
|
|
/* Put the best one to allocate in qty_order[0]. */
|
1582 |
|
|
if (qty_sugg_compare (0, 1) > 0)
|
1583 |
|
|
EXCHANGE (0, 1);
|
1584 |
|
|
|
1585 |
|
|
/* ... Fall through ... */
|
1586 |
|
|
|
1587 |
|
|
case 1:
|
1588 |
|
|
case 0:
|
1589 |
|
|
/* Nothing to do here. */
|
1590 |
|
|
break;
|
1591 |
|
|
|
1592 |
|
|
default:
|
1593 |
|
|
qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
|
1594 |
|
|
}
|
1595 |
|
|
|
1596 |
|
|
/* Try to put each quantity in a suggested physical register, if it has one.
|
1597 |
|
|
This may cause registers to be allocated that otherwise wouldn't be, but
|
1598 |
|
|
this seems acceptable in local allocation (unlike global allocation). */
|
1599 |
|
|
for (i = 0; i < next_qty; i++)
|
1600 |
|
|
{
|
1601 |
|
|
q = qty_order[i];
|
1602 |
|
|
if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
|
1603 |
|
|
qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
|
1604 |
|
|
0, 1, qty[q].birth, qty[q].death);
|
1605 |
|
|
else
|
1606 |
|
|
qty[q].phys_reg = -1;
|
1607 |
|
|
}
|
1608 |
|
|
|
1609 |
|
|
/* Order the qtys so we assign them registers in order of
|
1610 |
|
|
decreasing length of life. Normally call qsort, but if we
|
1611 |
|
|
have only a very small number of quantities, sort them ourselves. */
|
1612 |
|
|
|
1613 |
|
|
for (i = 0; i < next_qty; i++)
|
1614 |
|
|
qty_order[i] = i;
|
1615 |
|
|
|
1616 |
|
|
#define EXCHANGE(I1, I2) \
|
1617 |
|
|
{ i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
|
1618 |
|
|
|
1619 |
|
|
switch (next_qty)
|
1620 |
|
|
{
|
1621 |
|
|
case 3:
|
1622 |
|
|
/* Make qty_order[2] be the one to allocate last. */
|
1623 |
|
|
if (qty_compare (0, 1) > 0)
|
1624 |
|
|
EXCHANGE (0, 1);
|
1625 |
|
|
if (qty_compare (1, 2) > 0)
|
1626 |
|
|
EXCHANGE (2, 1);
|
1627 |
|
|
|
1628 |
|
|
/* ... Fall through ... */
|
1629 |
|
|
case 2:
|
1630 |
|
|
/* Put the best one to allocate in qty_order[0]. */
|
1631 |
|
|
if (qty_compare (0, 1) > 0)
|
1632 |
|
|
EXCHANGE (0, 1);
|
1633 |
|
|
|
1634 |
|
|
/* ... Fall through ... */
|
1635 |
|
|
|
1636 |
|
|
case 1:
|
1637 |
|
|
case 0:
|
1638 |
|
|
/* Nothing to do here. */
|
1639 |
|
|
break;
|
1640 |
|
|
|
1641 |
|
|
default:
|
1642 |
|
|
qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
|
1643 |
|
|
}
|
1644 |
|
|
|
1645 |
|
|
/* Now for each qty that is not a hardware register,
|
1646 |
|
|
look for a hardware register to put it in.
|
1647 |
|
|
First try the register class that is cheapest for this qty,
|
1648 |
|
|
if there is more than one class. */
|
1649 |
|
|
|
1650 |
|
|
for (i = 0; i < next_qty; i++)
|
1651 |
|
|
{
|
1652 |
|
|
q = qty_order[i];
|
1653 |
|
|
if (qty[q].phys_reg < 0)
|
1654 |
|
|
{
|
1655 |
|
|
#ifdef INSN_SCHEDULING
|
1656 |
|
|
/* These values represent the adjusted lifetime of a qty so
|
1657 |
|
|
that it conflicts with qtys which appear near the start/end
|
1658 |
|
|
of this qty's lifetime.
|
1659 |
|
|
|
1660 |
|
|
The purpose behind extending the lifetime of this qty is to
|
1661 |
|
|
discourage the register allocator from creating false
|
1662 |
|
|
dependencies.
|
1663 |
|
|
|
1664 |
|
|
The adjustment value is chosen to indicate that this qty
|
1665 |
|
|
conflicts with all the qtys in the instructions immediately
|
1666 |
|
|
before and after the lifetime of this qty.
|
1667 |
|
|
|
1668 |
|
|
Experiments have shown that higher values tend to hurt
|
1669 |
|
|
overall code performance.
|
1670 |
|
|
|
1671 |
|
|
If allocation using the extended lifetime fails we will try
|
1672 |
|
|
again with the qty's unadjusted lifetime. */
|
1673 |
|
|
int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
|
1674 |
|
|
int fake_death = MIN (insn_number * 2 + 1,
|
1675 |
|
|
qty[q].death + 2 - qty[q].death % 2);
|
1676 |
|
|
#endif
|
1677 |
|
|
|
1678 |
|
|
if (N_REG_CLASSES > 1)
|
1679 |
|
|
{
|
1680 |
|
|
#ifdef INSN_SCHEDULING
|
1681 |
|
|
/* We try to avoid using hard registers allocated to qtys which
|
1682 |
|
|
are born immediately after this qty or die immediately before
|
1683 |
|
|
this qty.
|
1684 |
|
|
|
1685 |
|
|
This optimization is only appropriate when we will run
|
1686 |
|
|
a scheduling pass after reload and we are not optimizing
|
1687 |
|
|
for code size. */
|
1688 |
|
|
if (flag_schedule_insns_after_reload
|
1689 |
|
|
&& !optimize_size
|
1690 |
|
|
&& !SMALL_REGISTER_CLASSES)
|
1691 |
|
|
{
|
1692 |
|
|
qty[q].phys_reg = find_free_reg (qty[q].min_class,
|
1693 |
|
|
qty[q].mode, q, 0, 0,
|
1694 |
|
|
fake_birth, fake_death);
|
1695 |
|
|
if (qty[q].phys_reg >= 0)
|
1696 |
|
|
continue;
|
1697 |
|
|
}
|
1698 |
|
|
#endif
|
1699 |
|
|
qty[q].phys_reg = find_free_reg (qty[q].min_class,
|
1700 |
|
|
qty[q].mode, q, 0, 0,
|
1701 |
|
|
qty[q].birth, qty[q].death);
|
1702 |
|
|
if (qty[q].phys_reg >= 0)
|
1703 |
|
|
continue;
|
1704 |
|
|
}
|
1705 |
|
|
|
1706 |
|
|
#ifdef INSN_SCHEDULING
|
1707 |
|
|
/* Similarly, avoid false dependencies. */
|
1708 |
|
|
if (flag_schedule_insns_after_reload
|
1709 |
|
|
&& !optimize_size
|
1710 |
|
|
&& !SMALL_REGISTER_CLASSES
|
1711 |
|
|
&& qty[q].alternate_class != NO_REGS)
|
1712 |
|
|
qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
|
1713 |
|
|
qty[q].mode, q, 0, 0,
|
1714 |
|
|
fake_birth, fake_death);
|
1715 |
|
|
#endif
|
1716 |
|
|
if (qty[q].alternate_class != NO_REGS)
|
1717 |
|
|
qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
|
1718 |
|
|
qty[q].mode, q, 0, 0,
|
1719 |
|
|
qty[q].birth, qty[q].death);
|
1720 |
|
|
}
|
1721 |
|
|
}
|
1722 |
|
|
|
1723 |
|
|
/* Now propagate the register assignments
|
1724 |
|
|
to the pseudo regs belonging to the qtys. */
|
1725 |
|
|
|
1726 |
|
|
for (q = 0; q < next_qty; q++)
|
1727 |
|
|
if (qty[q].phys_reg >= 0)
|
1728 |
|
|
{
|
1729 |
|
|
for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
|
1730 |
|
|
reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
|
1731 |
|
|
}
|
1732 |
|
|
|
1733 |
|
|
/* Clean up. */
|
1734 |
|
|
free (regs_live_at);
|
1735 |
|
|
free (qty_order);
|
1736 |
|
|
}
|
1737 |
|
|
|
1738 |
|
|
/* Compare two quantities' priority for getting real registers.
   We give shorter-lived quantities higher priority.
   Quantities with more references are also preferred, as are quantities that
   require multiple registers.  This is the identical prioritization as
   done by global-alloc.

   We used to give preference to registers with *longer* lives, but using
   the same algorithm in both local- and global-alloc can speed up execution
   of some programs by as much as a factor of three!  */

/* Note that the quotient will never be bigger than
   the value of floor_log2 times the maximum number of
   times a register can occur in one insn (surely less than 100)
   weighted by frequency (max REG_FREQ_MAX).
   Multiplying this by 10000/REG_FREQ_MAX can't overflow.
   QTY_CMP_PRI is also used by qty_sugg_compare.

   The intermediate (double) cast avoids integer division truncating
   the ratio of reference weight to lifetime length.  */

#define QTY_CMP_PRI(q)		\
  ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
	  / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))

/* Direct two-quantity comparison: negative when Q1 should be
   allocated before Q2 (i.e. Q1 has the higher priority).  */

static int
qty_compare (int q1, int q2)
{
  return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
}
|
1764 |
|
|
|
1765 |
|
|
/* qsort-compatible wrapper around QTY_CMP_PRI: elements are quantity
   numbers, and higher-priority quantities sort toward the front.  */

static int
qty_compare_1 (const void *q1p, const void *q2p)
{
  const int qty1 = *(const int *) q1p;
  const int qty2 = *(const int *) q2p;
  const int pri_diff = QTY_CMP_PRI (qty2) - QTY_CMP_PRI (qty1);

  if (pri_diff != 0)
    return pri_diff;

  /* If qtys are equally good, sort by qty number,
     so that the results of qsort leave nothing to chance.  */
  return qty1 - qty2;
}
|
1778 |
|
|
|
1779 |
|
|
/* Compare two quantities' priority for getting real registers.  This version
   is called for quantities that have suggested hard registers.  First priority
   goes to quantities that have copy preferences, then to those that have
   normal preferences.  Within those groups, quantities with the lower
   number of preferences have the highest priority.  Of those, we use the same
   algorithm as above.  */

/* Weight a quantity's suggestion count: any copy suggestion outranks any
   number of arithmetic suggestions, because arithmetic counts are scaled
   up by FIRST_PSEUDO_REGISTER.  Smaller values mean higher priority.  */

#define QTY_CMP_SUGG(q)		\
  (qty_phys_num_copy_sugg[q]		\
   ? qty_phys_num_copy_sugg[q]	\
   : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)

/* Direct two-quantity comparison for suggested-register quantities:
   negative when Q1 should be allocated before Q2.  */

static int
qty_sugg_compare (int q1, int q2)
{
  /* Fewer suggestions sorts first, hence Q1 - Q2 here (unlike the
     reversed order used for QTY_CMP_PRI below).  */
  int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);

  if (tem != 0)
    return tem;

  return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
}
|
1801 |
|
|
|
1802 |
|
|
/* qsort-compatible wrapper around qty_sugg_compare's ordering:
   suggestion weight first, then allocation priority, then quantity
   number as the final tie-breaker.  */

static int
qty_sugg_compare_1 (const void *q1p, const void *q2p)
{
  const int qty1 = *(const int *) q1p;
  const int qty2 = *(const int *) q2p;
  int diff;

  /* Fewest (weighted) suggested registers first.  */
  diff = QTY_CMP_SUGG (qty1) - QTY_CMP_SUGG (qty2);
  if (diff == 0)
    {
      /* Then by allocation priority, highest first.  */
      diff = QTY_CMP_PRI (qty2) - QTY_CMP_PRI (qty1);

      /* If qtys are equally good, sort by qty number,
	 so that the results of qsort leave nothing to chance.  */
      if (diff == 0)
	diff = qty1 - qty2;
    }

  return diff;
}

#undef QTY_CMP_SUGG
#undef QTY_CMP_PRI
|
1822 |
|
|
|
1823 |
|
|
/* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
   Returns 1 if have done so, or 0 if cannot.

   Combining registers means marking them as having the same quantity
   and adjusting the offsets within the quantity if either of
   them is a SUBREG.

   We don't actually combine a hard reg with a pseudo; instead
   we just record the hard reg as the suggestion for the pseudo's quantity.
   If we really combined them, we could lose if the pseudo lives
   across an insn that clobbers the hard reg (eg, movmem).

   ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
   there is no REG_DEAD note on INSN.  This occurs during the processing
   of REG_NO_CONFLICT blocks.

   MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
   SETREG or if the input and output must share a register.
   In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.

   There are elaborate checks for the validity of combining.  */

static int
combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
	      rtx insn, int already_dead)
{
  int ureg, sreg;
  int offset = 0;		/* SETREG-relative word offset of USEDREG.  */
  int usize, ssize;		/* Sizes of USEDREG and SETREG in words.  */
  int sqty;			/* Quantity that SREG will join.  */

  /* Determine the numbers and sizes of registers being used.  If a subreg
     is present that does not change the entire register, don't consider
     this a copy insn.  */

  /* Strip (possibly nested) SUBREGs off USEDREG, accumulating the word
     offset they select into OFFSET.  */
  while (GET_CODE (usedreg) == SUBREG)
    {
      rtx subreg = SUBREG_REG (usedreg);

      if (REG_P (subreg))
	{
	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
	    may_save_copy = 0;

	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
	    offset += subreg_regno_offset (REGNO (subreg),
					   GET_MODE (subreg),
					   SUBREG_BYTE (usedreg),
					   GET_MODE (usedreg));
	  else
	    offset += (SUBREG_BYTE (usedreg)
		      / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
	}

      usedreg = subreg;
    }

  if (!REG_P (usedreg))
    return 0;

  ureg = REGNO (usedreg);
  if (ureg < FIRST_PSEUDO_REGISTER)
    usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
  else
    /* Round the pseudo's size up to whole natural-size units.  */
    usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
	      + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
	     / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));

  /* Same stripping for SETREG; its subreg offsets count negatively.  */
  while (GET_CODE (setreg) == SUBREG)
    {
      rtx subreg = SUBREG_REG (setreg);

      if (REG_P (subreg))
	{
	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
	    may_save_copy = 0;

	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
	    offset -= subreg_regno_offset (REGNO (subreg),
					   GET_MODE (subreg),
					   SUBREG_BYTE (setreg),
					   GET_MODE (setreg));
	  else
	    offset -= (SUBREG_BYTE (setreg)
		      / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
	}

      setreg = subreg;
    }

  if (!REG_P (setreg))
    return 0;

  sreg = REGNO (setreg);
  if (sreg < FIRST_PSEUDO_REGISTER)
    ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
  else
    ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
	      + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
	     / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));

  /* If UREG is a pseudo-register that hasn't already been assigned a
     quantity number, it means that it is not local to this block or dies
     more than once.  In either event, we can't do anything with it.  */
  if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
      /* Do not combine registers unless one fits within the other.  */
      || (offset > 0 && usize + offset > ssize)
      || (offset < 0 && usize + offset < ssize)
      /* Do not combine with a smaller already-assigned object
	 if that smaller object is already combined with something bigger.  */
      || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
	  && usize < qty[reg_qty[ureg]].size)
      /* Can't combine if SREG is not a register we can allocate.  */
      || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
      /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
	 These have already been taken care of.  This probably wouldn't
	 combine anyway, but don't take any chances.  */
      || (ureg >= FIRST_PSEUDO_REGISTER
	  && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
      /* Don't tie something to itself.  In most cases it would make no
	 difference, but it would screw up if the reg being tied to itself
	 also dies in this insn.  */
      || ureg == sreg
      /* Don't try to connect two different hardware registers.  */
      || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
      /* Don't connect two different machine modes if they have different
	 implications as to which registers may be used.  */
      || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
    return 0;

  /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
     qty_phys_sugg for the pseudo instead of tying them.

     Return "failure" so that the lifespan of UREG is terminated here;
     that way the two lifespans will be disjoint and nothing will prevent
     the pseudo reg from being given this hard reg.  */

  if (ureg < FIRST_PSEUDO_REGISTER)
    {
      /* Allocate a quantity number so we have a place to put our
	 suggestions.  */
      if (reg_qty[sreg] == -2)
	reg_is_born (setreg, 2 * insn_number);

      if (reg_qty[sreg] >= 0)
	{
	  if (may_save_copy
	      && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
	    {
	      SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
	      qty_phys_num_copy_sugg[reg_qty[sreg]]++;
	    }
	  else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
	    {
	      SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
	      qty_phys_num_sugg[reg_qty[sreg]]++;
	    }
	}
      return 0;
    }

  /* Similarly for SREG a hard register and UREG a pseudo register.  */

  if (sreg < FIRST_PSEUDO_REGISTER)
    {
      if (may_save_copy
	  && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
	{
	  SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
	  qty_phys_num_copy_sugg[reg_qty[ureg]]++;
	}
      else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
	{
	  SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
	  qty_phys_num_sugg[reg_qty[ureg]]++;
	}
      return 0;
    }

  /* At this point we know that SREG and UREG are both pseudos.
     Do nothing if SREG already has a quantity or is a register that we
     don't allocate.  */
  if (reg_qty[sreg] >= -1
      /* If we are not going to let any regs live across calls,
	 don't tie a call-crossing reg to a non-call-crossing reg.  */
      || (current_function_has_nonlocal_label
	  && ((REG_N_CALLS_CROSSED (ureg) > 0)
	      != (REG_N_CALLS_CROSSED (sreg) > 0))))
    return 0;

  /* We don't already know about SREG, so tie it to UREG
     if this is the last use of UREG, provided the classes they want
     are compatible.  */

  if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
      && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
    {
      /* Add SREG to UREG's quantity.  */
      sqty = reg_qty[ureg];
      reg_qty[sreg] = sqty;
      reg_offset[sreg] = reg_offset[ureg] + offset;
      reg_next_in_qty[sreg] = qty[sqty].first_reg;
      qty[sqty].first_reg = sreg;

      /* If SREG's reg class is smaller, set qty[SQTY].min_class.  */
      update_qty_class (sqty, sreg);

      /* Update info about quantity SQTY.  */
      qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
      qty[sqty].n_throwing_calls_crossed
	+= REG_N_THROWING_CALLS_CROSSED (sreg);
      qty[sqty].n_refs += REG_N_REFS (sreg);
      qty[sqty].freq += REG_FREQ (sreg);
      if (usize < ssize)
	{
	  int i;

	  /* SREG is larger, so it becomes the anchor of the quantity:
	     rebase every member's offset and adopt SREG's size/mode.  */
	  for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
	    reg_offset[i] -= offset;

	  qty[sqty].size = ssize;
	  qty[sqty].mode = GET_MODE (setreg);
	}
    }
  else
    return 0;

  return 1;
}
|
2052 |
|
|
|
2053 |
|
|
/* Return 1 if the preferred class of REG allows it to be tied
|
2054 |
|
|
to a quantity or register whose class is CLASS.
|
2055 |
|
|
True if REG's reg class either contains or is contained in CLASS. */
|
2056 |
|
|
|
2057 |
|
|
static int
|
2058 |
|
|
reg_meets_class_p (int reg, enum reg_class class)
|
2059 |
|
|
{
|
2060 |
|
|
enum reg_class rclass = reg_preferred_class (reg);
|
2061 |
|
|
return (reg_class_subset_p (rclass, class)
|
2062 |
|
|
|| reg_class_subset_p (class, rclass));
|
2063 |
|
|
}
|
2064 |
|
|
|
2065 |
|
|
/* Update the class of QTYNO assuming that REG is being tied to it. */
|
2066 |
|
|
|
2067 |
|
|
static void
|
2068 |
|
|
update_qty_class (int qtyno, int reg)
|
2069 |
|
|
{
|
2070 |
|
|
enum reg_class rclass = reg_preferred_class (reg);
|
2071 |
|
|
if (reg_class_subset_p (rclass, qty[qtyno].min_class))
|
2072 |
|
|
qty[qtyno].min_class = rclass;
|
2073 |
|
|
|
2074 |
|
|
rclass = reg_alternate_class (reg);
|
2075 |
|
|
if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
|
2076 |
|
|
qty[qtyno].alternate_class = rclass;
|
2077 |
|
|
}
|
2078 |
|
|
|
2079 |
|
|
/* Handle something which alters the value of an rtx REG.
|
2080 |
|
|
|
2081 |
|
|
REG is whatever is set or clobbered. SETTER is the rtx that
|
2082 |
|
|
is modifying the register.
|
2083 |
|
|
|
2084 |
|
|
If it is not really a register, we do nothing.
|
2085 |
|
|
The file-global variables `this_insn' and `this_insn_number'
|
2086 |
|
|
carry info from `block_alloc'. */
|
2087 |
|
|
|
2088 |
|
|
static void
|
2089 |
|
|
reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
|
2090 |
|
|
{
|
2091 |
|
|
/* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
|
2092 |
|
|
a hard register. These may actually not exist any more. */
|
2093 |
|
|
|
2094 |
|
|
if (GET_CODE (reg) != SUBREG
|
2095 |
|
|
&& !REG_P (reg))
|
2096 |
|
|
return;
|
2097 |
|
|
|
2098 |
|
|
/* Mark this register as being born. If it is used in a CLOBBER, mark
|
2099 |
|
|
it as being born halfway between the previous insn and this insn so that
|
2100 |
|
|
it conflicts with our inputs but not the outputs of the previous insn. */
|
2101 |
|
|
|
2102 |
|
|
reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
|
2103 |
|
|
}
|
2104 |
|
|
|
2105 |
|
|
/* Handle beginning of the life of register REG.
   BIRTH is the life-index (2 * insn number, possibly offset) at which
   this is happening.  */

static void
reg_is_born (rtx reg, int birth)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    {
      regno = REGNO (SUBREG_REG (reg));
      /* For a hard-reg SUBREG, resolve to the actual hard register the
	 subreg designates.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (reg);
    }
  else
    regno = REGNO (reg);

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      mark_life (regno, GET_MODE (reg), 1);

      /* If the register was to have been born earlier than the present
	 insn, mark it as live where it is actually born.  */
      if (birth < 2 * this_insn_number)
	post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
    }
  else
    {
      /* reg_qty == -2 means the pseudo has no quantity yet; give it one
	 starting at BIRTH.  */
      if (reg_qty[regno] == -2)
	alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);

      /* If this register has a quantity number, show that it isn't dead.  */
      if (reg_qty[regno] >= 0)
	qty[reg_qty[regno]].death = -1;
    }
}
|
2141 |
|
|
|
2142 |
|
|
/* Record the death of REG in the current insn.  If OUTPUT_P is nonzero,
   REG is an output that is dying (i.e., it is never used), otherwise it
   is an input (the normal case).
   If OUTPUT_P is 1, then we extend the life past the end of this insn.  */

static void
wipe_dead_reg (rtx reg, int output_p)
{
  int regno = REGNO (reg);

  /* If this insn has multiple results,
     and the dead reg is used in one of the results,
     extend its life to after this insn,
     so it won't get allocated together with any other result of this insn.

     It is unsafe to use !single_set here since it will ignore an unused
     output.  Just because an output is unused does not mean the compiler
     can assume the side effect will not occur.  Consider if REG appears
     in the address of an output and we reload the output.  If we allocate
     REG to the same hard register as an unused output we could set the hard
     register before the output reload insn.  */
  if (GET_CODE (PATTERN (this_insn)) == PARALLEL
      && multiple_sets (this_insn))
    {
      int i;
      for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
	{
	  rtx set = XVECEXP (PATTERN (this_insn), 0, i);
	  if (GET_CODE (set) == SET
	      && !REG_P (SET_DEST (set))
	      && !rtx_equal_p (reg, SET_DEST (set))
	      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	    output_p = 1;
	}
    }

  /* If this register is used in an auto-increment address, then extend its
     life to after this insn, so that it won't get allocated together with
     the result of this insn.  */
  if (! output_p && find_regno_note (this_insn, REG_INC, regno))
    output_p = 1;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      mark_life (regno, GET_MODE (reg), 0);

      /* If a hard register is dying as an output, mark it as in use at
	 the beginning of this insn (the above statement would cause this
	 not to happen).  */
      if (output_p)
	post_mark_life (regno, GET_MODE (reg), 1,
			2 * this_insn_number, 2 * this_insn_number + 1);
    }

  /* For a pseudo with a quantity, record the death index; the +output_p
     extends an output's life past the end of this insn.  */
  else if (reg_qty[regno] >= 0)
    qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
}
|
2199 |
|
|
|
2200 |
|
|
/* Find a block of SIZE words of hard regs in reg_class CLASS
   that can hold something of machine-mode MODE
     (but actually we test only the first of the block for holding MODE)
   and still free between insn BORN_INDEX and insn DEAD_INDEX,
   and return the number of the first of them.
   Return -1 if such a block cannot be found.
   If QTYNO crosses calls, insist on a register preserved by calls,
   unless ACCEPT_CALL_CLOBBERED is nonzero.

   If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
   register is available.  If not, return -1.  */

static int
find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
	       int accept_call_clobbered, int just_try_suggested,
	       int born_index, int dead_index)
{
  int i, ins;
  /* USED accumulates registers unavailable anywhere in the lifetime;
     FIRST_USED additionally excludes registers that can't start the
     block (suggestion filtering).  */
  HARD_REG_SET first_used, used;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Validate our parameters.  */
  gcc_assert (born_index >= 0 && born_index <= dead_index);

  /* Don't let a pseudo live in a reg across a function call
     if we might get a nonlocal goto.  */
  if (current_function_has_nonlocal_label
      && qty[qtyno].n_calls_crossed > 0)
    return -1;

  /* Seed USED with the registers ruled out by the calling convention.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used, call_fixed_reg_set);
  else if (qty[qtyno].n_calls_crossed == 0)
    COPY_HARD_REG_SET (used, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used, call_used_reg_set);

  if (accept_call_clobbered)
    IOR_HARD_REG_SET (used, losing_caller_save_reg_set);

  /* Exclude everything live at any point during [BORN_INDEX, DEAD_INDEX).  */
  for (ins = born_index; ins < dead_index; ins++)
    IOR_HARD_REG_SET (used, regs_live_at[ins]);

  /* Exclude registers outside CLASS.  */
  IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);

  /* Don't use the frame pointer reg in local-alloc even if
     we may omit the frame pointer, because if we do that and then we
     need a frame pointer, reload won't know how to move the pseudo
     to another hard reg.  It can move only regs made by global-alloc.

     This is true of any register that can be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (used, eliminables[i].from);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
     that it might be eliminated into.  */
  SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
#endif
#else
  SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
#endif

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
#endif

  /* Normally, the registers that can be used for the first register in
     a multi-register quantity are the same as those that can be used for
     subsequent registers.  However, if just trying suggested registers,
     restrict our consideration to them.  If there are copy-suggested
     register, try them.  Otherwise, try the arithmetic-suggested
     registers.  */
  COPY_HARD_REG_SET (first_used, used);

  if (just_try_suggested)
    {
      if (qty_phys_num_copy_sugg[qtyno] != 0)
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
      else
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
    }

  /* If all registers are excluded, we can't do anything.  */
  GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);

  /* If at least one would be suitable, test each hard reg.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      int regno = reg_alloc_order[i];
#else
      int regno = i;
#endif
      if (! TEST_HARD_REG_BIT (first_used, regno)
	  && HARD_REGNO_MODE_OK (regno, mode)
	  && (qty[qtyno].n_calls_crossed == 0
	      || accept_call_clobbered
	      || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	{
	  int j;
	  int size1 = hard_regno_nregs[regno][mode];
	  /* Check that the remaining registers of a multi-reg block are
	     free too (tested against USED, not FIRST_USED).  */
	  for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
	  if (j == size1)
	    {
	      /* Mark that this register is in use between its birth and death
		 insns.  */
	      post_mark_life (regno, mode, 1, born_index, dead_index);
	      return regno;
	    }
#ifndef REG_ALLOC_ORDER
	  /* Skip starting points we know will lose.  */
	  i += j;
#endif
	}
    }

 fail:
  /* If we are just trying suggested register, we have just tried copy-
     suggested registers, and there are arithmetic-suggested registers,
     try them.  */

  /* If it would be profitable to allocate a call-clobbered register
     and save and restore it around calls, do that.  */
  if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
      && qty_phys_num_sugg[qtyno] != 0)
    {
      /* Don't try the copy-suggested regs again.  */
      qty_phys_num_copy_sugg[qtyno] = 0;
      return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
			    born_index, dead_index);
    }

  /* We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  Avoid putting pseudos crossing calls that
     might throw into call used registers.  */

  if (! accept_call_clobbered
      && flag_caller_saves
      && ! just_try_suggested
      && qty[qtyno].n_calls_crossed != 0
      && qty[qtyno].n_throwing_calls_crossed == 0
      && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
				 qty[qtyno].n_calls_crossed))
    {
      /* Retry, this time accepting call-clobbered registers; a hit
	 means the caller-save machinery must run.  */
      i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
      if (i >= 0)
	caller_save_needed = 1;
      return i;
    }
  return -1;
}
|
2356 |
|
|
|
2357 |
|
|
/* Mark that REGNO with machine-mode MODE is live starting from the current
|
2358 |
|
|
insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
|
2359 |
|
|
is zero). */
|
2360 |
|
|
|
2361 |
|
|
static void
|
2362 |
|
|
mark_life (int regno, enum machine_mode mode, int life)
|
2363 |
|
|
{
|
2364 |
|
|
int j = hard_regno_nregs[regno][mode];
|
2365 |
|
|
if (life)
|
2366 |
|
|
while (--j >= 0)
|
2367 |
|
|
SET_HARD_REG_BIT (regs_live, regno + j);
|
2368 |
|
|
else
|
2369 |
|
|
while (--j >= 0)
|
2370 |
|
|
CLEAR_HARD_REG_BIT (regs_live, regno + j);
|
2371 |
|
|
}
|
2372 |
|
|
|
2373 |
|
|
/* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
|
2374 |
|
|
is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
|
2375 |
|
|
to insn number DEATH (exclusive). */
|
2376 |
|
|
|
2377 |
|
|
static void
|
2378 |
|
|
post_mark_life (int regno, enum machine_mode mode, int life, int birth,
|
2379 |
|
|
int death)
|
2380 |
|
|
{
|
2381 |
|
|
int j = hard_regno_nregs[regno][mode];
|
2382 |
|
|
HARD_REG_SET this_reg;
|
2383 |
|
|
|
2384 |
|
|
CLEAR_HARD_REG_SET (this_reg);
|
2385 |
|
|
while (--j >= 0)
|
2386 |
|
|
SET_HARD_REG_BIT (this_reg, regno + j);
|
2387 |
|
|
|
2388 |
|
|
if (life)
|
2389 |
|
|
while (birth < death)
|
2390 |
|
|
{
|
2391 |
|
|
IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
|
2392 |
|
|
birth++;
|
2393 |
|
|
}
|
2394 |
|
|
else
|
2395 |
|
|
while (birth < death)
|
2396 |
|
|
{
|
2397 |
|
|
AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
|
2398 |
|
|
birth++;
|
2399 |
|
|
}
|
2400 |
|
|
}
|
2401 |
|
|
|
2402 |
|
|
/* INSN is the CLOBBER insn that starts a REG_NO_NOCONFLICT block, R0
|
2403 |
|
|
is the register being clobbered, and R1 is a register being used in
|
2404 |
|
|
the equivalent expression.
|
2405 |
|
|
|
2406 |
|
|
If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
|
2407 |
|
|
in which it is used, return 1.
|
2408 |
|
|
|
2409 |
|
|
Otherwise, return 0. */
|
2410 |
|
|
|
2411 |
|
|
static int
|
2412 |
|
|
no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
|
2413 |
|
|
{
|
2414 |
|
|
int ok = 0;
|
2415 |
|
|
rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
|
2416 |
|
|
rtx p, last;
|
2417 |
|
|
|
2418 |
|
|
/* If R1 is a hard register, return 0 since we handle this case
|
2419 |
|
|
when we scan the insns that actually use it. */
|
2420 |
|
|
|
2421 |
|
|
if (note == 0
|
2422 |
|
|
|| (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
|
2423 |
|
|
|| (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
|
2424 |
|
|
&& REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
|
2425 |
|
|
return 0;
|
2426 |
|
|
|
2427 |
|
|
last = XEXP (note, 0);
|
2428 |
|
|
|
2429 |
|
|
for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
|
2430 |
|
|
if (INSN_P (p))
|
2431 |
|
|
{
|
2432 |
|
|
if (find_reg_note (p, REG_DEAD, r1))
|
2433 |
|
|
ok = 1;
|
2434 |
|
|
|
2435 |
|
|
/* There must be a REG_NO_CONFLICT note on every insn, otherwise
|
2436 |
|
|
some earlier optimization pass has inserted instructions into
|
2437 |
|
|
the sequence, and it is not safe to perform this optimization.
|
2438 |
|
|
Note that emit_no_conflict_block always ensures that this is
|
2439 |
|
|
true when these sequences are created. */
|
2440 |
|
|
if (! find_reg_note (p, REG_NO_CONFLICT, r1))
|
2441 |
|
|
return 0;
|
2442 |
|
|
}
|
2443 |
|
|
|
2444 |
|
|
return ok;
|
2445 |
|
|
}
|
2446 |
|
|
|
2447 |
|
|
/* Return the number of alternatives for which the constraint string P
   indicates that the operand must be equal to operand 0 and that no register
   is acceptable.  */

static int
requires_inout (const char *p)
{
  char c;
  int found_zero = 0;		/* Saw a '0' (match operand 0) in this alt.  */
  int reg_allowed = 0;		/* Saw a register-class letter in this alt.  */
  int num_matching_alts = 0;	/* Alternatives that match 0 with no reg.  */
  int len;			/* Length of the current constraint char.  */

  /* Walk the constraint string one (possibly multi-character)
     constraint at a time; ',' separates alternatives.  */
  for ( ; (c = *p); p += len)
    {
      len = CONSTRAINT_LEN (c, p);
      switch (c)
	{
	case '=': case '+': case '?':
	case '#': case '&': case '!':
	case '*': case '%':
	case 'm': case '<': case '>': case 'V': case 'o':
	case 'E': case 'F': case 'G': case 'H':
	case 's': case 'i': case 'n':
	case 'I': case 'J': case 'K': case 'L':
	case 'M': case 'N': case 'O': case 'P':
	case 'X':
	  /* These don't say anything we care about.  */
	  break;

	case ',':
	  /* End of an alternative: record it if it required matching
	     operand 0 without allowing any register, then reset the
	     per-alternative flags.  */
	  if (found_zero && ! reg_allowed)
	    num_matching_alts++;

	  found_zero = reg_allowed = 0;
	  break;

	case '0':
	  found_zero = 1;
	  break;

	case '1': case '2': case '3': case '4': case '5':
	case '6': case '7': case '8': case '9':
	  /* Skip the balance of the matching constraint.  */
	  do
	    p++;
	  while (ISDIGIT (*p));
	  /* P already points past the digits, so advance by zero on
	     the next loop iteration.  */
	  len = 0;
	  break;

	default:
	  /* A machine-specific constraint: only treat it as allowing
	     a register if it maps to a register class or is an
	     address constraint.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
	      && !EXTRA_ADDRESS_CONSTRAINT (c, p))
	    break;
	  /* Fall through.  */
	case 'p':
	case 'g': case 'r':
	  reg_allowed = 1;
	  break;
	}
    }

  /* Account for the final alternative (not followed by a ',').  */
  if (found_zero && ! reg_allowed)
    num_matching_alts++;

  return num_matching_alts;
}
|
2514 |
|
|
|
2515 |
|
|
void
|
2516 |
|
|
dump_local_alloc (FILE *file)
|
2517 |
|
|
{
|
2518 |
|
|
int i;
|
2519 |
|
|
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
|
2520 |
|
|
if (reg_renumber[i] != -1)
|
2521 |
|
|
fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
|
2522 |
|
|
}
|
2523 |
|
|
|
2524 |
|
|
/* Run the old (local) register allocator: set up the per-pseudo data
   structures, classify registers, allocate, and clean up the CFG if
   local allocation redirected any jumps.  */
static void
rest_of_handle_local_alloc (void)
{
  /* Nonzero if local_alloc changed jumps and notes must be rebuilt.  */
  int rebuild_notes;

  /* Determine if the current function is a leaf before running reload
     since this can impact optimizations done by the prologue and
     epilogue thus changing register elimination offsets.  */
  current_function_is_leaf = leaf_function_p ();

  /* Allocate the reg_renumber array.  */
  allocate_reg_info (max_regno, FALSE, TRUE);

  /* And the reg_equiv_memory_loc array.  */
  VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
  reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);

  allocate_initial_values (reg_equiv_memory_loc);

  regclass (get_insns (), max_reg_num (), dump_file);
  rebuild_notes = local_alloc ();

  /* Local allocation may have turned an indirect jump into a direct
     jump.  If so, we must rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_notes)
    {
      timevar_push (TV_JUMP);

      rebuild_jump_labels (get_insns ());
      purge_all_dead_edges ();
      delete_unreachable_blocks ();

      timevar_pop (TV_JUMP);
    }

  /* Emit flow and allocation info when dumping is enabled for this pass.  */
  if (dump_enabled_p (pass_local_alloc.static_pass_number))
    {
      timevar_push (TV_DUMP);
      dump_flow_info (dump_file);
      dump_local_alloc (dump_file);
      timevar_pop (TV_DUMP);
    }
}
|
2570 |
|
|
|
2571 |
|
|
/* Pass descriptor for the local register-allocation pass ("lreg");
   its execute hook is rest_of_handle_local_alloc above.  */
struct tree_opt_pass pass_local_alloc =
{
  "lreg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_local_alloc,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LOCAL_ALLOC,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'l'                                   /* letter */
};
|
2588 |
|
|
|