OpenCores Subversion repository: openrisc
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk
File: openrisc/trunk/gnu-stable/gcc-4.5.1/gcc/reload1.c (blame for rev 826)

/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "regs.h"
#include "addresses.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "real.h"
#include "toplev.h"
#include "except.h"
#include "tree.h"
#include "ira.h"
#include "df.h"
#include "target.h"
#include "emit-rtl.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
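
/* An illustrative overview (added commentary, not from the original GCC
   sources) of how the pieces described above fit together.  Roughly, the
   driver defined later in this file behaves like:

       reload (first, global)
         loop until no new hard regs are spilled:
           calculate_needs_all_insns (global);  scan insns, run find_reloads
           select_reload_regs ();               pick reload regs per insn
           finish_spills (global);              move spilled pseudos to other
                                                hard regs or to stack slots
         reload_as_needed (global);             emit the actual reload moves

   All of the names used in this sketch are declared or defined below.  */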
 
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static regset_head reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is an invariant value to which pseudo reg N is equivalent.
   eliminate_regs_in_insn uses this to replace pseudos in particular
   contexts.  */
rtx *reg_equiv_invariant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
   collector can keep track of what is inside.  */
VEC(rtx,gc) *reg_equiv_memory_loc_vec;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
   alternate representations of the location of pseudo reg N.  */
rtx *reg_equiv_alt_mem_list;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static unsigned int *reg_max_ref_width;

/* Element N is the list of insns that initialized reg N from its equivalent
   constant or memory slot.  */
rtx *reg_equiv_init;
int reg_equiv_init_size;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.   Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Indicate whether the register's current value is one that is not
   safe to retain across a call, even for registers that are normally
   call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
static HARD_REG_SET reg_reloaded_call_part_clobbered;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!?  This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   pseudo is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset_head spilled_pseudos;

/* Record which pseudos changed their allocation in finish_spills.  */
static regset_head changed_allocation_pseudos;

/* Used for communication between order_regs_for_reload and count_pseudo.
   Used to avoid counting one pseudo twice.  */
static regset_head pseudos_counted;

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
static struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
static char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
static char *reload_firstobj;

/* This points before all local rtl generated by register elimination.
   Used to quickly free all memory after processing one insn.  */
static char *reload_insn_firstobj;

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
struct insn_chain *reload_insn_chain;

/* List of all insns needing reloads.  */
static struct insn_chain *insns_need_reload;

/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset; /* Initial difference between values.  */
  int can_eliminate;            /* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value returned by TARGET_CAN_ELIMINATE
                                   target hook in previous scan over insns
                                   made by reload.  */
  HOST_WIDE_INT offset;         /* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
};

static struct elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static const struct elim_table_1
{
  const int from;
  const int to;
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
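
/* Illustrative example (added commentary, not from the original sources):
   with the default table above, eliminating the frame pointer rewrites an
   address such as

       (plus (reg frame_pointer) (const_int 8))

   into

       (plus (reg stack_pointer) (const_int 8 + offset))

   where `offset' is the current difference between the two registers that
   struct elim_table tracks for this elimination.  */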
 
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If nonzero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the difference of the
   number of the label and the first label number.  We can't offset the
   pointer itself as this can cause problems on machines with segmented
   memory.  The first table is an array of flags that records whether we
   have yet encountered a label and the second table is an array of arrays,
   one entry in the latter array for each elimination.  */

static int first_label_num;
static char *offsets_known_at;
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
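
/* Illustrative access pattern (added commentary; the exact expression is an
   assumption for exposition): the offset recorded at label LAB for the I-th
   elimination in reg_eliminate would be consulted roughly as

       offsets_at[CODE_LABEL_NUMBER (LAB) - first_label_num][I]

   with offsets_known_at[CODE_LABEL_NUMBER (LAB) - first_label_num] recording
   whether any path to that label has been seen yet.  */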
 
/* Stack of addresses where an rtx has been changed.  We can undo the
   changes by popping items off the stack and restoring the original
   value at each location.

   We use this simplistic undo capability rather than copy_rtx as copy_rtx
   will not make a deep copy of a normally sharable rtx, such as
   (const (plus (symbol_ref) (const_int))).  If such an expression appears
   as R1 in gen_reload_chain_without_interm_reg_p, then a shared
   rtx expression would be changed.  See PR 42431.  */

typedef rtx *rtx_p;
DEF_VEC_P(rtx_p);
DEF_VEC_ALLOC_P(rtx_p,heap);
static VEC(rtx_p,heap) *substitute_stack;

/* Number of labels in the current function.  */

static int num_labels;

static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
static void maybe_fix_stack_asms (void);
static void copy_reloads (struct insn_chain *);
static void calculate_needs_all_insns (int);
static int find_reg (struct insn_chain *, int);
static void find_reload_regs (struct insn_chain *);
static void select_reload_regs (void);
static void delete_caller_save_insns (void);

static void spill_failure (rtx, enum reg_class);
static void count_spilled_pseudo (int, int, int);
static void delete_dead_insn (rtx);
static void alter_reg (int, int, bool);
static void set_label_offsets (rtx, rtx, int);
static void check_eliminable_occurrences (rtx);
static void elimination_effects (rtx, enum machine_mode);
static int eliminate_regs_in_insn (rtx, int);
static void update_eliminable_offsets (void);
static void mark_not_eliminable (rtx, const_rtx, void *);
static void set_initial_elim_offsets (void);
static bool verify_initial_elim_offsets (void);
static void set_initial_label_offsets (void);
static void set_offsets_for_label (rtx);
static void init_elim_table (void);
static void update_eliminables (HARD_REG_SET *);
static void spill_hard_reg (unsigned int, int);
static int finish_spills (int);
static void scan_paradoxical_subregs (rtx);
static void count_pseudo (int);
static void order_regs_for_reload (struct insn_chain *);
static void reload_as_needed (int);
static void forget_old_reloads_1 (rtx, const_rtx, void *);
static void forget_marked_reloads (regset);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
                                    enum machine_mode);
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
                                     enum machine_mode);
static int reload_reg_free_p (unsigned int, int, enum reload_type);
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
                                        rtx, rtx, int, int);
static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
                             rtx, rtx, int, int);
static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
static int allocate_reload_reg (struct insn_chain *, int, int);
static int conflicts_with_override (rtx);
static void failed_reload (rtx, int);
static int set_reload_reg (int, int);
static void choose_reload_regs_init (struct insn_chain *, rtx *);
static void choose_reload_regs (struct insn_chain *);
static void merge_assigned_reloads (rtx);
static void emit_input_reload_insns (struct insn_chain *, struct reload *,
                                     rtx, int);
static void emit_output_reload_insns (struct insn_chain *, struct reload *,
                                      int);
static void do_input_reload (struct insn_chain *, struct reload *, int);
static void do_output_reload (struct insn_chain *, struct reload *, int);
static void emit_reload_insns (struct insn_chain *);
static void delete_output_reload (rtx, int, int, rtx);
static void delete_address_reloads (rtx, rtx);
static void delete_address_reloads_1 (rtx, rtx, rtx);
static rtx inc_for_reload (rtx, rtx, rtx, int);
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes (rtx, rtx);
#endif
static void substitute (rtx *, const_rtx, rtx);
static bool gen_reload_chain_without_interm_reg_p (int, int);
static int reloads_conflict (int, int);
static rtx gen_reload (rtx, rtx, int, enum reload_type);
static rtx emit_insn_if_valid_for_reload (rtx);

/* Initialize the reload pass.  This is called at the beginning of compilation
   and may be called again if the target is reinitialized.  */

void
init_reload (void)
{
  int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  rtx tem
    = gen_rtx_MEM (Pmode,
                   gen_rtx_PLUS (Pmode,
                                 gen_rtx_REG (Pmode,
                                              LAST_VIRTUAL_REGISTER + 1),
                                 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
                          gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                          gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);

  INIT_REG_SET (&spilled_pseudos);
  INIT_REG_SET (&changed_allocation_pseudos);
  INIT_REG_SET (&pseudos_counted);
}
 
/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */
struct insn_chain *
new_insn_chain (void)
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = XOBNEW (&reload_obstack, struct insn_chain);
      INIT_REG_SET (&c->live_throughout);
      INIT_REG_SET (&c->dead_or_set);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];

      if (r < 0)
        {
          /* reload_combine uses the information from DF_LIVE_IN,
             which might still contain registers that have not
             actually been allocated since they have an
             equivalence.  */
          gcc_assert (ira_conflicts_p || reload_completed);
        }
      else
        add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
    }
}

/* Replace all pseudos found in LOC with their corresponding
   equivalences.  */

static void
replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      if (regno < FIRST_PSEUDO_REGISTER)
        return;

      x = eliminate_regs (x, mem_mode, usage);
      if (x != *loc)
        {
          *loc = x;
          replace_pseudos_in (loc, mem_mode, usage);
          return;
        }

      if (reg_equiv_constant[regno])
        *loc = reg_equiv_constant[regno];
      else if (reg_equiv_mem[regno])
        *loc = reg_equiv_mem[regno];
      else if (reg_equiv_address[regno])
        *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
      else
        {
          gcc_assert (!REG_P (regno_reg_rtx[regno])
                      || REGNO (regno_reg_rtx[regno]) != regno);
          *loc = regno_reg_rtx[regno];
        }

      return;
    }
  else if (code == MEM)
    {
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}

/* Determine if the current function has an exception receiver block
   that reaches the exit block via non-exceptional edges  */

static bool
has_nonexceptional_receiver (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  /* If we're not optimizing, then just err on the safe side.  */
  if (!optimize)
    return true;

  /* First determine which blocks can reach exit via normal paths.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Place the exit block on our worklist.  */
  EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
  *tos++ = EXIT_BLOCK_PTR;

  /* Iterate: find everything reachable from what we've already seen.  */
  while (tos != worklist)
    {
      bb = *--tos;

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!(e->flags & EDGE_ABNORMAL))
          {
            basic_block src = e->src;

            if (!(src->flags & BB_REACHABLE))
              {
                src->flags |= BB_REACHABLE;
                *tos++ = src;
              }
          }
    }
  free (worklist);

  /* Now see if there's a reachable block with an exceptional incoming
     edge.  */
  FOR_EACH_BB (bb)
    if (bb->flags & BB_REACHABLE)
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          return true;

  /* No exceptional block reached exit unexceptionally.  */
  return false;
}


/* Global variables used by reload and its subroutines.  */

/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;
/* Set during calculate_needs if an insn needs an operand changed.  */
static int something_needs_operands_changed;

/* Nonzero means we couldn't get enough spill regs.  */
static int failure;

/* Temporary array of pseudo-register number.  */
static int *temp_pseudo_reg_arr;
 
/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (rtx first, int global)
{
  int i, n;
  rtx insn;
  struct elim_table *ep;
  basic_block bb;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  failure = 0;

  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NOTE_INSN_DELETED);

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* We don't have a stack slot for any spill reg yet.  */
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that has a nonlocal label that can reach the exit
     block via non-exceptional paths must save all call-saved
     registers.  */
  if (cfun->has_nonlocal_label
      && has_nonexceptional_receiver ())
    crtl->saves_all_registers = 1;

  if (crtl->saves_all_registers)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
        df_set_regs_ever_live (i, true);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = XCNEWVEC (rtx, max_regno);
  reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
  reg_equiv_mem = XCNEWVEC (rtx, max_regno);
  reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
  reg_equiv_address = XCNEWVEC (rtx, max_regno);
  reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  reg_old_renumber = XCNEWVEC (short, max_regno);
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);

  CLEAR_HARD_REG_SET (bad_spill_regs_global);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  Also find all paradoxical subregs and find largest such for
     each pseudo.  */
 
  num_eliminable_invariants = 0;
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
         we'll mark them with QImode.  Make sure there are no
         previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
          && GET_MODE (insn) != VOIDmode)
        PUT_MODE (insn, VOIDmode);

      if (NONDEBUG_INSN_P (insn))
        scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          rtx x;

          if (! note)
            continue;

          i = REGNO (SET_DEST (set));
          x = XEXP (note, 0);

          if (i <= LAST_VIRTUAL_REGISTER)
            continue;

          if (! function_invariant_p (x)
              || ! flag_pic
              /* A function invariant is often CONSTANT_P but may
                 include a register.  We promise to only pass
                 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
              || (CONSTANT_P (x)
                  && LEGITIMATE_PIC_OPERAND_P (x)))
            {
              /* It can happen that a REG_EQUIV note contains a MEM
                 that is not a legitimate memory operand.  As later
                 stages of reload assume that all addresses found
                 in the reg_equiv_* arrays were originally legitimate,
                 we ignore such REG_EQUIV notes.  */
              if (memory_operand (x, VOIDmode))
                {
                  /* Always unshare the equivalence, so we can
                     substitute into this insn without touching the
                       equivalence.  */
                  reg_equiv_memory_loc[i] = copy_rtx (x);
                }
              else if (function_invariant_p (x))
                {
                  if (GET_CODE (x) == PLUS)
                    {
                      /* This is PLUS of frame pointer and a constant,
                         and might be shared.  Unshare it.  */
                      reg_equiv_invariant[i] = copy_rtx (x);
                      num_eliminable_invariants++;
                    }
                  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
                    {
                      reg_equiv_invariant[i] = x;
                      num_eliminable_invariants++;
                    }
                  else if (LEGITIMATE_CONSTANT_P (x))
                    reg_equiv_constant[i] = x;
                  else
                    {
                      reg_equiv_memory_loc[i]
                        = force_const_mem (GET_MODE (SET_DEST (set)), x);
                      if (! reg_equiv_memory_loc[i])
                        reg_equiv_init[i] = NULL_RTX;
                    }
                }
              else
                {
                  reg_equiv_init[i] = NULL_RTX;
                  continue;
                }
            }
          else
            reg_equiv_init[i] = NULL_RTX;
        }
    }
 
  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init[i])
        {
          fprintf (dump_file, "init_insns for %u: ", i);
          print_inline_rtx (dump_file, reg_equiv_init[i], 20);
          fprintf (dump_file, "\n");
        }

  init_elim_table ();

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause a core dump.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
     stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
  for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    temp_pseudo_reg_arr[n++] = i;

  if (ira_conflicts_p)
    /* Ask IRA to order pseudo-registers for better stack slot
       sharing.  */
    ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);

  for (i = 0; i < n; i++)
    alter_reg (temp_pseudo_reg_arr[i], -1, false);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), mark_not_eliminable, NULL);

  maybe_fix_stack_asms ();

  insns_need_reload = 0;
  something_needs_elimination = 0;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* Spill any hard regs that we know we can't eliminate.  */
  CLEAR_HARD_REG_SET (used_spill_regs);
  /* There can be multiple ways to eliminate a register;
     they should be listed adjacently.
     Elimination for any register fails only if all possible ways fail.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
    {
      int from = ep->from;
      int can_eliminate = 0;
      do
        {
          can_eliminate |= ep->can_eliminate;
          ep++;
        }
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
      if (! can_eliminate)
        spill_hard_reg (from, 1);
    }
 
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
#endif
  finish_spills (global);

  /* From now on, we may need to generate moves differently.  We may also
     allow modifications of insns which cause them to not be recognized.
     Any such modifications will be cleaned up during reload itself.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */
  for (;;)
    {
      int something_changed;
      int did_spill;
      HOST_WIDE_INT starting_frame_size;

      starting_frame_size = get_frame_size ();

      set_initial_elim_offsets ();
      set_initial_label_offsets ();

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.  Similarly if it is a hard register, but one that is not
         valid as an address register.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot.  We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
                                    NULL_RTX);

            if (strict_memory_address_addr_space_p
                  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
                   MEM_ADDR_SPACE (x)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (REG_P (XEXP (x, 0))
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && REG_P (XEXP (XEXP (x, 0), 0))
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot.  Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset.  reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop.  */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1, true);
              }
          }

      if (caller_save_needed)
        setup_save_areas ();

      /* If we allocated another stack slot, redo elimination bookkeeping.  */
      if (starting_frame_size != get_frame_size ())
        continue;
      if (starting_frame_size && crtl->stack_alignment_needed)
        {
          /* If we have a stack frame, we must align it now.  The
             stack size may be a part of the offset computation for
             register elimination.  So if this changes the stack size,
             then repeat the elimination bookkeeping.  We don't
             realign when there is no stack, as that will cause a
             stack frame when none is needed should
             STARTING_FRAME_OFFSET not be already aligned to
             STACK_BOUNDARY.  */
          assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
          if (starting_frame_size != get_frame_size ())
            continue;
        }
 
      if (caller_save_needed)
        {
          save_call_clobbered_regs ();
          /* That might have allocated new insn_chain structures.  */
          reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
        }

      calculate_needs_all_insns (global);

      if (! ira_conflicts_p)
        /* Don't do it for IRA.  We need this info because we don't
           change live_throughout and dead_or_set for chains when IRA
           is used.  */
        CLEAR_REG_SET (&spilled_pseudos);

      did_spill = 0;

      something_changed = 0;

      /* If we allocated any new memory locations, make another pass
         since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
        something_changed = 1;

      /* Even if the frame size remained the same, we might still have
         changed elimination offsets, e.g. if find_reloads called
         force_const_mem requiring the back end to allocate a constant
         pool base register that needs to be saved on the stack.  */
      else if (!verify_initial_elim_offsets ())
        something_changed = 1;

      {
        HARD_REG_SET to_spill;
        CLEAR_HARD_REG_SET (to_spill);
        update_eliminables (&to_spill);
        AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);

        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
          if (TEST_HARD_REG_BIT (to_spill, i))
            {
              spill_hard_reg (i, 1);
              did_spill = 1;

              /* Regardless of the state of spills, if we previously had
                 a register that we thought we could eliminate, but now can
                 not eliminate, we must run another pass.

                 Consider pseudos which have an entry in reg_equiv_* which
                 reference an eliminable register.  We must make another pass
                 to update reg_equiv_* so that we do not substitute in the
                 old value from when we thought the elimination could be
                 performed.  */
              something_changed = 1;
            }
      }

      select_reload_regs ();
      if (failure)
        goto failed;

      if (insns_need_reload != 0 || did_spill)
        something_changed |= finish_spills (global);

      if (! something_changed)
        break;

      if (caller_save_needed)
        delete_caller_save_insns ();

      obstack_free (&reload_obstack, reload_firstobj);
    }
 
  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
        mark_elimination (ep->from, ep->to);

  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there and that is incorrect.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
        {
          rtx list;
          for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
            {
              rtx equiv_insn = XEXP (list, 0);

              /* If we already deleted the insn or if it may trap, we can't
                 delete it.  The latter case shouldn't happen, but can
                 if an insn has a variable address, gets a REG_EH_REGION
                 note added to it, and then gets converted into a load
                 from a constant address.  */
              if (NOTE_P (equiv_insn)
                  || can_throw_internal (equiv_insn))
                ;
              else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
                delete_dead_insn (equiv_insn);
              else
                SET_INSN_DELETED (equiv_insn);
            }
        }
    }

  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (insns_need_reload != 0 || something_needs_elimination
      || something_needs_operands_changed)
    {
      HOST_WIDE_INT old_frame_size = get_frame_size ();

      reload_as_needed (global);

      gcc_assert (old_frame_size == get_frame_size ());

      gcc_assert (verify_initial_elim_offsets ());
    }

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    FOR_EACH_BB (bb)
      bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill
     regs.  */
 failed:
 
  CLEAR_REG_SET (&changed_allocation_pseudos);
  CLEAR_REG_SET (&spilled_pseudos);
  reload_in_progress = 0;

  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;

      if (reg_equiv_mem[i])
        addr = XEXP (reg_equiv_mem[i], 0);

      if (reg_equiv_address[i])
        addr = reg_equiv_address[i];

      if (addr)
        {
          if (reg_renumber[i] < 0)
            {
              rtx reg = regno_reg_rtx[i];

              REG_USERVAR_P (reg) = 0;
              PUT_CODE (reg, MEM);
              XEXP (reg, 0) = addr;
              if (reg_equiv_memory_loc[i])
                MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
              else
                {
                  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
                  MEM_ATTRS (reg) = 0;
                }
              MEM_NOTRAP_P (reg) = 1;
            }
          else if (reg_equiv_mem[i])
            XEXP (reg_equiv_mem[i], 0) = addr;
        }

      /* We don't want complex addressing modes in debug insns
         if simpler ones will do, so delegitimize equivalences
         in debug insns.  */
      if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
        {
          rtx reg = regno_reg_rtx[i];
          rtx equiv = 0;
          df_ref use, next;

          if (reg_equiv_constant[i])
            equiv = reg_equiv_constant[i];
          else if (reg_equiv_invariant[i])
            equiv = reg_equiv_invariant[i];
          else if (reg && MEM_P (reg))
            equiv = targetm.delegitimize_address (reg);
          else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
            equiv = reg;

          if (equiv == reg)
            continue;

          for (use = DF_REG_USE_CHAIN (i); use; use = next)
            {
              insn = DF_REF_INSN (use);

              /* Make sure the next ref is for a different instruction,
                 so that we're not affected by the rescan.  */
              next = DF_REF_NEXT_REG (use);
              while (next && DF_REF_INSN (next) == insn)
                next = DF_REF_NEXT_REG (next);

              if (DEBUG_INSN_P (insn))
                {
                  if (!equiv)
                    {
                      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
                      df_insn_rescan_debug_internal (insn);
                    }
                  else
                    INSN_VAR_LOCATION_LOC (insn)
                      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
                                              reg, equiv);
                }
            }
        }
    }
 
1300
  /* We must set reload_completed now since the cleanup_subreg_operands call
1301
     below will re-recognize each insn and reload may have generated insns
1302
     which are only valid during and after reload.  */
1303
  reload_completed = 1;
1304
 
1305
  /* Make a pass over all the insns and delete all USEs which we inserted
1306
     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1307
     notes.  Delete all CLOBBER insns, except those that refer to the return
1308
     value and the special mem:BLK CLOBBERs added to prevent the scheduler
1309
     from misarranging variable-array code, and simplify (subreg (reg))
1310
     operands.  Strip and regenerate REG_INC notes that may have been moved
1311
     around.  */
1312
 
1313
  for (insn = first; insn; insn = NEXT_INSN (insn))
1314
    if (INSN_P (insn))
1315
      {
1316
        rtx *pnote;
1317
 
1318
        if (CALL_P (insn))
1319
          replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1320
                              VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1321
 
1322
        if ((GET_CODE (PATTERN (insn)) == USE
1323
             /* We mark with QImode USEs introduced by reload itself.  */
1324
             && (GET_MODE (insn) == QImode
1325
                 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1326
            || (GET_CODE (PATTERN (insn)) == CLOBBER
1327
                && (!MEM_P (XEXP (PATTERN (insn), 0))
1328
                    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1329
                    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1330
                        && XEXP (XEXP (PATTERN (insn), 0), 0)
1331
                                != stack_pointer_rtx))
1332
                && (!REG_P (XEXP (PATTERN (insn), 0))
1333
                    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1334
          {
1335
            delete_insn (insn);
1336
            continue;
1337
          }
1338
 
1339
        /* Some CLOBBERs may survive until here and still reference unassigned
1340
           pseudos with a const equivalent, which may in turn cause an ICE in later
1341
           passes if the reference remains in place.  */
1342
        if (GET_CODE (PATTERN (insn)) == CLOBBER)
1343
          replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1344
                              VOIDmode, PATTERN (insn));
1345
 
1346
        /* Discard obvious no-ops, even without -O.  This optimization
1347
           is fast and doesn't interfere with debugging.  */
1348
        if (NONJUMP_INSN_P (insn)
1349
            && GET_CODE (PATTERN (insn)) == SET
1350
            && REG_P (SET_SRC (PATTERN (insn)))
1351
            && REG_P (SET_DEST (PATTERN (insn)))
1352
            && (REGNO (SET_SRC (PATTERN (insn)))
1353
                == REGNO (SET_DEST (PATTERN (insn)))))
1354
          {
1355
            delete_insn (insn);
1356
            continue;
1357
          }
1358
 
1359
        pnote = &REG_NOTES (insn);
1360
        while (*pnote != 0)
1361
          {
1362
            if (REG_NOTE_KIND (*pnote) == REG_DEAD
1363
                || REG_NOTE_KIND (*pnote) == REG_UNUSED
1364
                || REG_NOTE_KIND (*pnote) == REG_INC)
1365
              *pnote = XEXP (*pnote, 1);
1366
            else
1367
              pnote = &XEXP (*pnote, 1);
1368
          }
1369
 
1370
#ifdef AUTO_INC_DEC
1371
        add_auto_inc_notes (insn, PATTERN (insn));
1372
#endif
1373
 
1374
        /* Simplify (subreg (reg)) if it appears as an operand.  */
1375
        cleanup_subreg_operands (insn);
1376
 
1377
        /* Clean up invalid ASMs so that they don't confuse later passes.
1378
           See PR 21299.  */
1379
        if (asm_noperands (PATTERN (insn)) >= 0)
1380
          {
1381
            extract_insn (insn);
1382
            if (!constrain_operands (1))
1383
              {
1384
                error_for_asm (insn,
1385
                               "%<asm%> operand has impossible constraints");
1386
                delete_insn (insn);
1387
                continue;
1388
              }
1389
          }
1390
      }
1391
 
1392
  /* If we are doing generic stack checking, give a warning if this
1393
     function's frame size is larger than we expect.  */
1394
  if (flag_stack_check == GENERIC_STACK_CHECK)
1395
    {
1396
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1397
      static int verbose_warned = 0;
1398
 
1399
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1400
        if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1401
          size += UNITS_PER_WORD;
1402
 
1403
      if (size > STACK_CHECK_MAX_FRAME_SIZE)
1404
        {
1405
          warning (0, "frame size too large for reliable stack checking");
1406
          if (! verbose_warned)
1407
            {
1408
              warning (0, "try reducing the number of local variables");
1409
              verbose_warned = 1;
1410
            }
1411
        }
1412
    }
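  /* Illustrative example of the arithmetic above (hypothetical numbers,
     no particular target assumed): with get_frame_size () == 4096,
     STACK_CHECK_FIXED_FRAME_SIZE == 32, UNITS_PER_WORD == 4 and three
     hard registers that are ever live, not fixed and call-used, SIZE
     becomes 4096 + 32 + 3 * 4 == 4140; the warning is given only if
     that exceeds STACK_CHECK_MAX_FRAME_SIZE.  */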
1413
 
1414
  /* Indicate that we no longer have known memory locations or constants.  */
1415
  if (reg_equiv_constant)
1416
    free (reg_equiv_constant);
1417
  if (reg_equiv_invariant)
1418
    free (reg_equiv_invariant);
1419
  reg_equiv_constant = 0;
1420
  reg_equiv_invariant = 0;
1421
  VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1422
  reg_equiv_memory_loc = 0;
1423
 
1424
  free (temp_pseudo_reg_arr);
1425
 
1426
  if (offsets_known_at)
1427
    free (offsets_known_at);
1428
  if (offsets_at)
1429
    free (offsets_at);
1430
 
1431
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1432
    if (reg_equiv_alt_mem_list[i])
1433
      free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1434
  free (reg_equiv_alt_mem_list);
1435
 
1436
  free (reg_equiv_mem);
1437
  reg_equiv_init = 0;
1438
  free (reg_equiv_address);
1439
  free (reg_max_ref_width);
1440
  free (reg_old_renumber);
1441
  free (pseudo_previous_regs);
1442
  free (pseudo_forbidden_regs);
1443
 
1444
  CLEAR_HARD_REG_SET (used_spill_regs);
1445
  for (i = 0; i < n_spills; i++)
1446
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1447
 
1448
  /* Free all the insn_chain structures at once.  */
1449
  obstack_free (&reload_obstack, reload_startobj);
1450
  unused_insn_chains = 0;
1451
  fixup_abnormal_edges ();
1452
 
1453
  /* Replacing pseudos with their memory equivalents might have
1454
     created shared rtx.  Subsequent passes would get confused
1455
     by this, so unshare everything here.  */
1456
  unshare_all_rtl_again (first);
1457
 
1458
#ifdef STACK_BOUNDARY
1459
  /* init_emit has set the alignment of the hard frame pointer
1460
     to STACK_BOUNDARY.  It is very likely no longer valid if
1461
     the hard frame pointer was used for register allocation.  */
1462
  if (!frame_pointer_needed)
1463
    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1464
#endif
1465
 
1466
  VEC_free (rtx_p, heap, substitute_stack);
1467
 
1468
  return failure;
1469
}
1470
 
1471
/* Yet another special case.  Unfortunately, reg-stack forces people to
1472
   write incorrect clobbers in asm statements.  These clobbers must not
1473
   cause the register to appear in bad_spill_regs, otherwise we'll call
1474
   fatal_insn later.  We clear the corresponding regnos in the live
1475
   register sets to avoid this.
1476
   The whole thing is rather sick, I'm afraid.  */
1477
 
1478
static void
1479
maybe_fix_stack_asms (void)
1480
{
1481
#ifdef STACK_REGS
1482
  const char *constraints[MAX_RECOG_OPERANDS];
1483
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1484
  struct insn_chain *chain;
1485
 
1486
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1487
    {
1488
      int i, noperands;
1489
      HARD_REG_SET clobbered, allowed;
1490
      rtx pat;
1491
 
1492
      if (! INSN_P (chain->insn)
1493
          || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1494
        continue;
1495
      pat = PATTERN (chain->insn);
1496
      if (GET_CODE (pat) != PARALLEL)
1497
        continue;
1498
 
1499
      CLEAR_HARD_REG_SET (clobbered);
1500
      CLEAR_HARD_REG_SET (allowed);
1501
 
1502
      /* First, make a mask of all stack regs that are clobbered.  */
1503
      for (i = 0; i < XVECLEN (pat, 0); i++)
1504
        {
1505
          rtx t = XVECEXP (pat, 0, i);
1506
          if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1507
            SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1508
        }
1509
 
1510
      /* Get the operand values and constraints out of the insn.  */
1511
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1512
                           constraints, operand_mode, NULL);
1513
 
1514
      /* For every operand, see what registers are allowed.  */
1515
      for (i = 0; i < noperands; i++)
1516
        {
1517
          const char *p = constraints[i];
1518
          /* For every alternative, we compute the class of registers allowed
1519
             for reloading in CLS, and merge its contents into the reg set
1520
             ALLOWED.  */
1521
          int cls = (int) NO_REGS;
1522
 
1523
          for (;;)
1524
            {
1525
              char c = *p;
1526
 
1527
              if (c == '\0' || c == ',' || c == '#')
1528
                {
1529
                  /* End of one alternative - mark the regs in the current
1530
                     class, and reset the class.  */
1531
                  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1532
                  cls = NO_REGS;
1533
                  p++;
1534
                  if (c == '#')
1535
                    do {
1536
                      c = *p++;
1537
                    } while (c != '\0' && c != ',');
1538
                  if (c == '\0')
1539
                    break;
1540
                  continue;
1541
                }
1542
 
1543
              switch (c)
1544
                {
1545
                case '=': case '+': case '*': case '%': case '?': case '!':
1546
                case '0': case '1': case '2': case '3': case '4': case '<':
1547
                case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1548
                case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1549
                case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1550
                case TARGET_MEM_CONSTRAINT:
1551
                  break;
1552
 
1553
                case 'p':
1554
                  cls = (int) reg_class_subunion[cls]
1555
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1556
                  break;
1557
 
1558
                case 'g':
1559
                case 'r':
1560
                  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1561
                  break;
1562
 
1563
                default:
1564
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1565
                    cls = (int) reg_class_subunion[cls]
1566
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1567
                  else
1568
                    cls = (int) reg_class_subunion[cls]
1569
                      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1570
                }
1571
              p += CONSTRAINT_LEN (c, p);
1572
            }
1573
        }
1574
      /* Those of the registers which are clobbered, but allowed by the
1575
         constraints, must be usable as reload registers.  So clear them
1576
         out of the life information.  */
1577
      AND_HARD_REG_SET (allowed, clobbered);
1578
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1579
        if (TEST_HARD_REG_BIT (allowed, i))
1580
          {
1581
            CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1582
            CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1583
          }
1584
    }
1585
 
1586
#endif
1587
}
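/* Illustrative sketch of the constraint walk in maybe_fix_stack_asms
   (the constraint string is hypothetical): for an operand constrained
   as "=r,m" the loop sees two alternatives separated by ','.  The first
   contributes GENERAL_REGS to ALLOWED via the 'r' case; the second adds
   no register class, since 'm' matches memory rather than registers.
   Only hard regs that end up both in ALLOWED and in CLOBBERED are then
   cleared from the chain's live_throughout and dead_or_set sets.  */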
1588
 
1589
/* Copy the global variables n_reloads and rld into the corresponding elts
1590
   of CHAIN.  */
1591
static void
1592
copy_reloads (struct insn_chain *chain)
1593
{
1594
  chain->n_reloads = n_reloads;
1595
  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1596
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1597
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1598
}
1599
 
1600
/* Walk the chain of insns, and determine for each whether it needs reloads
1601
   and/or eliminations.  Build the corresponding insns_need_reload list, and
1602
   set something_needs_elimination as appropriate.  */
1603
static void
1604
calculate_needs_all_insns (int global)
1605
{
1606
  struct insn_chain **pprev_reload = &insns_need_reload;
1607
  struct insn_chain *chain, *next = 0;
1608
 
1609
  something_needs_elimination = 0;
1610
 
1611
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1612
  for (chain = reload_insn_chain; chain != 0; chain = next)
1613
    {
1614
      rtx insn = chain->insn;
1615
 
1616
      next = chain->next;
1617
 
1618
      /* Clear out the shortcuts.  */
1619
      chain->n_reloads = 0;
1620
      chain->need_elim = 0;
1621
      chain->need_reload = 0;
1622
      chain->need_operand_change = 0;
1623
 
1624
      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1625
         include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1626
         what effects this has on the known offsets at labels.  */
1627
 
1628
      if (LABEL_P (insn) || JUMP_P (insn)
1629
          || (INSN_P (insn) && REG_NOTES (insn) != 0))
1630
        set_label_offsets (insn, insn, 0);
1631
 
1632
      if (INSN_P (insn))
1633
        {
1634
          rtx old_body = PATTERN (insn);
1635
          int old_code = INSN_CODE (insn);
1636
          rtx old_notes = REG_NOTES (insn);
1637
          int did_elimination = 0;
1638
          int operands_changed = 0;
1639
          rtx set = single_set (insn);
1640
 
1641
          /* Skip insns that only set an equivalence.  */
1642
          if (set && REG_P (SET_DEST (set))
1643
              && reg_renumber[REGNO (SET_DEST (set))] < 0
1644
              && (reg_equiv_constant[REGNO (SET_DEST (set))]
1645
                  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1646
              && reg_equiv_init[REGNO (SET_DEST (set))])
1647
            continue;
1648
 
1649
          /* If needed, eliminate any eliminable registers.  */
1650
          if (num_eliminable || num_eliminable_invariants)
1651
            did_elimination = eliminate_regs_in_insn (insn, 0);
1652
 
1653
          /* Analyze the instruction.  */
1654
          operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1655
                                           global, spill_reg_order);
1656
 
1657
          /* If a no-op set needs more than one reload, this is likely
1658
             to be something that needs input address reloads.  We
1659
             can't get rid of this cleanly later, and it is of no use
1660
             anyway, so discard it now.
1661
             We only do this when expensive_optimizations is enabled,
1662
             since this complements reload inheritance / output
1663
             reload deletion, and it can make debugging harder.  */
1664
          if (flag_expensive_optimizations && n_reloads > 1)
1665
            {
1666
              rtx set = single_set (insn);
1667
              if (set
1668
                  &&
1669
                  ((SET_SRC (set) == SET_DEST (set)
1670
                    && REG_P (SET_SRC (set))
1671
                    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1672
                   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1673
                       && reg_renumber[REGNO (SET_SRC (set))] < 0
1674
                       && reg_renumber[REGNO (SET_DEST (set))] < 0
1675
                       && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1676
                       && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1677
                       && rtx_equal_p (reg_equiv_memory_loc
1678
                                       [REGNO (SET_SRC (set))],
1679
                                       reg_equiv_memory_loc
1680
                                       [REGNO (SET_DEST (set))]))))
1681
                {
1682
                  if (ira_conflicts_p)
1683
                    /* Inform IRA about the insn deletion.  */
1684
                    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1685
                                                   REGNO (SET_SRC (set)));
1686
                  delete_insn (insn);
1687
                  /* Delete it from the reload chain.  */
1688
                  if (chain->prev)
1689
                    chain->prev->next = next;
1690
                  else
1691
                    reload_insn_chain = next;
1692
                  if (next)
1693
                    next->prev = chain->prev;
1694
                  chain->next = unused_insn_chains;
1695
                  unused_insn_chains = chain;
1696
                  continue;
1697
                }
1698
            }
1699
          if (num_eliminable)
1700
            update_eliminable_offsets ();
1701
 
1702
          /* Remember for later shortcuts which insns had any reloads or
1703
             register eliminations.  */
1704
          chain->need_elim = did_elimination;
1705
          chain->need_reload = n_reloads > 0;
1706
          chain->need_operand_change = operands_changed;
1707
 
1708
          /* Discard any register replacements done.  */
1709
          if (did_elimination)
1710
            {
1711
              obstack_free (&reload_obstack, reload_insn_firstobj);
1712
              PATTERN (insn) = old_body;
1713
              INSN_CODE (insn) = old_code;
1714
              REG_NOTES (insn) = old_notes;
1715
              something_needs_elimination = 1;
1716
            }
1717
 
1718
          something_needs_operands_changed |= operands_changed;
1719
 
1720
          if (n_reloads != 0)
1721
            {
1722
              copy_reloads (chain);
1723
              *pprev_reload = chain;
1724
              pprev_reload = &chain->next_need_reload;
1725
            }
1726
        }
1727
    }
1728
  *pprev_reload = 0;
1729
}
1730
 
1731
/* Comparison function for qsort to decide which of two reloads
1732
   should be handled first.  *P1 and *P2 are the reload numbers.  */
1733
 
1734
static int
1735
reload_reg_class_lower (const void *r1p, const void *r2p)
1736
{
1737
  int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1738
  int t;
1739
 
1740
  /* Consider required reloads before optional ones.  */
1741
  t = rld[r1].optional - rld[r2].optional;
1742
  if (t != 0)
1743
    return t;
1744
 
1745
  /* Count all solitary classes before non-solitary ones.  */
1746
  t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1747
       - (reg_class_size[(int) rld[r1].rclass] == 1));
1748
  if (t != 0)
1749
    return t;
1750
 
1751
  /* Aside from solitaires, consider all multi-reg groups first.  */
1752
  t = rld[r2].nregs - rld[r1].nregs;
1753
  if (t != 0)
1754
    return t;
1755
 
1756
  /* Consider reloads in order of increasing reg-class number.  */
1757
  t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1758
  if (t != 0)
1759
    return t;
1760
 
1761
  /* If reloads are equally urgent, sort by reload number,
1762
     so that the results of qsort leave nothing to chance.  */
1763
  return r1 - r2;
1764
}
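/* Worked example of the ordering above (the reload contents are
   hypothetical): given rld[0] = an optional GENERAL_REGS reload of one
   register, rld[1] = a required GENERAL_REGS reload of two registers,
   and rld[2] = a required reload in a class containing a single
   register, sorting with this comparison yields the order 2, 1, 0:
   required reloads come before optional ones, solitary classes come
   first among those, and wider groups precede narrower ones.  */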
1765
 
1766
/* The cost of spilling each hard reg.  */
1767
static int spill_cost[FIRST_PSEUDO_REGISTER];
1768
 
1769
/* When spilling multiple hard registers, we use SPILL_COST for the first
1770
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1771
   is incremented only for the first hard reg of a multi-reg pseudo.  */
1772
static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1773
 
1774
/* Map of hard regno to pseudo regno currently occupying the hard
1775
   reg.  */
1776
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1777
 
1778
/* Update the spill cost arrays, considering that pseudo REG is live.  */
1779
 
1780
static void
1781
count_pseudo (int reg)
1782
{
1783
  int freq = REG_FREQ (reg);
1784
  int r = reg_renumber[reg];
1785
  int nregs;
1786
 
1787
  if (REGNO_REG_SET_P (&pseudos_counted, reg)
1788
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1789
      /* Ignore spilled pseudo-registers which can be here only if IRA
1790
         is used.  */
1791
      || (ira_conflicts_p && r < 0))
1792
    return;
1793
 
1794
  SET_REGNO_REG_SET (&pseudos_counted, reg);
1795
 
1796
  gcc_assert (r >= 0);
1797
 
1798
  spill_add_cost[r] += freq;
1799
  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1800
  while (nregs-- > 0)
1801
    {
1802
      hard_regno_to_pseudo_regno[r + nregs] = reg;
1803
      spill_cost[r + nregs] += freq;
1804
    }
1805
}
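/* Illustrative example (hypothetical pseudo): if pseudo 100 has
   REG_FREQ == 30 and reg_renumber[100] == R, in a mode that occupies
   two hard registers, count_pseudo (100) adds 30 to spill_add_cost[R],
   adds 30 to both spill_cost[R] and spill_cost[R + 1], and records 100
   in hard_regno_to_pseudo_regno[R] and [R + 1].  */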
1806
 
1807
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1808
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1809
 
1810
static void
1811
order_regs_for_reload (struct insn_chain *chain)
1812
{
1813
  unsigned i;
1814
  HARD_REG_SET used_by_pseudos;
1815
  HARD_REG_SET used_by_pseudos2;
1816
  reg_set_iterator rsi;
1817
 
1818
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1819
 
1820
  memset (spill_cost, 0, sizeof spill_cost);
1821
  memset (spill_add_cost, 0, sizeof spill_add_cost);
1822
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823
    hard_regno_to_pseudo_regno[i] = -1;
1824
 
1825
  /* Count number of uses of each hard reg by pseudo regs allocated to it
1826
     and then order them by decreasing use.  First exclude hard registers
1827
     that are live in or across this insn.  */
1828
 
1829
  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1830
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1831
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1832
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1833
 
1834
  /* Now find out which pseudos are allocated to those hard regs, and
1835
     update the spill cost arrays accordingly.  */
1836
  CLEAR_REG_SET (&pseudos_counted);
1837
 
1838
  EXECUTE_IF_SET_IN_REG_SET
1839
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1840
    {
1841
      count_pseudo (i);
1842
    }
1843
  EXECUTE_IF_SET_IN_REG_SET
1844
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1845
    {
1846
      count_pseudo (i);
1847
    }
1848
  CLEAR_REG_SET (&pseudos_counted);
1849
}
1850
 
1851
/* Vector of reload-numbers showing the order in which the reloads should
1852
   be processed.  */
1853
static short reload_order[MAX_RELOADS];
1854
 
1855
/* This is used to keep track of the spill regs used in one insn.  */
1856
static HARD_REG_SET used_spill_regs_local;
1857
 
1858
/* We decided to spill hard register SPILLED, which has a size of
1859
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1860
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1861
   update SPILL_COST/SPILL_ADD_COST.  */
1862
 
1863
static void
1864
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1865
{
1866
  int freq = REG_FREQ (reg);
1867
  int r = reg_renumber[reg];
1868
  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1869
 
1870
  /* Ignore spilled pseudo-registers which can be here only if IRA is
1871
     used.  */
1872
  if ((ira_conflicts_p && r < 0)
1873
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1874
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
1875
    return;
1876
 
1877
  SET_REGNO_REG_SET (&spilled_pseudos, reg);
1878
 
1879
  spill_add_cost[r] -= freq;
1880
  while (nregs-- > 0)
1881
    {
1882
      hard_regno_to_pseudo_regno[r + nregs] = -1;
1883
      spill_cost[r + nregs] -= freq;
1884
    }
1885
}
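/* The early return above is an interval-overlap test: the pseudo is
   affected only if [R, R + NREGS) overlaps [SPILLED, SPILLED +
   SPILLED_NREGS).  Hypothetical example: with SPILLED == 4 and
   SPILLED_NREGS == 2, a pseudo occupying hard regs 6-7 is ignored
   (4 + 2 <= 6), while one occupying hard regs 5-6 is added to
   spilled_pseudos and its frequency is subtracted from the cost
   arrays.  */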
1886
 
1887
/* Find reload register to use for reload number ORDER.  */
1888
 
1889
static int
1890
find_reg (struct insn_chain *chain, int order)
1891
{
1892
  int rnum = reload_order[order];
1893
  struct reload *rl = rld + rnum;
1894
  int best_cost = INT_MAX;
1895
  int best_reg = -1;
1896
  unsigned int i, j, n;
1897
  int k;
1898
  HARD_REG_SET not_usable;
1899
  HARD_REG_SET used_by_other_reload;
1900
  reg_set_iterator rsi;
1901
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1902
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1903
 
1904
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1905
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1906
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1907
 
1908
  CLEAR_HARD_REG_SET (used_by_other_reload);
1909
  for (k = 0; k < order; k++)
1910
    {
1911
      int other = reload_order[k];
1912
 
1913
      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1914
        for (j = 0; j < rld[other].nregs; j++)
1915
          SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1916
    }
1917
 
1918
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1919
    {
1920
#ifdef REG_ALLOC_ORDER
1921
      unsigned int regno = reg_alloc_order[i];
1922
#else
1923
      unsigned int regno = i;
1924
#endif
1925
 
1926
      if (! TEST_HARD_REG_BIT (not_usable, regno)
1927
          && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1928
          && HARD_REGNO_MODE_OK (regno, rl->mode))
1929
        {
1930
          int this_cost = spill_cost[regno];
1931
          int ok = 1;
1932
          unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1933
 
1934
          for (j = 1; j < this_nregs; j++)
1935
            {
1936
              this_cost += spill_add_cost[regno + j];
1937
              if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1938
                  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1939
                ok = 0;
1940
            }
1941
          if (! ok)
1942
            continue;
1943
 
1944
          if (ira_conflicts_p)
1945
            {
1946
              /* Ask IRA to find a better pseudo-register for
1947
                 spilling.  */
1948
              for (n = j = 0; j < this_nregs; j++)
1949
                {
1950
                  int r = hard_regno_to_pseudo_regno[regno + j];
1951
 
1952
                  if (r < 0)
1953
                    continue;
1954
                  if (n == 0 || regno_pseudo_regs[n - 1] != r)
1955
                    regno_pseudo_regs[n++] = r;
1956
                }
1957
              regno_pseudo_regs[n++] = -1;
1958
              if (best_reg < 0
1959
                  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1960
                                                      best_regno_pseudo_regs,
1961
                                                      rl->in, rl->out,
1962
                                                      chain->insn))
1963
                {
1964
                  best_reg = regno;
1965
                  for (j = 0;; j++)
1966
                    {
1967
                      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1968
                      if (regno_pseudo_regs[j] < 0)
1969
                        break;
1970
                    }
1971
                }
1972
              continue;
1973
            }
1974
 
1975
          if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1976
            this_cost--;
1977
          if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1978
            this_cost--;
1979
          if (this_cost < best_cost
1980
              /* Among registers with equal cost, prefer caller-saved ones, or
1981
                 use REG_ALLOC_ORDER if it is defined.  */
1982
              || (this_cost == best_cost
1983
#ifdef REG_ALLOC_ORDER
1984
                  && (inv_reg_alloc_order[regno]
1985
                      < inv_reg_alloc_order[best_reg])
1986
#else
1987
                  && call_used_regs[regno]
1988
                  && ! call_used_regs[best_reg]
1989
#endif
1990
                  ))
1991
            {
1992
              best_reg = regno;
1993
              best_cost = this_cost;
1994
            }
1995
        }
1996
    }
1997
  if (best_reg == -1)
1998
    return 0;
1999
 
2000
  if (dump_file)
2001
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
2002
 
2003
  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
2004
  rl->regno = best_reg;
2005
 
2006
  EXECUTE_IF_SET_IN_REG_SET
2007
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
2008
    {
2009
      count_spilled_pseudo (best_reg, rl->nregs, j);
2010
    }
2011
 
2012
  EXECUTE_IF_SET_IN_REG_SET
2013
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
2014
    {
2015
      count_spilled_pseudo (best_reg, rl->nregs, j);
2016
    }
2017
 
2018
  for (i = 0; i < rl->nregs; i++)
2019
    {
2020
      gcc_assert (spill_cost[best_reg + i] == 0);
2021
      gcc_assert (spill_add_cost[best_reg + i] == 0);
2022
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
2023
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
2024
    }
2025
  return 1;
2026
}
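/* Sketch of the cost comparison in find_reg (hypothetical situation,
   REG_ALLOC_ORDER not defined): if two candidate hard registers end up
   with equal spill cost, the caller-saved one is preferred, since
   pressing a previously unused call-saved register into service would
   force an extra save/restore in the prologue and epilogue.  The cost
   is also decremented for candidates that already appear as the
   reload's IN or OUT register.  */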
2027
 
2028
/* Find more reload regs to satisfy the remaining need of an insn, which
2029
   is given by CHAIN.
2030
   Do it by ascending class number, since otherwise a reg
2031
   might be spilled for a big class and might fail to count
2032
   for a smaller class even though it belongs to that class.  */
2033
 
2034
static void
2035
find_reload_regs (struct insn_chain *chain)
2036
{
2037
  int i;
2038
 
2039
  /* In order to be certain of getting the registers we need,
2040
     we must sort the reloads into order of increasing register class.
2041
     Then our grabbing of reload registers will parallel the process
2042
     that provided the reload registers.  */
2043
  for (i = 0; i < chain->n_reloads; i++)
2044
    {
2045
      /* Show whether this reload already has a hard reg.  */
2046
      if (chain->rld[i].reg_rtx)
2047
        {
2048
          int regno = REGNO (chain->rld[i].reg_rtx);
2049
          chain->rld[i].regno = regno;
2050
          chain->rld[i].nregs
2051
            = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2052
        }
2053
      else
2054
        chain->rld[i].regno = -1;
2055
      reload_order[i] = i;
2056
    }
2057
 
2058
  n_reloads = chain->n_reloads;
2059
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2060
 
2061
  CLEAR_HARD_REG_SET (used_spill_regs_local);
2062
 
2063
  if (dump_file)
2064
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2065
 
2066
  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2067
 
2068
  /* Compute the order of preference for hard registers to spill.  */
2069
 
2070
  order_regs_for_reload (chain);
2071
 
2072
  for (i = 0; i < n_reloads; i++)
2073
    {
2074
      int r = reload_order[i];
2075
 
2076
      /* Ignore reloads that got marked inoperative.  */
2077
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2078
          && ! rld[r].optional
2079
          && rld[r].regno == -1)
2080
        if (! find_reg (chain, i))
2081
          {
2082
            if (dump_file)
2083
              fprintf (dump_file, "reload failure for reload %d\n", r);
2084
            spill_failure (chain->insn, rld[r].rclass);
2085
            failure = 1;
2086
            return;
2087
          }
2088
    }
2089
 
2090
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2091
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2092
 
2093
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2094
}
2095
 
2096
static void
2097
select_reload_regs (void)
2098
{
2099
  struct insn_chain *chain;
2100
 
2101
  /* Try to satisfy the needs for each insn.  */
2102
  for (chain = insns_need_reload; chain != 0;
2103
       chain = chain->next_need_reload)
2104
    find_reload_regs (chain);
2105
}
2106
 
2107
/* Delete all insns that were inserted by emit_caller_save_insns during
2108
   this iteration.  */
2109
static void
2110
delete_caller_save_insns (void)
2111
{
2112
  struct insn_chain *c = reload_insn_chain;
2113
 
2114
  while (c != 0)
2115
    {
2116
      while (c != 0 && c->is_caller_save_insn)
2117
        {
2118
          struct insn_chain *next = c->next;
2119
          rtx insn = c->insn;
2120
 
2121
          if (c == reload_insn_chain)
2122
            reload_insn_chain = next;
2123
          delete_insn (insn);
2124
 
2125
          if (next)
2126
            next->prev = c->prev;
2127
          if (c->prev)
2128
            c->prev->next = next;
2129
          c->next = unused_insn_chains;
2130
          unused_insn_chains = c;
2131
          c = next;
2132
        }
2133
      if (c != 0)
2134
        c = c->next;
2135
    }
2136
}
2137
 
2138
/* Handle the failure to find a register to spill.
2139
   INSN should be one of the insns which needed this particular spill reg.  */
2140
 
2141
static void
2142
spill_failure (rtx insn, enum reg_class rclass)
2143
{
2144
  if (asm_noperands (PATTERN (insn)) >= 0)
2145
    error_for_asm (insn, "can't find a register in class %qs while "
2146
                   "reloading %<asm%>",
2147
                   reg_class_names[rclass]);
2148
  else
2149
    {
2150
      error ("unable to find a register to spill in class %qs",
2151
             reg_class_names[rclass]);
2152
 
2153
      if (dump_file)
2154
        {
2155
          fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2156
          debug_reload_to_stream (dump_file);
2157
        }
2158
      fatal_insn ("this is the insn:", insn);
2159
    }
2160
}
2161
 
2162
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2163
   data that is dead in INSN.  */
2164
 
2165
static void
2166
delete_dead_insn (rtx insn)
2167
{
2168
  rtx prev = prev_real_insn (insn);
2169
  rtx prev_dest;
2170
 
2171
  /* If the previous insn sets a register that dies in our insn, delete it
2172
     too.  */
2173
  if (prev && GET_CODE (PATTERN (prev)) == SET
2174
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2175
      && reg_mentioned_p (prev_dest, PATTERN (insn))
2176
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2177
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
2178
    delete_dead_insn (prev);
2179
 
2180
  SET_INSN_DELETED (insn);
2181
}
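/* Illustrative example (hypothetical insns): if INSN is
   (set (reg 117) (reg 116)) and carries a REG_DEAD note for (reg 116),
   and the previous real insn is (set (reg 116) (mem ...)) whose source
   is a non-volatile load with no side effects, the recursion above
   deletes that previous insn as well before marking INSN deleted.  */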
2182
 
2183
/* Modify the home of pseudo-reg I.
2184
   The new home is present in reg_renumber[I].
2185
 
2186
   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2187
   or it may be -1, meaning there is none or it is not relevant.
2188
   This is used so that all pseudos spilled from a given hard reg
2189
   can share one stack slot.  */
2190
 
2191
static void
2192
alter_reg (int i, int from_reg, bool dont_share_p)
2193
{
2194
  /* When outputting an inline function, this can happen
2195
     for a reg that isn't actually used.  */
2196
  if (regno_reg_rtx[i] == 0)
2197
    return;
2198
 
2199
  /* If the reg got changed to a MEM at rtl-generation time,
2200
     ignore it.  */
2201
  if (!REG_P (regno_reg_rtx[i]))
2202
    return;
2203
 
2204
  /* Modify the reg-rtx to contain the new hard reg
2205
     number or else to contain its pseudo reg number.  */
2206
  SET_REGNO (regno_reg_rtx[i],
2207
             reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2208
 
2209
  /* If we have a pseudo that is needed but has no hard reg or equivalent,
2210
     allocate a stack slot for it.  */
2211
 
2212
  if (reg_renumber[i] < 0
2213
      && REG_N_REFS (i) > 0
2214
      && reg_equiv_constant[i] == 0
2215
      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2216
      && reg_equiv_memory_loc[i] == 0)
2217
    {
2218
      rtx x = NULL_RTX;
2219
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2220
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2221
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2222
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2223
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2224
      int adjust = 0;
2225
 
2226
      if (ira_conflicts_p)
2227
        {
2228
          /* Mark the spill for IRA.  */
2229
          SET_REGNO_REG_SET (&spilled_pseudos, i);
2230
          if (!dont_share_p)
2231
            x = ira_reuse_stack_slot (i, inherent_size, total_size);
2232
        }
2233
 
2234
      if (x)
2235
        ;
2236
 
2237
      /* Each pseudo reg has an inherent size which comes from its own mode,
2238
         and a total size which provides room for paradoxical subregs
2239
         which refer to the pseudo reg in wider modes.
2240
 
2241
         We can use a slot already allocated if it provides both
2242
         enough inherent space and enough total space.
2243
         Otherwise, we allocate a new slot, making sure that it has no less
2244
         inherent space, and no less total space, than the previous slot.  */
2245
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2246
        {
2247
          rtx stack_slot;
2248
 
2249
          /* No known place to spill from => no slot to reuse.  */
2250
          x = assign_stack_local (mode, total_size,
2251
                                  min_align > inherent_align
2252
                                  || total_size > inherent_size ? -1 : 0);
2253
 
2254
          stack_slot = x;
2255
 
2256
          /* Cancel the big-endian correction done in assign_stack_local.
2257
             Get the address of the beginning of the slot.  This is so we
2258
             can do a big-endian correction unconditionally below.  */
2259
          if (BYTES_BIG_ENDIAN)
2260
            {
2261
              adjust = inherent_size - total_size;
2262
              if (adjust)
2263
                stack_slot
2264
                  = adjust_address_nv (x, mode_for_size (total_size
2265
                                                         * BITS_PER_UNIT,
2266
                                                         MODE_INT, 1),
2267
                                       adjust);
2268
            }
2269
 
2270
          if (! dont_share_p && ira_conflicts_p)
2271
            /* Inform IRA about allocating a new stack slot.  */
2272
            ira_mark_new_stack_slot (stack_slot, i, total_size);
2273
        }
2274
 
2275
      /* Reuse a stack slot if possible.  */
2276
      else if (spill_stack_slot[from_reg] != 0
2277
               && spill_stack_slot_width[from_reg] >= total_size
2278
               && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2279
                   >= inherent_size)
2280
               && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2281
        x = spill_stack_slot[from_reg];
2282
 
2283
      /* Allocate a bigger slot.  */
2284
      else
2285
        {
2286
          /* Compute maximum size needed, both for inherent size
2287
             and for total size.  */
2288
          rtx stack_slot;
2289
 
2290
          if (spill_stack_slot[from_reg])
2291
            {
2292
              if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2293
                  > inherent_size)
2294
                mode = GET_MODE (spill_stack_slot[from_reg]);
2295
              if (spill_stack_slot_width[from_reg] > total_size)
2296
                total_size = spill_stack_slot_width[from_reg];
2297
              if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2298
                min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2299
            }
2300
 
2301
          /* Make a slot with that size.  */
2302
          x = assign_stack_local (mode, total_size,
2303
                                  min_align > inherent_align
2304
                                  || total_size > inherent_size ? -1 : 0);
2305
          stack_slot = x;
2306
 
2307
          /* Cancel the  big-endian correction done in assign_stack_local.
2308
             Get the address of the beginning of the slot.  This is so we
2309
             can do a big-endian correction unconditionally below.  */
2310
          if (BYTES_BIG_ENDIAN)
2311
            {
2312
              adjust = GET_MODE_SIZE (mode) - total_size;
2313
              if (adjust)
2314
                stack_slot
2315
                  = adjust_address_nv (x, mode_for_size (total_size
2316
                                                         * BITS_PER_UNIT,
2317
                                                         MODE_INT, 1),
2318
                                       adjust);
2319
            }
2320
 
2321
          spill_stack_slot[from_reg] = stack_slot;
2322
          spill_stack_slot_width[from_reg] = total_size;
2323
        }
2324
 
2325
      /* On a big endian machine, the "address" of the slot
2326
         is the address of the low part that fits its inherent mode.  */
2327
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2328
        adjust += (total_size - inherent_size);
2329
 
2330
      /* If we have any adjustment to make, or if the stack slot is the
2331
         wrong mode, make a new stack slot.  */
2332
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2333
 
2334
      /* Set all of the memory attributes as appropriate for a spill.  */
2335
      set_mem_attrs_for_spill (x);
2336
 
2337
      /* Save the stack slot for later.  */
2338
      reg_equiv_memory_loc[i] = x;
2339
    }
2340
}
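/* Illustrative example of the slot sharing above (hypothetical pseudos,
   IRA slot sharing not in effect): if an SImode pseudo is spilled from
   hard reg 3, a 4-byte slot is allocated and remembered in
   spill_stack_slot[3].  A second SImode pseudo spilled from hard reg 3
   with compatible alignment reuses that slot, while a DImode pseudo
   spilled from hard reg 3 needs 8 bytes, so a bigger slot is allocated
   and becomes the new spill_stack_slot[3].  */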
2341
 
2342
/* Mark the slots in regs_ever_live for the hard regs used by
2343
   pseudo-reg number REGNO, accessed in MODE.  */
2344
 
2345
static void
2346
mark_home_live_1 (int regno, enum machine_mode mode)
2347
{
2348
  int i, lim;
2349
 
2350
  i = reg_renumber[regno];
2351
  if (i < 0)
2352
    return;
2353
  lim = end_hard_regno (mode, i);
2354
  while (i < lim)
2355
    df_set_regs_ever_live (i++, true);
2356
}
2357
 
2358
/* Mark the slots in regs_ever_live for the hard regs
2359
   used by pseudo-reg number REGNO.  */
2360
 
2361
void
2362
mark_home_live (int regno)
2363
{
2364
  if (reg_renumber[regno] >= 0)
2365
    mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2366
}
2367
 
2368
/* This function handles the tracking of elimination offsets around branches.
2369
 
2370
   X is a piece of RTL being scanned.
2371
 
2372
   INSN is the insn that it came from, if any.
2373
 
2374
   INITIAL_P is nonzero if we are to set the offset to be the initial
2375
   offset and zero if we are setting the offset of the label to be the
2376
   current offset.  */
2377
 
2378
static void
2379
set_label_offsets (rtx x, rtx insn, int initial_p)
2380
{
2381
  enum rtx_code code = GET_CODE (x);
2382
  rtx tem;
2383
  unsigned int i;
2384
  struct elim_table *p;
2385
 
2386
  switch (code)
2387
    {
2388
    case LABEL_REF:
2389
      if (LABEL_REF_NONLOCAL_P (x))
2390
        return;
2391
 
2392
      x = XEXP (x, 0);
2393
 
2394
      /* ... fall through ...  */
2395
 
2396
    case CODE_LABEL:
2397
      /* If we know nothing about this label, set the desired offsets.  Note
2398
         that this sets the offset at a label to be the offset before a label
2399
         if we don't know anything about the label.  This is not correct for
2400
         the label after a BARRIER, but is the best guess we can make.  If
2401
         we guessed wrong, we will suppress an elimination that might have
2402
         been possible had we been able to guess correctly.  */
2403
 
2404
      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2405
        {
2406
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2407
            offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2408
              = (initial_p ? reg_eliminate[i].initial_offset
2409
                 : reg_eliminate[i].offset);
2410
          offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2411
        }
2412
 
2413
      /* Otherwise, if this is the definition of a label and it is
2414
         preceded by a BARRIER, set our offsets to the known offset of
2415
         that label.  */
2416
 
2417
      else if (x == insn
2418
               && (tem = prev_nonnote_insn (insn)) != 0
2419
               && BARRIER_P (tem))
2420
        set_offsets_for_label (insn);
2421
      else
2422
        /* If neither of the above cases is true, compare each offset
2423
           with those previously recorded and suppress any eliminations
2424
           where the offsets disagree.  */
2425
 
2426
        for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2427
          if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2428
              != (initial_p ? reg_eliminate[i].initial_offset
2429
                  : reg_eliminate[i].offset))
2430
            reg_eliminate[i].can_eliminate = 0;
2431
 
2432
      return;
2433
 
2434
    case JUMP_INSN:
2435
      set_label_offsets (PATTERN (insn), insn, initial_p);
2436
 
2437
      /* ... fall through ...  */
2438
 
2439
    case INSN:
2440
    case CALL_INSN:
2441
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2442
         to indirectly and hence must have all eliminations at their
2443
         initial offsets.  */
2444
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2445
        if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2446
          set_label_offsets (XEXP (tem, 0), insn, 1);
2447
      return;
2448
 
2449
    case PARALLEL:
2450
    case ADDR_VEC:
2451
    case ADDR_DIFF_VEC:
2452
      /* Each of the labels in the parallel or address vector must be
2453
         at their initial offsets.  We want the first field for PARALLEL
2454
         and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2455
 
2456
      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2457
        set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2458
                           insn, initial_p);
2459
      return;
2460
 
2461
    case SET:
2462
      /* We only care about setting PC.  If the source is not RETURN,
2463
         IF_THEN_ELSE, or a label, disable any eliminations not at
2464
         their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2465
         isn't one of those possibilities.  For branches to a label,
2466
         call ourselves recursively.
2467
 
2468
         Note that this can disable elimination unnecessarily when we have
2469
         a non-local goto since it will look like a non-constant jump to
2470
         someplace in the current function.  This isn't a significant
2471
         problem since such jumps will normally be when all elimination
2472
         pairs are back to their initial offsets.  */
2473
 
2474
      if (SET_DEST (x) != pc_rtx)
2475
        return;
2476
 
2477
      switch (GET_CODE (SET_SRC (x)))
2478
        {
2479
        case PC:
2480
        case RETURN:
2481
          return;
2482
 
2483
        case LABEL_REF:
2484
          set_label_offsets (SET_SRC (x), insn, initial_p);
2485
          return;
2486
 
2487
        case IF_THEN_ELSE:
2488
          tem = XEXP (SET_SRC (x), 1);
2489
          if (GET_CODE (tem) == LABEL_REF)
2490
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2491
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2492
            break;
2493
 
2494
          tem = XEXP (SET_SRC (x), 2);
2495
          if (GET_CODE (tem) == LABEL_REF)
2496
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2497
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2498
            break;
2499
          return;
2500
 
2501
        default:
2502
          break;
2503
        }
2504
 
2505
      /* If we reach here, all eliminations must be at their initial
2506
         offset because we are doing a jump to a variable address.  */
2507
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2508
        if (p->offset != p->initial_offset)
2509
          p->can_eliminate = 0;
2510
      break;
2511
 
2512
    default:
2513
      break;
2514
    }
2515
}
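/* Illustrative example (hypothetical offsets): suppose the frame
   pointer is being eliminated in favor of the stack pointer.  If a
   label is first recorded with an elimination offset of 16 and is later
   reached along a path where the current offset is 24, the comparison
   in the CODE_LABEL case above sees the disagreement and clears
   can_eliminate for that elimination pair.  */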
2516
 
2517
/* Scan X and replace any eliminable registers (such as fp) with a
2518
   replacement (such as sp), plus an offset.
2519
 
2520
   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2521
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2522
   MEM, we are allowed to replace a sum of a register and the constant zero
2523
   with the register, which we cannot do outside a MEM.  In addition, we need
2524
   to record the fact that a register is referenced outside a MEM.
2525
 
2526
   If INSN is an insn, it is the insn containing X.  If we replace a REG
2527
   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2528
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2529
   the REG is being modified.
2530
 
2531
   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2532
   That's used when we eliminate in expressions stored in notes.
2533
   This means, do not set ref_outside_mem even if the reference
2534
   is outside of MEMs.
2535
 
2536
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2537
   replacements done assuming all offsets are at their initial values.  If
2538
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2539
   encounter, return the actual location so that find_reloads will do
2540
   the proper thing.  */
2541
 
2542
static rtx
2543
eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2544
                  bool may_use_invariant)
2545
{
2546
  enum rtx_code code = GET_CODE (x);
2547
  struct elim_table *ep;
2548
  int regno;
2549
  rtx new_rtx;
2550
  int i, j;
2551
  const char *fmt;
2552
  int copied = 0;
2553
 
2554
  if (! current_function_decl)
2555
    return x;
2556
 
2557
  switch (code)
2558
    {
2559
    case CONST_INT:
2560
    case CONST_DOUBLE:
2561
    case CONST_FIXED:
2562
    case CONST_VECTOR:
2563
    case CONST:
2564
    case SYMBOL_REF:
2565
    case CODE_LABEL:
2566
    case PC:
2567
    case CC0:
2568
    case ASM_INPUT:
2569
    case ADDR_VEC:
2570
    case ADDR_DIFF_VEC:
2571
    case RETURN:
2572
      return x;
2573
 
2574
    case REG:
2575
      regno = REGNO (x);
2576
 
2577
      /* First handle the case where we encounter a bare register that
2578
         is eliminable.  Replace it with a PLUS.  */
2579
      if (regno < FIRST_PSEUDO_REGISTER)
2580
        {
2581
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2582
               ep++)
2583
            if (ep->from_rtx == x && ep->can_eliminate)
2584
              return plus_constant (ep->to_rtx, ep->previous_offset);
2585
 
2586
        }
2587
      else if (reg_renumber && reg_renumber[regno] < 0
2588
               && reg_equiv_invariant && reg_equiv_invariant[regno])
2589
        {
2590
          if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2591
            return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2592
                                     mem_mode, insn, true);
2593
          /* There exists at least one use of REGNO that cannot be
2594
             eliminated.  Prevent the defining insn from being deleted.  */
2595
          reg_equiv_init[regno] = NULL_RTX;
2596
          alter_reg (regno, -1, true);
2597
        }
2598
      return x;
2599
 
2600
    /* You might think handling MINUS in a manner similar to PLUS is a
2601
       good idea.  It is not.  It has been tried multiple times and every
2602
       time the change has had to have been reverted.
2603
 
2604
       Other parts of reload know a PLUS is special (gen_reload for example)
2605
       and require special code to handle a reloaded PLUS operand.
2606
 
2607
       Also consider backends where the flags register is clobbered by a
2608
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2609
       lea instruction comes to mind).  If we try to reload a MINUS, we
2610
       may kill the flags register that was holding a useful value.
2611
 
2612
       So, please before trying to handle MINUS, consider reload as a
2613
       whole instead of this little section as well as the backend issues.  */
2614
    case PLUS:
2615
      /* If this is the sum of an eliminable register and a constant, rework
2616
         the sum.  */
2617
      if (REG_P (XEXP (x, 0))
2618
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2619
          && CONSTANT_P (XEXP (x, 1)))
2620
        {
2621
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2622
               ep++)
2623
            if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2624
              {
2625
                /* The only time we want to replace a PLUS with a REG (this
2626
                   occurs when the constant operand of the PLUS is the negative
2627
                   of the offset) is when we are inside a MEM.  We won't want
2628
                   to do so at other times because that would change the
2629
                   structure of the insn in a way that reload can't handle.
2630
                   We special-case the commonest situation in
2631
                   eliminate_regs_in_insn, so just replace a PLUS with a
2632
                   PLUS here, unless inside a MEM.  */
2633
                if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2634
                    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2635
                  return ep->to_rtx;
2636
                else
2637
                  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2638
                                       plus_constant (XEXP (x, 1),
2639
                                                      ep->previous_offset));
2640
              }
2641
 
2642
          /* If the register is not eliminable, we are done since the other
2643
             operand is a constant.  */
2644
          return x;
2645
        }
2646
 
2647
      /* If this is part of an address, we want to bring any constant to the
2648
         outermost PLUS.  We will do this by doing register replacement in
2649
         our operands and seeing if a constant shows up in one of them.
2650
 
2651
         Note that there is no risk of modifying the structure of the insn,
2652
         since we only get called for its operands, thus we are either
2653
         modifying the address inside a MEM, or something like an address
2654
         operand of a load-address insn.  */
2655
 
2656
      {
2657
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2658
        rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2659
 
2660
        if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2661
          {
2662
            /* If one side is a PLUS and the other side is a pseudo that
2663
               didn't get a hard register but has a reg_equiv_constant,
2664
               we must replace the constant here since it may no longer
2665
               be in the position of any operand.  */
2666
            if (GET_CODE (new0) == PLUS && REG_P (new1)
2667
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2668
                && reg_renumber[REGNO (new1)] < 0
2669
                && reg_equiv_constant != 0
2670
                && reg_equiv_constant[REGNO (new1)] != 0)
2671
              new1 = reg_equiv_constant[REGNO (new1)];
2672
            else if (GET_CODE (new1) == PLUS && REG_P (new0)
2673
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2674
                     && reg_renumber[REGNO (new0)] < 0
2675
                     && reg_equiv_constant[REGNO (new0)] != 0)
2676
              new0 = reg_equiv_constant[REGNO (new0)];
2677
 
2678
            new_rtx = form_sum (GET_MODE (x), new0, new1);
2679
 
2680
            /* As above, if we are not inside a MEM we do not want to
2681
               turn a PLUS into something else.  We might try to do so here
2682
               for an addition of 0 if we aren't optimizing.  */
2683
            if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2684
              return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2685
            else
2686
              return new_rtx;
2687
          }
2688
      }
2689
      return x;
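    /* Illustrative example for the PLUS handling above (hypothetical
       offsets): with a frame-pointer-to-stack-pointer elimination whose
       previous_offset is 16, (plus fp (const_int 8)) is rewritten as
       (plus sp (const_int 24)), while (plus fp (const_int -16))
       appearing inside a MEM is replaced by the stack pointer itself,
       because the constant exactly cancels the offset.  */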
2690
 
2691
    case MULT:
2692
      /* If this is the product of an eliminable register and a
2693
         constant, apply the distribute law and move the constant out
2694
         so that we have (plus (mult ..) ..).  This is needed in order
2695
         to keep load-address insns valid.   This case is pathological.
2696
         We ignore the possibility of overflow here.  */
2697
      if (REG_P (XEXP (x, 0))
2698
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2699
          && CONST_INT_P (XEXP (x, 1)))
2700
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2701
             ep++)
2702
          if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2703
            {
2704
              if (! mem_mode
2705
                  /* Refs inside notes or in DEBUG_INSNs don't count for
2706
                     this purpose.  */
2707
                  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2708
                                      || GET_CODE (insn) == INSN_LIST
2709
                                      || DEBUG_INSN_P (insn))))
2710
                ep->ref_outside_mem = 1;
2711
 
2712
              return
2713
                plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2714
                               ep->previous_offset * INTVAL (XEXP (x, 1)));
2715
            }
2716
 
2717
      /* ... fall through ...  */
2718
 
2719
    case CALL:
2720
    case COMPARE:
2721
    /* See comments before PLUS about handling MINUS.  */
2722
    case MINUS:
2723
    case DIV:      case UDIV:
2724
    case MOD:      case UMOD:
2725
    case AND:      case IOR:      case XOR:
2726
    case ROTATERT: case ROTATE:
2727
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2728
    case NE:       case EQ:
2729
    case GE:       case GT:       case GEU:    case GTU:
2730
    case LE:       case LT:       case LEU:    case LTU:
2731
      {
2732
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2733
        rtx new1 = XEXP (x, 1)
2734
                   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2735
 
2736
        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2737
          return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2738
      }
2739
      return x;
2740
 
2741
    case EXPR_LIST:
2742
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2743
      if (XEXP (x, 0))
2744
        {
2745
          new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2746
          if (new_rtx != XEXP (x, 0))
2747
            {
2748
              /* If this is a REG_DEAD note, it is not valid anymore.
2749
                 Using the eliminated version could result in creating a
2750
                 REG_DEAD note for the stack or frame pointer.  */
2751
              if (REG_NOTE_KIND (x) == REG_DEAD)
2752
                return (XEXP (x, 1)
2753
                        ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2754
                        : NULL_RTX);
2755
 
2756
              x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2757
            }
2758
        }
2759
 
2760
      /* ... fall through ...  */
2761
 
2762
    case INSN_LIST:
2763
      /* Now do eliminations in the rest of the chain.  If this was
2764
         an EXPR_LIST, this might result in allocating more memory than is
2765
         strictly needed, but it simplifies the code.  */
2766
      if (XEXP (x, 1))
2767
        {
2768
          new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2769
          if (new_rtx != XEXP (x, 1))
2770
            return
2771
              gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2772
        }
2773
      return x;
2774
 
2775
    case PRE_INC:
2776
    case POST_INC:
2777
    case PRE_DEC:
2778
    case POST_DEC:
2779
      /* We do not support elimination of a register that is modified.
2780
         elimination_effects has already made sure that this does not
2781
         happen.  */
2782
      return x;
2783
 
2784
    case PRE_MODIFY:
2785
    case POST_MODIFY:
2786
      /* We do not support elimination of a register that is modified.
2787
         elimination_effects has already made sure that this does not
2788
         happen.  The only remaining case we need to consider here is
2789
         that the increment value may be an eliminable register.  */
2790
      if (GET_CODE (XEXP (x, 1)) == PLUS
2791
          && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2792
        {
2793
          rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2794
                                      insn, true);
2795
 
2796
          if (new_rtx != XEXP (XEXP (x, 1), 1))
2797
            return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2798
                                   gen_rtx_PLUS (GET_MODE (x),
2799
                                                 XEXP (x, 0), new_rtx));
2800
        }
2801
      return x;
2802
 
2803
    case STRICT_LOW_PART:
2804
    case NEG:          case NOT:
2805
    case SIGN_EXTEND:  case ZERO_EXTEND:
2806
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2807
    case FLOAT:        case FIX:
2808
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2809
    case ABS:
2810
    case SQRT:
2811
    case FFS:
2812
    case CLZ:
2813
    case CTZ:
2814
    case POPCOUNT:
2815
    case PARITY:
2816
    case BSWAP:
2817
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2818
      if (new_rtx != XEXP (x, 0))
2819
        return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2820
      return x;
2821
 
2822
    case SUBREG:
2823
      /* Similar to above processing, but preserve SUBREG_BYTE.
2824
         Convert (subreg (mem)) to (mem) if not paradoxical.
2825
         Also, if we have a non-paradoxical (subreg (pseudo)) and the
2826
         pseudo didn't get a hard reg, we must replace this with the
2827
         eliminated version of the memory location because push_reload
2828
         may do the replacement in certain circumstances.  */
2829
      if (REG_P (SUBREG_REG (x))
2830
          && (GET_MODE_SIZE (GET_MODE (x))
2831
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2832
          && reg_equiv_memory_loc != 0
2833
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2834
        {
2835
          new_rtx = SUBREG_REG (x);
2836
        }
2837
      else
2838
        new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2839
 
2840
      if (new_rtx != SUBREG_REG (x))
2841
        {
2842
          int x_size = GET_MODE_SIZE (GET_MODE (x));
2843
          int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2844
 
2845
          if (MEM_P (new_rtx)
2846
              && ((x_size < new_size
2847
#ifdef WORD_REGISTER_OPERATIONS
2848
                   /* On these machines, combine can create rtl of the form
2849
                      (set (subreg:m1 (reg:m2 R) 0) ...)
2850
                      where m1 < m2, and expects something interesting to
2851
                      happen to the entire word.  Moreover, it will use the
2852
                      (reg:m2 R) later, expecting all bits to be preserved.
2853
                      So if the number of words is the same, preserve the
2854
                      subreg so that push_reload can see it.  */
2855
                   && ! ((x_size - 1) / UNITS_PER_WORD
2856
                         == (new_size - 1) / UNITS_PER_WORD)
2857
#endif
2858
                   )
2859
                  || x_size == new_size)
2860
              )
2861
            return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2862
          else
2863
            return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2864
        }
2865
 
2866
      return x;
2867
 
2868
    case MEM:
2869
      /* Our only special processing is to pass the mode of the MEM to our
2870
         recursive call and copy the flags.  While we are here, handle this
2871
         case more efficiently.  */
2872
      return
2873
        replace_equiv_address_nv (x,
2874
                                  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2875
                                                    insn, true));
2876
 
2877
    case USE:
2878
      /* Handle insn_list USE that a call to a pure function may generate.  */
2879
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
2880
      if (new_rtx != XEXP (x, 0))
2881
        return gen_rtx_USE (GET_MODE (x), new_rtx);
2882
      return x;
2883
 
2884
    case CLOBBER:
2885
      gcc_assert (insn && DEBUG_INSN_P (insn));
2886
      break;
2887
 
2888
    case ASM_OPERANDS:
2889
    case SET:
2890
      gcc_unreachable ();
2891
 
2892
    default:
2893
      break;
2894
    }
2895
 
2896
  /* Process each of our operands recursively.  If any have changed, make a
2897
     copy of the rtx.  */
2898
  fmt = GET_RTX_FORMAT (code);
2899
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2900
    {
2901
      if (*fmt == 'e')
2902
        {
2903
          new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2904
          if (new_rtx != XEXP (x, i) && ! copied)
2905
            {
2906
              x = shallow_copy_rtx (x);
2907
              copied = 1;
2908
            }
2909
          XEXP (x, i) = new_rtx;
2910
        }
2911
      else if (*fmt == 'E')
2912
        {
2913
          int copied_vec = 0;
2914
          for (j = 0; j < XVECLEN (x, i); j++)
2915
            {
2916
              new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2917
              if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2918
                {
2919
                  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2920
                                             XVEC (x, i)->elem);
2921
                  if (! copied)
2922
                    {
2923
                      x = shallow_copy_rtx (x);
2924
                      copied = 1;
2925
                    }
2926
                  XVEC (x, i) = new_v;
2927
                  copied_vec = 1;
2928
                }
2929
              XVECEXP (x, i, j) = new_rtx;
2930
            }
2931
        }
2932
    }
2933
 
2934
  return x;
2935
}
2936
 
2937
rtx
2938
eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2939
{
2940
  return eliminate_regs_1 (x, mem_mode, insn, false);
2941
}
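/* Usage sketch (hypothetical caller, compiled out): rewriting the address
   of a MEM through the wrapper above, in the same way reload_as_needed
   handles USE and CLOBBER of a MEM later in this file.  The result must be
   stored back, because a new rtx is returned whenever anything changed.  */
#if 0
static void
example_eliminate_mem_address (rtx mem)
{
  XEXP (mem, 0) = eliminate_regs (XEXP (mem, 0), GET_MODE (mem), NULL_RTX);
}
#endif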
2942
 
2943
/* Scan rtx X for modifications of elimination target registers.  Update
2944
   the table of eliminables to reflect the changed state.  MEM_MODE is
2945
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2946
 
2947
static void
2948
elimination_effects (rtx x, enum machine_mode mem_mode)
2949
{
2950
  enum rtx_code code = GET_CODE (x);
2951
  struct elim_table *ep;
2952
  int regno;
2953
  int i, j;
2954
  const char *fmt;
2955
 
2956
  switch (code)
2957
    {
2958
    case CONST_INT:
2959
    case CONST_DOUBLE:
2960
    case CONST_FIXED:
2961
    case CONST_VECTOR:
2962
    case CONST:
2963
    case SYMBOL_REF:
2964
    case CODE_LABEL:
2965
    case PC:
2966
    case CC0:
2967
    case ASM_INPUT:
2968
    case ADDR_VEC:
2969
    case ADDR_DIFF_VEC:
2970
    case RETURN:
2971
      return;
2972
 
2973
    case REG:
2974
      regno = REGNO (x);
2975
 
2976
      /* First handle the case where we encounter a bare register that
2977
         is eliminable.  Note whether it is referenced outside a MEM.  */
2978
      if (regno < FIRST_PSEUDO_REGISTER)
2979
        {
2980
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2981
               ep++)
2982
            if (ep->from_rtx == x && ep->can_eliminate)
2983
              {
2984
                if (! mem_mode)
2985
                  ep->ref_outside_mem = 1;
2986
                return;
2987
              }
2988
 
2989
        }
2990
      else if (reg_renumber[regno] < 0 && reg_equiv_constant
2991
               && reg_equiv_constant[regno]
2992
               && ! function_invariant_p (reg_equiv_constant[regno]))
2993
        elimination_effects (reg_equiv_constant[regno], mem_mode);
2994
      return;
2995
 
2996
    case PRE_INC:
2997
    case POST_INC:
2998
    case PRE_DEC:
2999
    case POST_DEC:
3000
    case POST_MODIFY:
3001
    case PRE_MODIFY:
3002
      /* If we modify the source of an elimination rule, disable it.  */
3003
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3004
        if (ep->from_rtx == XEXP (x, 0))
3005
          ep->can_eliminate = 0;
3006
 
3007
      /* If we modify the target of an elimination rule by adding a constant,
3008
         update its offset.  If we modify the target in any other way, we'll
3009
         have to disable the rule as well.  */
3010
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011
        if (ep->to_rtx == XEXP (x, 0))
3012
          {
3013
            int size = GET_MODE_SIZE (mem_mode);
3014
 
3015
            /* If more bytes than MEM_MODE are pushed, account for them.  */
3016
#ifdef PUSH_ROUNDING
3017
            if (ep->to_rtx == stack_pointer_rtx)
3018
              size = PUSH_ROUNDING (size);
3019
#endif
3020
            if (code == PRE_DEC || code == POST_DEC)
3021
              ep->offset += size;
3022
            else if (code == PRE_INC || code == POST_INC)
3023
              ep->offset -= size;
3024
            else if (code == PRE_MODIFY || code == POST_MODIFY)
3025
              {
3026
                if (GET_CODE (XEXP (x, 1)) == PLUS
3027
                    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3028
                    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3029
                  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3030
                else
3031
                  ep->can_eliminate = 0;
3032
              }
3033
          }
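      /* Worked example (hypothetical mode and target): a push through
         (mem:SI (pre_dec (reg sp))) arrives here with mem_mode SImode, so
         every rule whose to_rtx is sp has its offset increased by 4 (or by
         PUSH_ROUNDING (4) where push sizes are rounded), keeping later
         sp-relative references in this insn consistent.  */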
3034
 
3035
      /* These two aren't unary operators.  */
3036
      if (code == POST_MODIFY || code == PRE_MODIFY)
3037
        break;
3038
 
3039
      /* Fall through to generic unary operation case.  */
3040
    case STRICT_LOW_PART:
3041
    case NEG:          case NOT:
3042
    case SIGN_EXTEND:  case ZERO_EXTEND:
3043
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3044
    case FLOAT:        case FIX:
3045
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3046
    case ABS:
3047
    case SQRT:
3048
    case FFS:
3049
    case CLZ:
3050
    case CTZ:
3051
    case POPCOUNT:
3052
    case PARITY:
3053
    case BSWAP:
3054
      elimination_effects (XEXP (x, 0), mem_mode);
3055
      return;
3056
 
3057
    case SUBREG:
3058
      if (REG_P (SUBREG_REG (x))
3059
          && (GET_MODE_SIZE (GET_MODE (x))
3060
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3061
          && reg_equiv_memory_loc != 0
3062
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3063
        return;
3064
 
3065
      elimination_effects (SUBREG_REG (x), mem_mode);
3066
      return;
3067
 
3068
    case USE:
3069
      /* If using a register that is the source of an elimination we still
3070
         think can be performed, note it cannot be performed since we don't
3071
         know how this register is used.  */
3072
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3073
        if (ep->from_rtx == XEXP (x, 0))
3074
          ep->can_eliminate = 0;
3075
 
3076
      elimination_effects (XEXP (x, 0), mem_mode);
3077
      return;
3078
 
3079
    case CLOBBER:
3080
      /* If clobbering a register that is the replacement register for an
3081
         elimination we still think can be performed, note that it cannot
3082
         be performed.  Otherwise, we need not be concerned about it.  */
3083
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3084
        if (ep->to_rtx == XEXP (x, 0))
3085
          ep->can_eliminate = 0;
3086
 
3087
      elimination_effects (XEXP (x, 0), mem_mode);
3088
      return;
3089
 
3090
    case SET:
3091
      /* Check for setting a register that we know about.  */
3092
      if (REG_P (SET_DEST (x)))
3093
        {
3094
          /* See if this is setting the replacement register for an
3095
             elimination.
3096
 
3097
             If DEST is the hard frame pointer, we do nothing because we
3098
             assume that all assignments to the frame pointer are for
3099
             non-local gotos and are being done at a time when they are valid
3100
             and do not disturb anything else.  Some machines want to
3101
             eliminate a fake argument pointer (or even a fake frame pointer)
3102
             with either the real frame or the stack pointer.  Assignments to
3103
             the hard frame pointer must not prevent this elimination.  */
3104
 
3105
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3106
               ep++)
3107
            if (ep->to_rtx == SET_DEST (x)
3108
                && SET_DEST (x) != hard_frame_pointer_rtx)
3109
              {
3110
                /* If it is being incremented, adjust the offset.  Otherwise,
3111
                   this elimination can't be done.  */
3112
                rtx src = SET_SRC (x);
3113
 
3114
                if (GET_CODE (src) == PLUS
3115
                    && XEXP (src, 0) == SET_DEST (x)
3116
                    && CONST_INT_P (XEXP (src, 1)))
3117
                  ep->offset -= INTVAL (XEXP (src, 1));
3118
                else
3119
                  ep->can_eliminate = 0;
3120
              }
3121
        }
3122
 
3123
      elimination_effects (SET_DEST (x), VOIDmode);
3124
      elimination_effects (SET_SRC (x), VOIDmode);
3125
      return;
3126
 
3127
    case MEM:
3128
      /* Our only special processing is to pass the mode of the MEM to our
3129
         recursive call.  */
3130
      elimination_effects (XEXP (x, 0), GET_MODE (x));
3131
      return;
3132
 
3133
    default:
3134
      break;
3135
    }
3136
 
3137
  fmt = GET_RTX_FORMAT (code);
3138
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3139
    {
3140
      if (*fmt == 'e')
3141
        elimination_effects (XEXP (x, i), mem_mode);
3142
      else if (*fmt == 'E')
3143
        for (j = 0; j < XVECLEN (x, i); j++)
3144
          elimination_effects (XVECEXP (x, i, j), mem_mode);
3145
    }
3146
}
3147
 
3148
/* Descend through rtx X and verify that no references to eliminable registers
3149
   remain.  If any do remain, mark the involved register as not
3150
   eliminable.  */
3151
 
3152
static void
3153
check_eliminable_occurrences (rtx x)
3154
{
3155
  const char *fmt;
3156
  int i;
3157
  enum rtx_code code;
3158
 
3159
  if (x == 0)
3160
    return;
3161
 
3162
  code = GET_CODE (x);
3163
 
3164
  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3165
    {
3166
      struct elim_table *ep;
3167
 
3168
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3169
        if (ep->from_rtx == x)
3170
          ep->can_eliminate = 0;
3171
      return;
3172
    }
3173
 
3174
  fmt = GET_RTX_FORMAT (code);
3175
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3176
    {
3177
      if (*fmt == 'e')
3178
        check_eliminable_occurrences (XEXP (x, i));
3179
      else if (*fmt == 'E')
3180
        {
3181
          int j;
3182
          for (j = 0; j < XVECLEN (x, i); j++)
3183
            check_eliminable_occurrences (XVECEXP (x, i, j));
3184
        }
3185
    }
3186
}
3187
 
3188
/* Scan INSN and eliminate all eliminable registers in it.
3189
 
3190
   If REPLACE is nonzero, do the replacement destructively.  Also
3191
   delete the insn as dead if it is setting an eliminable register.
3192
 
3193
   If REPLACE is zero, do all our allocations in reload_obstack.
3194
 
3195
   If no eliminations were done and this insn doesn't require any elimination
3196
   processing (these are not identical conditions: it might be updating sp,
3197
   but not referencing fp; this needs to be seen during reload_as_needed so
3198
   that the offset between fp and sp can be taken into consideration), zero
3199
   is returned.  Otherwise, 1 is returned.  */
3200
 
3201
static int
3202
eliminate_regs_in_insn (rtx insn, int replace)
3203
{
3204
  int icode = recog_memoized (insn);
3205
  rtx old_body = PATTERN (insn);
3206
  int insn_is_asm = asm_noperands (old_body) >= 0;
3207
  rtx old_set = single_set (insn);
3208
  rtx new_body;
3209
  int val = 0;
3210
  int i;
3211
  rtx substed_operand[MAX_RECOG_OPERANDS];
3212
  rtx orig_operand[MAX_RECOG_OPERANDS];
3213
  struct elim_table *ep;
3214
  rtx plus_src, plus_cst_src;
3215
 
3216
  if (! insn_is_asm && icode < 0)
3217
    {
3218
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
3219
                  || GET_CODE (PATTERN (insn)) == CLOBBER
3220
                  || GET_CODE (PATTERN (insn)) == ADDR_VEC
3221
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3222
                  || GET_CODE (PATTERN (insn)) == ASM_INPUT
3223
                  || DEBUG_INSN_P (insn));
3224
      if (DEBUG_INSN_P (insn))
3225
        INSN_VAR_LOCATION_LOC (insn)
3226
          = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3227
      return 0;
3228
    }
3229
 
3230
  if (old_set != 0 && REG_P (SET_DEST (old_set))
3231
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3232
    {
3233
      /* Check for setting an eliminable register.  */
3234
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235
        if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3236
          {
3237
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3238
            /* If this is setting the frame pointer register to the
3239
               hardware frame pointer register and this is an elimination
3240
               that will be done (tested above), this insn is really
3241
               adjusting the frame pointer downward to compensate for
3242
               the adjustment done before a nonlocal goto.  */
3243
            if (ep->from == FRAME_POINTER_REGNUM
3244
                && ep->to == HARD_FRAME_POINTER_REGNUM)
3245
              {
3246
                rtx base = SET_SRC (old_set);
3247
                rtx base_insn = insn;
3248
                HOST_WIDE_INT offset = 0;
3249
 
3250
                while (base != ep->to_rtx)
3251
                  {
3252
                    rtx prev_insn, prev_set;
3253
 
3254
                    if (GET_CODE (base) == PLUS
3255
                        && CONST_INT_P (XEXP (base, 1)))
3256
                      {
3257
                        offset += INTVAL (XEXP (base, 1));
3258
                        base = XEXP (base, 0);
3259
                      }
3260
                    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3261
                             && (prev_set = single_set (prev_insn)) != 0
3262
                             && rtx_equal_p (SET_DEST (prev_set), base))
3263
                      {
3264
                        base = SET_SRC (prev_set);
3265
                        base_insn = prev_insn;
3266
                      }
3267
                    else
3268
                      break;
3269
                  }
3270
 
3271
                if (base == ep->to_rtx)
3272
                  {
3273
                    rtx src
3274
                      = plus_constant (ep->to_rtx, offset - ep->offset);
3275
 
3276
                    new_body = old_body;
3277
                    if (! replace)
3278
                      {
3279
                        new_body = copy_insn (old_body);
3280
                        if (REG_NOTES (insn))
3281
                          REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3282
                      }
3283
                    PATTERN (insn) = new_body;
3284
                    old_set = single_set (insn);
3285
 
3286
                    /* First see if this insn remains valid when we
3287
                       make the change.  If not, keep the INSN_CODE
3288
                       the same and let reload fix it up.  */
3289
                    validate_change (insn, &SET_SRC (old_set), src, 1);
3290
                    validate_change (insn, &SET_DEST (old_set),
3291
                                     ep->to_rtx, 1);
3292
                    if (! apply_change_group ())
3293
                      {
3294
                        SET_SRC (old_set) = src;
3295
                        SET_DEST (old_set) = ep->to_rtx;
3296
                      }
3297
 
3298
                    val = 1;
3299
                    goto done;
3300
                  }
3301
              }
3302
#endif
3303
 
3304
            /* In this case this insn isn't serving a useful purpose.  We
3305
               will delete it in reload_as_needed once we know that this
3306
               elimination is, in fact, being done.
3307
 
3308
               If REPLACE isn't set, we can't delete this insn, but needn't
3309
               process it since it won't be used unless something changes.  */
3310
            if (replace)
3311
              {
3312
                delete_dead_insn (insn);
3313
                return 1;
3314
              }
3315
            val = 1;
3316
            goto done;
3317
          }
3318
    }
3319
 
3320
  /* We allow one special case which happens to work on all machines we
3321
     currently support: a single set with the source or a REG_EQUAL
3322
     note being a PLUS of an eliminable register and a constant.  */
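  /* For illustration (the pseudo 100 and the offset 16 are hypothetical):
     with an fp-into-sp elimination whose current offset is 16,
       (set (reg 100) (plus (reg fp) (const_int 8)))
     is rewritten below into
       (set (reg 100) (plus (reg sp) (const_int 24)))
     by substituting ep->to_rtx and folding ep->offset into the constant.  */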
3323
  plus_src = plus_cst_src = 0;
3324
  if (old_set && REG_P (SET_DEST (old_set)))
3325
    {
3326
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3327
        plus_src = SET_SRC (old_set);
3328
      /* First see if the source is of the form (plus (...) CST).  */
3329
      if (plus_src
3330
          && CONST_INT_P (XEXP (plus_src, 1)))
3331
        plus_cst_src = plus_src;
3332
      else if (REG_P (SET_SRC (old_set))
3333
               || plus_src)
3334
        {
3335
          /* Otherwise, see if we have a REG_EQUAL note of the form
3336
             (plus (...) CST).  */
3337
          rtx links;
3338
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3339
            {
3340
              if ((REG_NOTE_KIND (links) == REG_EQUAL
3341
                   || REG_NOTE_KIND (links) == REG_EQUIV)
3342
                  && GET_CODE (XEXP (links, 0)) == PLUS
3343
                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3344
                {
3345
                  plus_cst_src = XEXP (links, 0);
3346
                  break;
3347
                }
3348
            }
3349
        }
3350
 
3351
      /* Check that the first operand of the PLUS is a hard reg or
3352
         the lowpart subreg of one.  */
3353
      if (plus_cst_src)
3354
        {
3355
          rtx reg = XEXP (plus_cst_src, 0);
3356
          if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3357
            reg = SUBREG_REG (reg);
3358
 
3359
          if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3360
            plus_cst_src = 0;
3361
        }
3362
    }
3363
  if (plus_cst_src)
3364
    {
3365
      rtx reg = XEXP (plus_cst_src, 0);
3366
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3367
 
3368
      if (GET_CODE (reg) == SUBREG)
3369
        reg = SUBREG_REG (reg);
3370
 
3371
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3372
        if (ep->from_rtx == reg && ep->can_eliminate)
3373
          {
3374
            rtx to_rtx = ep->to_rtx;
3375
            offset += ep->offset;
3376
            offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3377
 
3378
            if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3379
              to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3380
                                    to_rtx);
3381
            /* If we have a nonzero offset, and the source is already
3382
               a simple REG, the following transformation would
3383
               increase the cost of the insn by replacing a simple REG
3384
               with (plus (reg sp) CST).  So try only when we already
3385
               had a PLUS before.  */
3386
            if (offset == 0 || plus_src)
3387
              {
3388
                rtx new_src = plus_constant (to_rtx, offset);
3389
 
3390
                new_body = old_body;
3391
                if (! replace)
3392
                  {
3393
                    new_body = copy_insn (old_body);
3394
                    if (REG_NOTES (insn))
3395
                      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3396
                  }
3397
                PATTERN (insn) = new_body;
3398
                old_set = single_set (insn);
3399
 
3400
                /* First see if this insn remains valid when we make the
3401
                   change.  If not, try to replace the whole pattern with
3402
                   a simple set (this may help if the original insn was a
3403
                   PARALLEL that was only recognized as single_set due to
3404
                   REG_UNUSED notes).  If this isn't valid either, keep
3405
                   the INSN_CODE the same and let reload fix it up.  */
3406
                if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3407
                  {
3408
                    rtx new_pat = gen_rtx_SET (VOIDmode,
3409
                                               SET_DEST (old_set), new_src);
3410
 
3411
                    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3412
                      SET_SRC (old_set) = new_src;
3413
                  }
3414
              }
3415
            else
3416
              break;
3417
 
3418
            val = 1;
3419
            /* This can't have an effect on elimination offsets, so skip right
3420
               to the end.  */
3421
            goto done;
3422
          }
3423
    }
3424
 
3425
  /* Determine the effects of this insn on elimination offsets.  */
3426
  elimination_effects (old_body, VOIDmode);
3427
 
3428
  /* Eliminate all eliminable registers occurring in operands that
3429
     can be handled by reload.  */
3430
  extract_insn (insn);
3431
  for (i = 0; i < recog_data.n_operands; i++)
3432
    {
3433
      orig_operand[i] = recog_data.operand[i];
3434
      substed_operand[i] = recog_data.operand[i];
3435
 
3436
      /* For an asm statement, every operand is eliminable.  */
3437
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3438
        {
3439
          bool is_set_src, in_plus;
3440
 
3441
          /* Check for setting a register that we know about.  */
3442
          if (recog_data.operand_type[i] != OP_IN
3443
              && REG_P (orig_operand[i]))
3444
            {
3445
              /* If we are assigning to a register that can be eliminated, it
3446
                 must be as part of a PARALLEL, since the code above handles
3447
                 single SETs.  We must indicate that we can no longer
3448
                 eliminate this reg.  */
3449
              for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3450
                   ep++)
3451
                if (ep->from_rtx == orig_operand[i])
3452
                  ep->can_eliminate = 0;
3453
            }
3454
 
3455
          /* Companion to the above plus substitution, we can allow
3456
             invariants as the source of a plain move.  */
3457
          is_set_src = false;
3458
          if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3459
            is_set_src = true;
3460
          in_plus = false;
3461
          if (plus_src
3462
              && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3463
                  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3464
            in_plus = true;
3465
 
3466
          substed_operand[i]
3467
            = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3468
                                replace ? insn : NULL_RTX,
3469
                                is_set_src || in_plus);
3470
          if (substed_operand[i] != orig_operand[i])
3471
            val = 1;
3472
          /* Terminate the search in check_eliminable_occurrences at
3473
             this point.  */
3474
          *recog_data.operand_loc[i] = 0;
3475
 
3476
          /* If an output operand changed from a REG to a MEM and INSN is an
3477
             insn, write a CLOBBER insn.  */
3478
          if (recog_data.operand_type[i] != OP_IN
3479
              && REG_P (orig_operand[i])
3480
              && MEM_P (substed_operand[i])
3481
              && replace)
3482
            emit_insn_after (gen_clobber (orig_operand[i]), insn);
3483
        }
3484
    }
3485
 
3486
  for (i = 0; i < recog_data.n_dups; i++)
3487
    *recog_data.dup_loc[i]
3488
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3489
 
3490
  /* If any eliminable registers remain, they aren't eliminable anymore.  */
3491
  check_eliminable_occurrences (old_body);
3492
 
3493
  /* Substitute the operands; the new values are in the substed_operand
3494
     array.  */
3495
  for (i = 0; i < recog_data.n_operands; i++)
3496
    *recog_data.operand_loc[i] = substed_operand[i];
3497
  for (i = 0; i < recog_data.n_dups; i++)
3498
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3499
 
3500
  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3501
     re-recognize the insn.  We do this in case we had a simple addition
3502
     but now can do this as a load-address.  This saves an insn in this
3503
     common case.
3504
     If re-recognition fails, the old insn code number will still be used,
3505
     and some register operands may have changed into PLUS expressions.
3506
     These will be handled by find_reloads by loading them into a register
3507
     again.  */
3508
 
3509
  if (val)
3510
    {
3511
      /* If we aren't replacing things permanently and we changed something,
3512
         make another copy to ensure that all the RTL is new.  Otherwise
3513
         things can go wrong if find_reloads swaps commutative operands
3514
         and one is inside RTL that has been copied while the other is not.  */
3515
      new_body = old_body;
3516
      if (! replace)
3517
        {
3518
          new_body = copy_insn (old_body);
3519
          if (REG_NOTES (insn))
3520
            REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3521
        }
3522
      PATTERN (insn) = new_body;
3523
 
3524
      /* If we had a move insn but now we don't, rerecognize it.  This will
3525
         cause spurious re-recognition if the old move had a PARALLEL since
3526
         the new one still will, but we can't call single_set without
3527
         having put NEW_BODY into the insn and the re-recognition won't
3528
         hurt in this rare case.  */
3529
      /* ??? Why this huge if statement - why don't we just rerecognize the
3530
         thing always?  */
3531
      if (! insn_is_asm
3532
          && old_set != 0
3533
          && ((REG_P (SET_SRC (old_set))
3534
               && (GET_CODE (new_body) != SET
3535
                   || !REG_P (SET_SRC (new_body))))
3536
              /* If this was a load from or store to memory, compare
3537
                 the MEM in recog_data.operand to the one in the insn.
3538
                 If they are not equal, then rerecognize the insn.  */
3539
              || (old_set != 0
3540
                  && ((MEM_P (SET_SRC (old_set))
3541
                       && SET_SRC (old_set) != recog_data.operand[1])
3542
                      || (MEM_P (SET_DEST (old_set))
3543
                          && SET_DEST (old_set) != recog_data.operand[0])))
3544
              /* If this was an add insn before, rerecognize.  */
3545
              || GET_CODE (SET_SRC (old_set)) == PLUS))
3546
        {
3547
          int new_icode = recog (PATTERN (insn), insn, 0);
3548
          if (new_icode >= 0)
3549
            INSN_CODE (insn) = new_icode;
3550
        }
3551
    }
3552
 
3553
  /* Restore the old body.  If there were any changes to it, we made a copy
3554
     of it while the changes were still in place, so we'll correctly return
3555
     a modified insn below.  */
3556
  if (! replace)
3557
    {
3558
      /* Restore the old body.  */
3559
      for (i = 0; i < recog_data.n_operands; i++)
3560
        *recog_data.operand_loc[i] = orig_operand[i];
3561
      for (i = 0; i < recog_data.n_dups; i++)
3562
        *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3563
    }
3564
 
3565
  /* Update all elimination pairs to reflect the status after the current
3566
     insn.  The changes we make were determined by the earlier call to
3567
     elimination_effects.
3568
 
3569
     We also detect cases where register elimination cannot be done,
3570
     namely, if a register would be both changed and referenced outside a MEM
3571
     in the resulting insn since such an insn is often undefined and, even if
3572
     not, we cannot know what meaning will be given to it.  Note that it is
3573
     valid to have a register used in an address in an insn that changes it
3574
     (presumably with a pre- or post-increment or decrement).
3575
 
3576
     If anything changes, return nonzero.  */
3577
 
3578
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3579
    {
3580
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3581
        ep->can_eliminate = 0;
3582
 
3583
      ep->ref_outside_mem = 0;
3584
 
3585
      if (ep->previous_offset != ep->offset)
3586
        val = 1;
3587
    }
3588
 
3589
 done:
3590
  /* If we changed something, perform elimination in REG_NOTES.  This is
3591
     needed even when REPLACE is zero because a REG_DEAD note might refer
3592
     to a register that we eliminate and could cause a different number
3593
     of spill registers to be needed in the final reload pass than in
3594
     the pre-passes.  */
3595
  if (val && REG_NOTES (insn) != 0)
3596
    REG_NOTES (insn)
3597
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);
3598
 
3599
  return val;
3600
}
3601
 
3602
/* Loop through all elimination pairs.
3603
   Recalculate the number not at initial offset.
3604
 
3605
   Compute the maximum offset (minimum offset if the stack does not
3606
   grow downward) for each elimination pair.  */
3607
 
3608
static void
3609
update_eliminable_offsets (void)
3610
{
3611
  struct elim_table *ep;
3612
 
3613
  num_not_at_initial_offset = 0;
3614
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3615
    {
3616
      ep->previous_offset = ep->offset;
3617
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3618
        num_not_at_initial_offset++;
3619
    }
3620
}
3621
 
3622
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3623
   replacement we currently believe is valid, mark it as not eliminable if X
3624
   modifies DEST in any way other than by adding a constant integer to it.
3625
 
3626
   If DEST is the hard frame pointer, we do nothing because we assume that
3627
   all assignments to the hard frame pointer are nonlocal gotos and are being
3628
   done at a time when they are valid and do not disturb anything else.
3629
   Some machines want to eliminate a fake argument pointer with either the
3630
   frame or stack pointer.  Assignments to the hard frame pointer must not
3631
   prevent this elimination.
3632
 
3633
   Called via note_stores from reload before starting its passes to scan
3634
   the insns of the function.  */
3635
 
3636
static void
3637
mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3638
{
3639
  unsigned int i;
3640
 
3641
  /* A SUBREG of a hard register here is just changing its mode.  We should
3642
     not see a SUBREG of an eliminable hard register, but check just in
3643
     case.  */
3644
  if (GET_CODE (dest) == SUBREG)
3645
    dest = SUBREG_REG (dest);
3646
 
3647
  if (dest == hard_frame_pointer_rtx)
3648
    return;
3649
 
3650
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3651
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3652
        && (GET_CODE (x) != SET
3653
            || GET_CODE (SET_SRC (x)) != PLUS
3654
            || XEXP (SET_SRC (x), 0) != dest
3655
            || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3656
      {
3657
        reg_eliminate[i].can_eliminate_previous
3658
          = reg_eliminate[i].can_eliminate = 0;
3659
        num_eliminable--;
3660
      }
3661
}
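/* For illustration (the register numbers are hypothetical): a stack
   adjustment such as (set (reg sp) (plus (reg sp) (const_int -32))) leaves
   eliminations into sp intact, while any other modification of the target,
   e.g. (set (reg sp) (reg 3)) or a clobber of sp, makes the function above
   disable every rule whose to_rtx is sp.  */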
3662
 
3663
/* Verify that the initial elimination offsets did not change since the
3664
   last call to set_initial_elim_offsets.  This is used to catch cases
3665
   where something illegal happened during reload_as_needed that could
3666
   cause incorrect code to be generated if we did not check for it.  */
3667
 
3668
static bool
3669
verify_initial_elim_offsets (void)
3670
{
3671
  HOST_WIDE_INT t;
3672
 
3673
  if (!num_eliminable)
3674
    return true;
3675
 
3676
#ifdef ELIMINABLE_REGS
3677
  {
3678
   struct elim_table *ep;
3679
 
3680
   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3681
     {
3682
       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3683
       if (t != ep->initial_offset)
3684
         return false;
3685
     }
3686
  }
3687
#else
3688
  INITIAL_FRAME_POINTER_OFFSET (t);
3689
  if (t != reg_eliminate[0].initial_offset)
3690
    return false;
3691
#endif
3692
 
3693
  return true;
3694
}
3695
 
3696
/* Reset all offsets on eliminable registers to their initial values.  */
3697
 
3698
static void
3699
set_initial_elim_offsets (void)
3700
{
3701
  struct elim_table *ep = reg_eliminate;
3702
 
3703
#ifdef ELIMINABLE_REGS
3704
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3705
    {
3706
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3707
      ep->previous_offset = ep->offset = ep->initial_offset;
3708
    }
3709
#else
3710
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3711
  ep->previous_offset = ep->offset = ep->initial_offset;
3712
#endif
3713
 
3714
  num_not_at_initial_offset = 0;
3715
}
3716
 
3717
/* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3718
 
3719
static void
3720
set_initial_eh_label_offset (rtx label)
3721
{
3722
  set_label_offsets (label, NULL_RTX, 1);
3723
}
3724
 
3725
/* Initialize the known label offsets.
3726
   Set a known offset for each forced label to be at the initial offset
3727
   of each elimination.  We do this because we assume that all
3728
   computed jumps occur from a location where each elimination is
3729
   at its initial offset.
3730
   For all other labels, show that we don't know the offsets.  */
3731
 
3732
static void
3733
set_initial_label_offsets (void)
3734
{
3735
  rtx x;
3736
  memset (offsets_known_at, 0, num_labels);
3737
 
3738
  for (x = forced_labels; x; x = XEXP (x, 1))
3739
    if (XEXP (x, 0))
3740
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3741
 
3742
  for_each_eh_label (set_initial_eh_label_offset);
3743
}
3744
 
3745
/* Set all elimination offsets to the known values for the code label given
3746
   by INSN.  */
3747
 
3748
static void
3749
set_offsets_for_label (rtx insn)
3750
{
3751
  unsigned int i;
3752
  int label_nr = CODE_LABEL_NUMBER (insn);
3753
  struct elim_table *ep;
3754
 
3755
  num_not_at_initial_offset = 0;
3756
  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3757
    {
3758
      ep->offset = ep->previous_offset
3759
                 = offsets_at[label_nr - first_label_num][i];
3760
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3761
        num_not_at_initial_offset++;
3762
    }
3763
}
3764
 
3765
/* See if anything that happened changes which eliminations are valid.
3766
   For example, on the SPARC, whether or not the frame pointer can
3767
   be eliminated can depend on what registers have been used.  We need
3768
   not check some conditions again (such as flag_omit_frame_pointer)
3769
   since they can't have changed.  */
3770
 
3771
static void
3772
update_eliminables (HARD_REG_SET *pset)
3773
{
3774
  int previous_frame_pointer_needed = frame_pointer_needed;
3775
  struct elim_table *ep;
3776
 
3777
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3778
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
3779
         && targetm.frame_pointer_required ())
3780
#ifdef ELIMINABLE_REGS
3781
        || ! targetm.can_eliminate (ep->from, ep->to)
3782
#endif
3783
        )
3784
      ep->can_eliminate = 0;
3785
 
3786
  /* Look for the case where we have discovered that we can't replace
3787
     register A with register B and that means that we will now be
3788
     trying to replace register A with register C.  This means we can
3789
     no longer replace register C with register B and we need to disable
3790
     such an elimination, if it exists.  This occurs often with A == ap,
3791
     B == sp, and C == fp.  */
3792
 
3793
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3794
    {
3795
      struct elim_table *op;
3796
      int new_to = -1;
3797
 
3798
      if (! ep->can_eliminate && ep->can_eliminate_previous)
3799
        {
3800
          /* Find the current elimination for ep->from, if there is a
3801
             new one.  */
3802
          for (op = reg_eliminate;
3803
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3804
            if (op->from == ep->from && op->can_eliminate)
3805
              {
3806
                new_to = op->to;
3807
                break;
3808
              }
3809
 
3810
          /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3811
             disable it.  */
3812
          for (op = reg_eliminate;
3813
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3814
            if (op->from == new_to && op->to == ep->to)
3815
              op->can_eliminate = 0;
3816
        }
3817
    }
3818
 
3819
  /* See if any registers that we thought we could eliminate the previous
3820
     time are no longer eliminable.  If so, something has changed and we
3821
     must spill the register.  Also, recompute the number of eliminable
3822
     registers and see if the frame pointer is needed; it is if there is
3823
     no elimination of the frame pointer that we can perform.  */
3824
 
3825
  frame_pointer_needed = 1;
3826
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3827
    {
3828
      if (ep->can_eliminate
3829
          && ep->from == FRAME_POINTER_REGNUM
3830
          && ep->to != HARD_FRAME_POINTER_REGNUM
3831
          && (! SUPPORTS_STACK_ALIGNMENT
3832
              || ! crtl->stack_realign_needed))
3833
        frame_pointer_needed = 0;
3834
 
3835
      if (! ep->can_eliminate && ep->can_eliminate_previous)
3836
        {
3837
          ep->can_eliminate_previous = 0;
3838
          SET_HARD_REG_BIT (*pset, ep->from);
3839
          num_eliminable--;
3840
        }
3841
    }
3842
 
3843
  /* If we didn't need a frame pointer last time, but we do now, spill
3844
     the hard frame pointer.  */
3845
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
3846
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3847
}
3848
 
3849
/* Return true if X is used as the target register of an elimination.  */
3850
 
3851
bool
3852
elimination_target_reg_p (rtx x)
3853
{
3854
  struct elim_table *ep;
3855
 
3856
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3857
    if (ep->to_rtx == x && ep->can_eliminate)
3858
      return true;
3859
 
3860
  return false;
3861
}
3862
 
3863
/* Initialize the table of registers to eliminate.
3864
   Pre-condition: global flag frame_pointer_needed has been set before
3865
   calling this function.  */
3866
 
3867
static void
3868
init_elim_table (void)
3869
{
3870
  struct elim_table *ep;
3871
#ifdef ELIMINABLE_REGS
3872
  const struct elim_table_1 *ep1;
3873
#endif
3874
 
3875
  if (!reg_eliminate)
3876
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3877
 
3878
  num_eliminable = 0;
3879
 
3880
#ifdef ELIMINABLE_REGS
3881
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3882
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3883
    {
3884
      ep->from = ep1->from;
3885
      ep->to = ep1->to;
3886
      ep->can_eliminate = ep->can_eliminate_previous
3887
        = (targetm.can_eliminate (ep->from, ep->to)
3888
           && ! (ep->to == STACK_POINTER_REGNUM
3889
                 && frame_pointer_needed
3890
                 && (! SUPPORTS_STACK_ALIGNMENT
3891
                     || ! stack_realign_fp)));
3892
    }
3893
#else
3894
  reg_eliminate[0].from = reg_eliminate_1[0].from;
3895
  reg_eliminate[0].to = reg_eliminate_1[0].to;
3896
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3897
    = ! frame_pointer_needed;
3898
#endif
3899
 
3900
  /* Count the number of eliminable registers and build the FROM and TO
3901
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
3902
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3903
     We depend on this.  */
3904
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3905
    {
3906
      num_eliminable += ep->can_eliminate;
3907
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3908
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3909
    }
3910
}
3911
 
3912
/* Kick all pseudos out of hard register REGNO.
3913
 
3914
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3915
   because we found we can't eliminate some register.  In that case, no pseudos
3916
   are allowed to be in the register, even if they are only in a block that
3917
   doesn't require spill registers, unlike the case when we are spilling this
3918
   hard reg to produce another spill register.
3919
 
3920
   Any pseudos kicked out are recorded in the spilled_pseudos set.  */
3921
 
3922
static void
3923
spill_hard_reg (unsigned int regno, int cant_eliminate)
3924
{
3925
  int i;
3926
 
3927
  if (cant_eliminate)
3928
    {
3929
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3930
      df_set_regs_ever_live (regno, true);
3931
    }
3932
 
3933
  /* Spill every pseudo reg that was allocated to this reg
3934
     or to something that overlaps this reg.  */
3935
 
3936
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3937
    if (reg_renumber[i] >= 0
3938
        && (unsigned int) reg_renumber[i] <= regno
3939
        && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3940
      SET_REGNO_REG_SET (&spilled_pseudos, i);
3941
}
3942
 
3943
/* After find_reload_regs has been run for all insns that need reloads,
3944
   and/or spill_hard_regs was called, this function is used to actually
3945
   spill pseudo registers and try to reallocate them.  It also sets up the
3946
   spill_regs array for use by choose_reload_regs.  */
3947
 
3948
static int
3949
finish_spills (int global)
3950
{
3951
  struct insn_chain *chain;
3952
  int something_changed = 0;
3953
  unsigned i;
3954
  reg_set_iterator rsi;
3955
 
3956
  /* Build the spill_regs array for the function.  */
3957
  /* If there are some registers still to eliminate and one of the spill regs
3958
     wasn't ever used before, additional stack space may have to be
3959
     allocated to store this register.  Thus, we may have changed the offset
3960
     between the stack and frame pointers, so mark that something has changed.
3961
 
3962
     One might think that we need only set VAL to 1 if this is a call-used
3963
     register.  However, the set of registers that must be saved by the
3964
     prologue is not identical to the call-used set.  For example, the
3965
     register used by the call insn for the return PC is a call-used register,
3966
     but must be saved by the prologue.  */
3967
 
3968
  n_spills = 0;
3969
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3970
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
3971
      {
3972
        spill_reg_order[i] = n_spills;
3973
        spill_regs[n_spills++] = i;
3974
        if (num_eliminable && ! df_regs_ever_live_p (i))
3975
          something_changed = 1;
3976
        df_set_regs_ever_live (i, true);
3977
      }
3978
    else
3979
      spill_reg_order[i] = -1;
3980
 
3981
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3982
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
3983
      {
3984
        /* Record the current hard register the pseudo is allocated to
3985
           in pseudo_previous_regs so we avoid reallocating it to the
3986
           same hard reg in a later pass.  */
3987
        gcc_assert (reg_renumber[i] >= 0);
3988
 
3989
        SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3990
        /* Mark it as no longer having a hard register home.  */
3991
        reg_renumber[i] = -1;
3992
        if (ira_conflicts_p)
3993
          /* Inform IRA about the change.  */
3994
          ira_mark_allocation_change (i);
3995
        /* We will need to scan everything again.  */
3996
        something_changed = 1;
3997
      }
3998
 
3999
  /* Retry global register allocation if possible.  */
4000
  if (global && ira_conflicts_p)
4001
    {
4002
      unsigned int n;
4003
 
4004
      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4005
      /* For every insn that needs reloads, set the registers used as spill
4006
         regs in pseudo_forbidden_regs for every pseudo live across the
4007
         insn.  */
4008
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4009
        {
4010
          EXECUTE_IF_SET_IN_REG_SET
4011
            (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4012
            {
4013
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4014
                                chain->used_spill_regs);
4015
            }
4016
          EXECUTE_IF_SET_IN_REG_SET
4017
            (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4018
            {
4019
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4020
                                chain->used_spill_regs);
4021
            }
4022
        }
4023
 
4024
      /* Retry allocating the pseudos spilled in IRA and the
4025
         reload.  For each reg, merge the various reg sets that
4026
         indicate which hard regs can't be used, and call
4027
         ira_reassign_pseudos.  */
4028
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4029
        if (reg_old_renumber[i] != reg_renumber[i])
4030
          {
4031
            if (reg_renumber[i] < 0)
4032
              temp_pseudo_reg_arr[n++] = i;
4033
            else
4034
              CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4035
          }
4036
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4037
                                bad_spill_regs_global,
4038
                                pseudo_forbidden_regs, pseudo_previous_regs,
4039
                                &spilled_pseudos))
4040
        something_changed = 1;
4041
    }
4042
  /* Fix up the register information in the insn chain.
4043
     This involves deleting those of the spilled pseudos which did not get
4044
     a new hard register home from the live_{before,after} sets.  */
4045
  for (chain = reload_insn_chain; chain; chain = chain->next)
4046
    {
4047
      HARD_REG_SET used_by_pseudos;
4048
      HARD_REG_SET used_by_pseudos2;
4049
 
4050
      if (! ira_conflicts_p)
4051
        {
4052
          /* Don't do it for IRA because IRA and reload can still
4053
             assign hard registers to the spilled pseudos in later
4054
             reload iterations.  */
4055
          AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4056
          AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4057
        }
4058
      /* Mark any unallocated hard regs as available for spills.  That
4059
         makes inheritance work somewhat better.  */
4060
      if (chain->need_reload)
4061
        {
4062
          REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4063
          REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4064
          IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4065
 
4066
          compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4067
          compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4068
          /* The value of chain->used_spill_regs from the previous iteration
4069
             may not be included in the value calculated here because
4070
             caller-save insns may have been removed (see function
4071
             delete_caller_save_insns).  */
4072
          COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4073
          AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4074
        }
4075
    }
4076
 
4077
  CLEAR_REG_SET (&changed_allocation_pseudos);
4078
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4079
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4080
    {
4081
      int regno = reg_renumber[i];
4082
      if (reg_old_renumber[i] == regno)
4083
        continue;
4084
 
4085
      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4086
 
4087
      alter_reg (i, reg_old_renumber[i], false);
4088
      reg_old_renumber[i] = regno;
4089
      if (dump_file)
4090
        {
4091
          if (regno == -1)
4092
            fprintf (dump_file, " Register %d now on stack.\n\n", i);
4093
          else
4094
            fprintf (dump_file, " Register %d now in %d.\n\n",
4095
                     i, reg_renumber[i]);
4096
        }
4097
    }
4098
 
4099
  return something_changed;
4100
}
4101
 
4102
/* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4103
 
4104
static void
4105
scan_paradoxical_subregs (rtx x)
4106
{
4107
  int i;
4108
  const char *fmt;
4109
  enum rtx_code code = GET_CODE (x);
4110
 
4111
  switch (code)
4112
    {
4113
    case REG:
4114
    case CONST_INT:
4115
    case CONST:
4116
    case SYMBOL_REF:
4117
    case LABEL_REF:
4118
    case CONST_DOUBLE:
4119
    case CONST_FIXED:
4120
    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
4121
    case CC0:
4122
    case PC:
4123
    case USE:
4124
    case CLOBBER:
4125
      return;
4126
 
4127
    case SUBREG:
4128
      if (REG_P (SUBREG_REG (x))
4129
          && (GET_MODE_SIZE (GET_MODE (x))
4130
              > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4131
        {
4132
          reg_max_ref_width[REGNO (SUBREG_REG (x))]
4133
            = GET_MODE_SIZE (GET_MODE (x));
4134
          mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4135
        }
4136
      return;
4137
 
4138
    default:
4139
      break;
4140
    }
4141
 
4142
  fmt = GET_RTX_FORMAT (code);
4143
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4144
    {
4145
      if (fmt[i] == 'e')
4146
        scan_paradoxical_subregs (XEXP (x, i));
4147
      else if (fmt[i] == 'E')
4148
        {
4149
          int j;
4150
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4151
            scan_paradoxical_subregs (XVECEXP (x, i, j));
4152
        }
4153
    }
4154
}
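/* For illustration (pseudo 123 and the 32-bit word size are hypothetical):
   scanning (subreg:DI (reg:SI 123) 0) raises reg_max_ref_width[123] to 8,
   so that if pseudo 123 ends up spilled its stack slot is made wide enough
   for the paradoxical reference.  */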
4155
 
4156
/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4157
   examine all of the reload insns between PREV and NEXT exclusive, and
4158
   annotate all that may trap.  */
4159
 
4160
static void
4161
fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4162
{
4163
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4164
  if (note == NULL)
4165
    return;
4166
  if (!insn_could_throw_p (insn))
4167
    remove_note (insn, note);
4168
  copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4169
}
4170
 
4171
/* Reload pseudo-registers into hard regs around each insn as needed.
4172
   Additional register load insns are output before the insn that needs it
4173
   and perhaps store insns after insns that modify the reloaded pseudo reg.
4174
 
4175
   reg_last_reload_reg and reg_reloaded_contents keep track of
4176
   which registers are already available in reload registers.
4177
   We update these for the reloads that we perform,
4178
   as the insns are scanned.  */
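/* Added overview, not part of the original comment: for each insn in
   reload_insn_chain the loop below first lets find_reloads record what
   must be reloaded, then choose_reload_regs picks hard registers for
   those reloads, emit_reload_insns emits the needed load/store insns,
   and subst_reloads rewrites the insn body to use the chosen reload
   registers.  */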
4179
 
4180
static void
4181
reload_as_needed (int live_known)
4182
{
4183
  struct insn_chain *chain;
4184
#if defined (AUTO_INC_DEC)
4185
  int i;
4186
#endif
4187
  rtx x;
4188
 
4189
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4190
  memset (spill_reg_store, 0, sizeof spill_reg_store);
4191
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4192
  INIT_REG_SET (&reg_has_output_reload);
4193
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
4194
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4195
 
4196
  set_initial_elim_offsets ();
4197
 
4198
  for (chain = reload_insn_chain; chain; chain = chain->next)
4199
    {
4200
      rtx prev = 0;
4201
      rtx insn = chain->insn;
4202
      rtx old_next = NEXT_INSN (insn);
4203
#ifdef AUTO_INC_DEC
4204
      rtx old_prev = PREV_INSN (insn);
4205
#endif
4206
 
4207
      /* If we pass a label, copy the offsets from the label information
4208
         into the current offsets of each elimination.  */
4209
      if (LABEL_P (insn))
4210
        set_offsets_for_label (insn);
4211
 
4212
      else if (INSN_P (insn))
4213
        {
4214
          regset_head regs_to_forget;
4215
          INIT_REG_SET (&regs_to_forget);
4216
          note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4217
 
4218
          /* If this is a USE or CLOBBER of a MEM, ensure that any
4219
             references to eliminable registers have been removed.  */
4220
 
4221
          if ((GET_CODE (PATTERN (insn)) == USE
4222
               || GET_CODE (PATTERN (insn)) == CLOBBER)
4223
              && MEM_P (XEXP (PATTERN (insn), 0)))
4224
            XEXP (XEXP (PATTERN (insn), 0), 0)
4225
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4226
                                GET_MODE (XEXP (PATTERN (insn), 0)),
4227
                                NULL_RTX);
4228
 
4229
          /* If we need to do register elimination processing, do so.
4230
             This might delete the insn, in which case we are done.  */
4231
          if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4232
            {
4233
              eliminate_regs_in_insn (insn, 1);
4234
              if (NOTE_P (insn))
4235
                {
4236
                  update_eliminable_offsets ();
4237
                  CLEAR_REG_SET (&regs_to_forget);
4238
                  continue;
4239
                }
4240
            }
4241
 
4242
          /* If need_elim is nonzero but need_reload is zero, one might think
4243
             that we could simply set n_reloads to 0.  However, find_reloads
4244
             could have done some manipulation of the insn (such as swapping
4245
             commutative operands), and these manipulations are lost during
4246
             the first pass for every insn that needs register elimination.
4247
             So the actions of find_reloads must be redone here.  */
4248
 
4249
          if (! chain->need_elim && ! chain->need_reload
4250
              && ! chain->need_operand_change)
4251
            n_reloads = 0;
4252
          /* First find the pseudo regs that must be reloaded for this insn.
4253
             This info is returned in the tables reload_... (see reload.h).
4254
             Also modify the body of INSN by substituting RELOAD
4255
             rtx's for those pseudo regs.  */
4256
          else
4257
            {
4258
              CLEAR_REG_SET (&reg_has_output_reload);
4259
              CLEAR_HARD_REG_SET (reg_is_output_reload);
4260
 
4261
              find_reloads (insn, 1, spill_indirect_levels, live_known,
4262
                            spill_reg_order);
4263
            }
4264
 
4265
          if (n_reloads > 0)
4266
            {
4267
              rtx next = NEXT_INSN (insn);
4268
              rtx p;
4269
 
4270
              prev = PREV_INSN (insn);
4271
 
4272
              /* Now compute which reload regs to reload them into.  Perhaps
4273
                 reusing reload regs from previous insns, or else output
4274
                 load insns to reload them.  Maybe output store insns too.
4275
                 Record the choices of reload reg in reload_reg_rtx.  */
4276
              choose_reload_regs (chain);
4277
 
4278
              /* Merge any reloads that we didn't combine for fear of
4279
                 increasing the number of spill registers needed but now
4280
                 discover can be safely merged.  */
4281
              if (SMALL_REGISTER_CLASSES)
4282
                merge_assigned_reloads (insn);
4283
 
4284
              /* Generate the insns to reload operands into or out of
4285
                 their reload regs.  */
4286
              emit_reload_insns (chain);
4287
 
4288
              /* Substitute the chosen reload regs from reload_reg_rtx
4289
                 into the insn's body (or perhaps into the bodies of other
4290
                 load and store insns that we just made for reloading
4291
                 and that we moved the structure into).  */
4292
              subst_reloads (insn);
4293
 
4294
              /* Adjust the exception region notes for loads and stores.  */
4295
              if (flag_non_call_exceptions && !CALL_P (insn))
4296
                fixup_eh_region_note (insn, prev, next);
4297
 
4298
              /* If this was an ASM, make sure that all the reload insns
4299
                 we have generated are valid.  If not, give an error
4300
                 and delete them.  */
4301
              if (asm_noperands (PATTERN (insn)) >= 0)
4302
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4303
                  if (p != insn && INSN_P (p)
4304
                      && GET_CODE (PATTERN (p)) != USE
4305
                      && (recog_memoized (p) < 0
4306
                          || (extract_insn (p), ! constrain_operands (1))))
4307
                    {
4308
                      error_for_asm (insn,
4309
                                     "%<asm%> operand requires "
4310
                                     "impossible reload");
4311
                      delete_insn (p);
4312
                    }
4313
            }
4314
 
4315
          if (num_eliminable && chain->need_elim)
4316
            update_eliminable_offsets ();
4317
 
4318
          /* Any previously reloaded spilled pseudo reg, stored in this insn,
4319
             is no longer validly lying around to save a future reload.
4320
             Note that this does not detect pseudos that were reloaded
4321
             for this insn in order to be stored into
4322
             (obeying register constraints).  That is correct; such reload
4323
             registers ARE still valid.  */
4324
          forget_marked_reloads (&regs_to_forget);
4325
          CLEAR_REG_SET (&regs_to_forget);
4326
 
4327
          /* There may have been CLOBBER insns placed after INSN.  So scan
4328
             between INSN and NEXT and use them to forget old reloads.  */
4329
          for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4330
            if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4331
              note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4332
 
4333
#ifdef AUTO_INC_DEC
4334
          /* Likewise for regs altered by auto-increment in this insn.
4335
             REG_INC notes have been changed by reloading:
4336
             find_reloads_address_1 records substitutions for them,
4337
             which have been performed by subst_reloads above.  */
4338
          for (i = n_reloads - 1; i >= 0; i--)
4339
            {
4340
              rtx in_reg = rld[i].in_reg;
4341
              if (in_reg)
4342
                {
4343
                  enum rtx_code code = GET_CODE (in_reg);
4344
                  /* PRE_INC / PRE_DEC will have the reload register ending up
4345
                     with the same value as the stack slot, but that doesn't
4346
                     hold true for POST_INC / POST_DEC.  Either we have to
4347
                     convert the memory access to a true POST_INC / POST_DEC,
4348
                     or we can't use the reload register for inheritance.  */
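                  /* Added illustration, not from the original source: for
                     (post_inc (reg X)) reloaded through HR, HR is loaded
                     with the pre-increment value while X's slot ends up
                     holding the incremented value, so the code below tries
                     to turn the single later use of HR into a POST_INC
                     access; only then does HR match X again and remain
                     usable for inheritance.  */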
4349
                  if ((code == POST_INC || code == POST_DEC)
4350
                      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4351
                                            REGNO (rld[i].reg_rtx))
4352
                      /* Make sure it is the inc/dec pseudo, and not
4353
                         some other (e.g. output operand) pseudo.  */
4354
                      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4355
                          == REGNO (XEXP (in_reg, 0))))
4356
 
4357
                    {
4358
                      rtx reload_reg = rld[i].reg_rtx;
4359
                      enum machine_mode mode = GET_MODE (reload_reg);
4360
                      int n = 0;
4361
                      rtx p;
4362
 
4363
                      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4364
                        {
4365
                          /* We really want to ignore REG_INC notes here, so
4366
                             use PATTERN (p) as the argument to reg_set_p.  */
4367
                          if (reg_set_p (reload_reg, PATTERN (p)))
4368
                            break;
4369
                          n = count_occurrences (PATTERN (p), reload_reg, 0);
4370
                          if (! n)
4371
                            continue;
4372
                          if (n == 1)
4373
                            {
4374
                              rtx replace_reg
4375
                                = gen_rtx_fmt_e (code, mode, reload_reg);
4376
 
4377
                              validate_replace_rtx_group (reload_reg,
4378
                                                          replace_reg, p);
4379
                              n = verify_changes (0);
4380
 
4381
                              /* We must also verify that the constraints
4382
                                 are met after the replacement.  Make sure
4383
                                 extract_insn is only called for an insn
4384
                                 where the replacements were found to be
4385
                                 valid so far. */
4386
                              if (n)
4387
                                {
4388
                                  extract_insn (p);
4389
                                  n = constrain_operands (1);
4390
                                }
4391
 
4392
                              /* If the constraints were not met, then
4393
                                 undo the replacement, else confirm it.  */
4394
                              if (!n)
4395
                                cancel_changes (0);
4396
                              else
4397
                                confirm_change_group ();
4398
                            }
4399
                          break;
4400
                        }
4401
                      if (n == 1)
4402
                        {
4403
                          add_reg_note (p, REG_INC, reload_reg);
4404
                          /* Mark this as having an output reload so that the
4405
                             REG_INC processing code below won't invalidate
4406
                             the reload for inheritance.  */
4407
                          SET_HARD_REG_BIT (reg_is_output_reload,
4408
                                            REGNO (reload_reg));
4409
                          SET_REGNO_REG_SET (&reg_has_output_reload,
4410
                                             REGNO (XEXP (in_reg, 0)));
4411
                        }
4412
                      else
4413
                        forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4414
                                              NULL);
4415
                    }
4416
                  else if ((code == PRE_INC || code == PRE_DEC)
4417
                           && TEST_HARD_REG_BIT (reg_reloaded_valid,
4418
                                                 REGNO (rld[i].reg_rtx))
4419
                           /* Make sure it is the inc/dec pseudo, and not
4420
                              some other (e.g. output operand) pseudo.  */
4421
                           && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4422
                               == REGNO (XEXP (in_reg, 0))))
4423
                    {
4424
                      SET_HARD_REG_BIT (reg_is_output_reload,
4425
                                        REGNO (rld[i].reg_rtx));
4426
                      SET_REGNO_REG_SET (&reg_has_output_reload,
4427
                                         REGNO (XEXP (in_reg, 0)));
4428
                    }
4429
                  else if (code == PRE_INC || code == PRE_DEC
4430
                           || code == POST_INC || code == POST_DEC)
4431
                    {
4432
                      int in_regno = REGNO (XEXP (in_reg, 0));
4433
 
4434
                      if (reg_last_reload_reg[in_regno] != NULL_RTX)
4435
                        {
4436
                          int in_hard_regno;
4437
                          bool forget_p = true;
4438
 
4439
                          in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4440
                          if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4441
                                                 in_hard_regno))
4442
                            {
4443
                              for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4444
                                   x != old_next;
4445
                                   x = NEXT_INSN (x))
4446
                                if (x == reg_reloaded_insn[in_hard_regno])
4447
                                  {
4448
                                    forget_p = false;
4449
                                    break;
4450
                                  }
4451
                            }
4452
                          /* If for some reason we didn't set up
4453
                             reg_last_reload_reg in this insn,
4454
                             invalidate inheritance from previous
4455
                             insns for the incremented/decremented
4456
                             register.  Such registers will not be in
4457
                             reg_has_output_reload.  Also invalidate
4458
                             it if the corresponding element in
4459
                             reg_reloaded_insn has been
4460
                             invalidated.  */
4461
                          if (forget_p)
4462
                            forget_old_reloads_1 (XEXP (in_reg, 0),
4463
                                                  NULL_RTX, NULL);
4464
                        }
4465
                    }
4466
                }
4467
            }
4468
          /* If a pseudo that got a hard register is auto-incremented,
4469
             we must purge records of copying it into pseudos without
4470
             hard registers.  */
4471
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4472
            if (REG_NOTE_KIND (x) == REG_INC)
4473
              {
4474
                /* See if this pseudo reg was reloaded in this insn.
4475
                   If so, its last-reload info is still valid
4476
                   because it is based on this insn's reload.  */
4477
                for (i = 0; i < n_reloads; i++)
4478
                  if (rld[i].out == XEXP (x, 0))
4479
                    break;
4480
 
4481
                if (i == n_reloads)
4482
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4483
              }
4484
#endif
4485
        }
4486
      /* A reload reg's contents are unknown after a label.  */
4487
      if (LABEL_P (insn))
4488
        CLEAR_HARD_REG_SET (reg_reloaded_valid);
4489
 
4490
      /* Don't assume a reload reg is still good after a call insn
4491
         if it is a call-used reg, or if it contains a value that will
4492
         be partially clobbered by the call.  */
4493
      else if (CALL_P (insn))
4494
        {
4495
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4496
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4497
        }
4498
    }
4499
 
4500
  /* Clean up.  */
4501
  free (reg_last_reload_reg);
4502
  CLEAR_REG_SET (&reg_has_output_reload);
4503
}
4504
 
4505
/* Discard all record of any value reloaded from X,
4506
   or reloaded in X from someplace else;
4507
   unless X is an output reload reg of the current insn.
4508
 
4509
   X may be a hard reg (the reload reg)
4510
   or it may be a pseudo reg that was reloaded from.
4511
 
4512
   When DATA is non-NULL just mark the registers in regset
4513
   to be forgotten later.  */
4514
 
4515
static void
4516
forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4517
                      void *data)
4518
{
4519
  unsigned int regno;
4520
  unsigned int nr;
4521
  regset regs = (regset) data;
4522
 
4523
  /* note_stores does give us subregs of hard regs,
4524
     subreg_regno_offset requires a hard reg.  */
4525
  while (GET_CODE (x) == SUBREG)
4526
    {
4527
      /* We ignore the subreg offset when calculating the regno,
4528
         because we are using the entire underlying hard register
4529
         below.  */
4530
      x = SUBREG_REG (x);
4531
    }
4532
 
4533
  if (!REG_P (x))
4534
    return;
4535
 
4536
  regno = REGNO (x);
4537
 
4538
  if (regno >= FIRST_PSEUDO_REGISTER)
4539
    nr = 1;
4540
  else
4541
    {
4542
      unsigned int i;
4543
 
4544
      nr = hard_regno_nregs[regno][GET_MODE (x)];
4545
      /* Storing into a spilled-reg invalidates its contents.
4546
         This can happen if a block-local pseudo is allocated to that reg
4547
         and it wasn't spilled because this block's total need is 0.
4548
         Then some insn might have an optional reload and use this reg.  */
4549
      if (!regs)
4550
        for (i = 0; i < nr; i++)
4551
          /* But don't do this if the reg actually serves as an output
4552
             reload reg in the current instruction.  */
4553
          if (n_reloads == 0
4554
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4555
            {
4556
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4557
              spill_reg_store[regno + i] = 0;
4558
            }
4559
    }
4560
 
4561
  if (regs)
4562
    while (nr-- > 0)
4563
      SET_REGNO_REG_SET (regs, regno + nr);
4564
  else
4565
    {
4566
      /* Since value of X has changed,
4567
         forget any value previously copied from it.  */
4568
 
4569
      while (nr-- > 0)
4570
        /* But don't forget a copy if this is the output reload
4571
           that establishes the copy's validity.  */
4572
        if (n_reloads == 0
4573
            || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4574
          reg_last_reload_reg[regno + nr] = 0;
4575
     }
4576
}
4577
 
4578
/* Forget the reloads marked in regset by the previous function.  */
4579
static void
4580
forget_marked_reloads (regset regs)
4581
{
4582
  unsigned int reg;
4583
  reg_set_iterator rsi;
4584
  EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4585
    {
4586
      if (reg < FIRST_PSEUDO_REGISTER
4587
          /* But don't do this if the reg actually serves as an output
4588
             reload reg in the current instruction.  */
4589
          && (n_reloads == 0
4590
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4591
          {
4592
            CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4593
            spill_reg_store[reg] = 0;
4594
          }
4595
      if (n_reloads == 0
4596
          || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4597
        reg_last_reload_reg[reg] = 0;
4598
    }
4599
}
4600
 
4601
/* The following HARD_REG_SETs indicate when each hard register is
4602
   used for a reload of various parts of the current insn.  */
4603
 
4604
/* If reg is unavailable for all reloads.  */
4605
static HARD_REG_SET reload_reg_unavailable;
4606
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4607
static HARD_REG_SET reload_reg_used;
4608
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4609
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4610
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4611
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4612
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4613
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4614
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4615
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4616
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4617
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4618
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4619
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4620
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4621
static HARD_REG_SET reload_reg_used_in_op_addr;
4622
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4623
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4624
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
4625
static HARD_REG_SET reload_reg_used_in_insn;
4626
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4627
static HARD_REG_SET reload_reg_used_in_other_addr;
4628
 
4629
/* If reg is in use as a reload reg for any sort of reload.  */
4630
static HARD_REG_SET reload_reg_used_at_all;
4631
 
4632
/* If reg is in use as an inherited reload.  We just mark the first register
4633
   in the group.  */
4634
static HARD_REG_SET reload_reg_used_for_inherit;
4635
 
4636
/* Records which hard regs are used in any way, either as explicit use or
4637
   by being allocated to a pseudo during any point of the current insn.  */
4638
static HARD_REG_SET reg_used_in_insn;
4639
 
4640
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4641
   TYPE. MODE is used to indicate how many consecutive regs are
4642
   actually used.  */
4643
 
4644
static void
4645
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4646
                        enum machine_mode mode)
4647
{
4648
  unsigned int nregs = hard_regno_nregs[regno][mode];
4649
  unsigned int i;
4650
 
4651
  for (i = regno; i < nregs + regno; i++)
4652
    {
4653
      switch (type)
4654
        {
4655
        case RELOAD_OTHER:
4656
          SET_HARD_REG_BIT (reload_reg_used, i);
4657
          break;
4658
 
4659
        case RELOAD_FOR_INPUT_ADDRESS:
4660
          SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4661
          break;
4662
 
4663
        case RELOAD_FOR_INPADDR_ADDRESS:
4664
          SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4665
          break;
4666
 
4667
        case RELOAD_FOR_OUTPUT_ADDRESS:
4668
          SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4669
          break;
4670
 
4671
        case RELOAD_FOR_OUTADDR_ADDRESS:
4672
          SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4673
          break;
4674
 
4675
        case RELOAD_FOR_OPERAND_ADDRESS:
4676
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4677
          break;
4678
 
4679
        case RELOAD_FOR_OPADDR_ADDR:
4680
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4681
          break;
4682
 
4683
        case RELOAD_FOR_OTHER_ADDRESS:
4684
          SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4685
          break;
4686
 
4687
        case RELOAD_FOR_INPUT:
4688
          SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4689
          break;
4690
 
4691
        case RELOAD_FOR_OUTPUT:
4692
          SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4693
          break;
4694
 
4695
        case RELOAD_FOR_INSN:
4696
          SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4697
          break;
4698
        }
4699
 
4700
      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4701
    }
4702
}
4703
 
4704
/* Similarly, but show REGNO is no longer in use for a reload.  */
4705
 
4706
static void
4707
clear_reload_reg_in_use (unsigned int regno, int opnum,
4708
                         enum reload_type type, enum machine_mode mode)
4709
{
4710
  unsigned int nregs = hard_regno_nregs[regno][mode];
4711
  unsigned int start_regno, end_regno, r;
4712
  int i;
4713
  /* A complication is that for some reload types, inheritance might
4714
     allow multiple reloads of the same types to share a reload register.
4715
     We set check_opnum if we have to check only reloads with the same
4716
     operand number, and check_any if we have to check all reloads.  */
4717
  int check_opnum = 0;
4718
  int check_any = 0;
4719
  HARD_REG_SET *used_in_set;
4720
 
4721
  switch (type)
4722
    {
4723
    case RELOAD_OTHER:
4724
      used_in_set = &reload_reg_used;
4725
      break;
4726
 
4727
    case RELOAD_FOR_INPUT_ADDRESS:
4728
      used_in_set = &reload_reg_used_in_input_addr[opnum];
4729
      break;
4730
 
4731
    case RELOAD_FOR_INPADDR_ADDRESS:
4732
      check_opnum = 1;
4733
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4734
      break;
4735
 
4736
    case RELOAD_FOR_OUTPUT_ADDRESS:
4737
      used_in_set = &reload_reg_used_in_output_addr[opnum];
4738
      break;
4739
 
4740
    case RELOAD_FOR_OUTADDR_ADDRESS:
4741
      check_opnum = 1;
4742
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4743
      break;
4744
 
4745
    case RELOAD_FOR_OPERAND_ADDRESS:
4746
      used_in_set = &reload_reg_used_in_op_addr;
4747
      break;
4748
 
4749
    case RELOAD_FOR_OPADDR_ADDR:
4750
      check_any = 1;
4751
      used_in_set = &reload_reg_used_in_op_addr_reload;
4752
      break;
4753
 
4754
    case RELOAD_FOR_OTHER_ADDRESS:
4755
      used_in_set = &reload_reg_used_in_other_addr;
4756
      check_any = 1;
4757
      break;
4758
 
4759
    case RELOAD_FOR_INPUT:
4760
      used_in_set = &reload_reg_used_in_input[opnum];
4761
      break;
4762
 
4763
    case RELOAD_FOR_OUTPUT:
4764
      used_in_set = &reload_reg_used_in_output[opnum];
4765
      break;
4766
 
4767
    case RELOAD_FOR_INSN:
4768
      used_in_set = &reload_reg_used_in_insn;
4769
      break;
4770
    default:
4771
      gcc_unreachable ();
4772
    }
4773
  /* We resolve conflicts with remaining reloads of the same type by
4774
     excluding the intervals of reload registers used by them from the
4775
     interval of freed reload registers.  Since we only keep track of
4776
     one set of interval bounds, we might have to exclude somewhat
4777
     more than what would be necessary if we used a HARD_REG_SET here.
4778
     But this should only happen very infrequently, so there should
4779
     be no reason to worry about it.  */
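  /* Added example, not in the original source: if this call is freeing
     regs 4..7 and another pending reload of the same type still occupies
     regs 6..7, the loop below moves end_regno down to 6, so only regs
     4..5 are actually cleared from *used_in_set.  */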
4780
 
4781
  start_regno = regno;
4782
  end_regno = regno + nregs;
4783
  if (check_opnum || check_any)
4784
    {
4785
      for (i = n_reloads - 1; i >= 0; i--)
4786
        {
4787
          if (rld[i].when_needed == type
4788
              && (check_any || rld[i].opnum == opnum)
4789
              && rld[i].reg_rtx)
4790
            {
4791
              unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4792
              unsigned int conflict_end
4793
                = end_hard_regno (rld[i].mode, conflict_start);
4794
 
4795
              /* If there is an overlap with the first to-be-freed register,
4796
                 adjust the interval start.  */
4797
              if (conflict_start <= start_regno && conflict_end > start_regno)
4798
                start_regno = conflict_end;
4799
              /* Otherwise, if there is a conflict with one of the other
4800
                 to-be-freed registers, adjust the interval end.  */
4801
              if (conflict_start > start_regno && conflict_start < end_regno)
4802
                end_regno = conflict_start;
4803
            }
4804
        }
4805
    }
4806
 
4807
  for (r = start_regno; r < end_regno; r++)
4808
    CLEAR_HARD_REG_BIT (*used_in_set, r);
4809
}
4810
 
4811
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4812
   specified by OPNUM and TYPE.  */
4813
 
4814
static int
4815
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4816
{
4817
  int i;
4818
 
4819
  /* In use for a RELOAD_OTHER means it's not available for anything.  */
4820
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4821
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4822
    return 0;
4823
 
4824
  switch (type)
4825
    {
4826
    case RELOAD_OTHER:
4827
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
4828
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4829
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4830
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4831
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4832
        return 0;
4833
 
4834
      for (i = 0; i < reload_n_operands; i++)
4835
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4836
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4837
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4838
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4839
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4840
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4841
          return 0;
4842
 
4843
      return 1;
4844
 
4845
    case RELOAD_FOR_INPUT:
4846
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4847
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4848
        return 0;
4849
 
4850
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4851
        return 0;
4852
 
4853
      /* If it is used for some other input, can't use it.  */
4854
      for (i = 0; i < reload_n_operands; i++)
4855
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4856
          return 0;
4857
 
4858
      /* If it is used in a later operand's address, can't use it.  */
4859
      for (i = opnum + 1; i < reload_n_operands; i++)
4860
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4861
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4862
          return 0;
4863
 
4864
      return 1;
4865
 
4866
    case RELOAD_FOR_INPUT_ADDRESS:
4867
      /* Can't use a register if it is used for an input address for this
4868
         operand or used as an input in an earlier one.  */
4869
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4870
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4871
        return 0;
4872
 
4873
      for (i = 0; i < opnum; i++)
4874
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4875
          return 0;
4876
 
4877
      return 1;
4878
 
4879
    case RELOAD_FOR_INPADDR_ADDRESS:
4880
      /* Can't use a register if it is used for an input address
4881
         for this operand or used as an input in an earlier
4882
         one.  */
4883
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4884
        return 0;
4885
 
4886
      for (i = 0; i < opnum; i++)
4887
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4888
          return 0;
4889
 
4890
      return 1;
4891
 
4892
    case RELOAD_FOR_OUTPUT_ADDRESS:
4893
      /* Can't use a register if it is used for an output address for this
4894
         operand or used as an output in this or a later operand.  Note
4895
         that multiple output operands are emitted in reverse order, so
4896
         the conflicting ones are those with lower indices.  */
4897
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4898
        return 0;
4899
 
4900
      for (i = 0; i <= opnum; i++)
4901
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4902
          return 0;
4903
 
4904
      return 1;
4905
 
4906
    case RELOAD_FOR_OUTADDR_ADDRESS:
4907
      /* Can't use a register if it is used for an output address
4908
         for this operand or used as an output in this or a
4909
         later operand.  Note that multiple output operands are
4910
         emitted in reverse order, so the conflicting ones are
4911
         those with lower indices.  */
4912
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4913
        return 0;
4914
 
4915
      for (i = 0; i <= opnum; i++)
4916
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4917
          return 0;
4918
 
4919
      return 1;
4920
 
4921
    case RELOAD_FOR_OPERAND_ADDRESS:
4922
      for (i = 0; i < reload_n_operands; i++)
4923
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4924
          return 0;
4925
 
4926
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4927
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4928
 
4929
    case RELOAD_FOR_OPADDR_ADDR:
4930
      for (i = 0; i < reload_n_operands; i++)
4931
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4932
          return 0;
4933
 
4934
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4935
 
4936
    case RELOAD_FOR_OUTPUT:
4937
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4938
         outputs, or an operand address for this or an earlier output.
4939
         Note that multiple output operands are emitted in reverse order,
4940
         so the conflicting ones are those with higher indices.  */
4941
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4942
        return 0;
4943
 
4944
      for (i = 0; i < reload_n_operands; i++)
4945
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4946
          return 0;
4947
 
4948
      for (i = opnum; i < reload_n_operands; i++)
4949
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4950
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4951
          return 0;
4952
 
4953
      return 1;
4954
 
4955
    case RELOAD_FOR_INSN:
4956
      for (i = 0; i < reload_n_operands; i++)
4957
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4958
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4959
          return 0;
4960
 
4961
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4962
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4963
 
4964
    case RELOAD_FOR_OTHER_ADDRESS:
4965
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4966
 
4967
    default:
4968
      gcc_unreachable ();
4969
    }
4970
}
4971
 
4972
/* Return 1 if the value in reload reg REGNO, as used by a reload
4973
   needed for the part of the insn specified by OPNUM and TYPE,
4974
   is still available in REGNO at the end of the insn.
4975
 
4976
   We can assume that the reload reg was already tested for availability
4977
   at the time it is needed, and we should not check this again,
4978
   in case the reg has already been marked in use.  */
4979
 
4980
static int
4981
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4982
{
4983
  int i;
4984
 
4985
  switch (type)
4986
    {
4987
    case RELOAD_OTHER:
4988
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4989
         its value must reach the end.  */
4990
      return 1;
4991
 
4992
      /* If this use is for part of the insn,
4993
         its value reaches the end if no subsequent part uses the same register.
4994
         Just like the above function, don't try to do this with lots
4995
         of fallthroughs.  */
4996
 
4997
    case RELOAD_FOR_OTHER_ADDRESS:
4998
      /* Here we check for everything else, since these don't conflict
4999
         with anything else and everything comes later.  */
5000
 
5001
      for (i = 0; i < reload_n_operands; i++)
5002
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5003
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5004
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5005
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5006
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5007
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5008
          return 0;
5009
 
5010
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5011
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5012
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5013
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5014
 
5015
    case RELOAD_FOR_INPUT_ADDRESS:
5016
    case RELOAD_FOR_INPADDR_ADDRESS:
5017
      /* Similar, except that we check only for this and subsequent inputs
5018
         and the address of only subsequent inputs and we do not need
5019
         to check for RELOAD_OTHER objects since they are known not to
5020
         conflict.  */
5021
 
5022
      for (i = opnum; i < reload_n_operands; i++)
5023
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5024
          return 0;
5025
 
5026
      for (i = opnum + 1; i < reload_n_operands; i++)
5027
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5028
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5029
          return 0;
5030
 
5031
      for (i = 0; i < reload_n_operands; i++)
5032
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5033
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5034
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5035
          return 0;
5036
 
5037
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5038
        return 0;
5039
 
5040
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5041
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5042
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5043
 
5044
    case RELOAD_FOR_INPUT:
5045
      /* Similar to input address, except we start at the next operand for
5046
         both input and input address and we do not check for
5047
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5048
         would conflict.  */
5049
 
5050
      for (i = opnum + 1; i < reload_n_operands; i++)
5051
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5052
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5053
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5054
          return 0;
5055
 
5056
      /* ... fall through ...  */
5057
 
5058
    case RELOAD_FOR_OPERAND_ADDRESS:
5059
      /* Check outputs and their addresses.  */
5060
 
5061
      for (i = 0; i < reload_n_operands; i++)
5062
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5063
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5064
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5065
          return 0;
5066
 
5067
      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5068
 
5069
    case RELOAD_FOR_OPADDR_ADDR:
5070
      for (i = 0; i < reload_n_operands; i++)
5071
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5072
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5073
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5074
          return 0;
5075
 
5076
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5077
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5078
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5079
 
5080
    case RELOAD_FOR_INSN:
5081
      /* These conflict with other outputs with RELOAD_OTHER.  So
5082
         we need only check for output addresses.  */
5083
 
5084
      opnum = reload_n_operands;
5085
 
5086
      /* ... fall through ...  */
5087
 
5088
    case RELOAD_FOR_OUTPUT:
5089
    case RELOAD_FOR_OUTPUT_ADDRESS:
5090
    case RELOAD_FOR_OUTADDR_ADDRESS:
5091
      /* We already know these can't conflict with a later output.  So the
5092
         only thing to check are later output addresses.
5093
         Note that multiple output operands are emitted in reverse order,
5094
         so the conflicting ones are those with lower indices.  */
5095
      for (i = 0; i < opnum; i++)
5096
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5097
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5098
          return 0;
5099
 
5100
      return 1;
5101
 
5102
    default:
5103
      gcc_unreachable ();
5104
    }
5105
}
5106
 
5107
/* Like reload_reg_reaches_end_p, but check that the condition holds for
5108
   every register in the range [REGNO, REGNO + NREGS).  */
5109
 
5110
static bool
5111
reload_regs_reach_end_p (unsigned int regno, int nregs,
5112
                         int opnum, enum reload_type type)
5113
{
5114
  int i;
5115
 
5116
  for (i = 0; i < nregs; i++)
5117
    if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5118
      return false;
5119
  return true;
5120
}
5121
 
5122
 
5123
/*  Returns whether R1 and R2 are uniquely chained: the value of one
5124
    is used by the other, and that value is not used by any other
5125
    reload for this insn.  This is used to partially undo the decision
5126
    made in find_reloads when in the case of multiple
5127
    RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5128
    RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5129
    reloads.  This code tries to avoid the conflict created by that
5130
    change.  It might be cleaner to explicitly keep track of which
5131
    RELOAD_FOR_OPADDR_ADDR reload is associated with which
5132
    RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5133
    this after the fact. */
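/* Added example, not from the original source: for an address such as
   (mem (plus (reg A) (reg B))), one reload may load (reg B) while another
   loads the whole (plus (reg A) (reg B)); the first value is consumed only
   by the second, so reloads_unique_chain_p treats them as a unique chain
   and they may share a reload register.  */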
5134
static bool
5135
reloads_unique_chain_p (int r1, int r2)
5136
{
5137
  int i;
5138
 
5139
  /* We only check input reloads.  */
5140
  if (! rld[r1].in || ! rld[r2].in)
5141
    return false;
5142
 
5143
  /* Avoid anything with output reloads.  */
5144
  if (rld[r1].out || rld[r2].out)
5145
    return false;
5146
 
5147
  /* "chained" means one reload is a component of the other reload,
5148
     not the same as the other reload.  */
5149
  if (rld[r1].opnum != rld[r2].opnum
5150
      || rtx_equal_p (rld[r1].in, rld[r2].in)
5151
      || rld[r1].optional || rld[r2].optional
5152
      || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5153
            || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5154
    return false;
5155
 
5156
  for (i = 0; i < n_reloads; i ++)
5157
    /* Look for input reloads that aren't our two.  */
5158
    if (i != r1 && i != r2 && rld[i].in)
5159
      {
5160
        /* If our reload is mentioned at all, it isn't a simple chain.  */
5161
        if (reg_mentioned_p (rld[r1].in, rld[i].in))
5162
          return false;
5163
      }
5164
  return true;
5165
}
5166
 
5167
/* This recursive function changes all occurrences of WHAT in *WHERE
5168
   to REPL.  */
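/* Usage note, added and not part of the original source: each location
   changed by substitute is pushed onto substitute_stack, so a caller such
   as gen_reload_chain_without_interm_reg_p below can undo the temporary
   replacement by popping the stack and restoring the saved pointers.  */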
5169
static void
5170
substitute (rtx *where, const_rtx what, rtx repl)
5171
{
5172
  const char *fmt;
5173
  int i;
5174
  enum rtx_code code;
5175
 
5176
  if (*where == 0)
5177
    return;
5178
 
5179
  if (*where == what || rtx_equal_p (*where, what))
5180
    {
5181
      /* Record the location of the changed rtx.  */
5182
      VEC_safe_push (rtx_p, heap, substitute_stack, where);
5183
      *where = repl;
5184
      return;
5185
    }
5186
 
5187
  code = GET_CODE (*where);
5188
  fmt = GET_RTX_FORMAT (code);
5189
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5190
    {
5191
      if (fmt[i] == 'E')
5192
        {
5193
          int j;
5194
 
5195
          for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5196
            substitute (&XVECEXP (*where, i, j), what, repl);
5197
        }
5198
      else if (fmt[i] == 'e')
5199
        substitute (&XEXP (*where, i), what, repl);
5200
    }
5201
}
5202
 
5203
/* Return TRUE if the chain of reloads R1 and R2 (in either order) can be
5204
   evaluated without using an intermediate register for the reload that
5205
   contains the other reload.  It is important to read gen_reload to
5206
   understand what this function is trying to do.  As an example,
5207
   consider the reload chain
5208
 
5209
      r2: const
5210
      r1: <something> + const
5211
 
5212
   and suppose reload R2 was given reload reg HR.  The function returns
5213
   true if there is a valid insn HR = HR + <something>.  Otherwise,
5214
   gen_reload will use an intermediate register (namely the reload
5215
   reg for R1) to reload <something>.
5216
 
5217
   We need this function to detect conflicts for chain reloads.  In our
5218
   example, if HR = HR + <something> is not a valid insn, then we cannot
5219
   use HR as a reload register for R2.  If we do use it, then we get
5220
   wrong code:
5221
 
5222
      HR = const
5223
      HR = <something>
5224
      HR = HR + HR
5225
 
5226
*/
5227
static bool
5228
gen_reload_chain_without_interm_reg_p (int r1, int r2)
5229
{
5230
  /* Assume other cases in gen_reload are not possible for
5231
     chain reloads or do need an intermediate hard register.  */
5232
  bool result = true;
5233
  int regno, n, code;
5234
  rtx out, in, tem, insn;
5235
  rtx last = get_last_insn ();
5236
 
5237
  /* Make r2 a component of r1.  */
5238
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5239
    {
5240
      n = r1;
5241
      r1 = r2;
5242
      r2 = n;
5243
    }
5244
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5245
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5246
  gcc_assert (regno >= 0);
5247
  out = gen_rtx_REG (rld[r1].mode, regno);
5248
  in = rld[r1].in;
5249
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5250
 
5251
  /* If IN is a paradoxical SUBREG, remove it and try to put the
5252
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
5253
  if (GET_CODE (in) == SUBREG
5254
      && (GET_MODE_SIZE (GET_MODE (in))
5255
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5256
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5257
    in = SUBREG_REG (in), out = tem;
5258
 
5259
  if (GET_CODE (in) == PLUS
5260
      && (REG_P (XEXP (in, 0))
5261
          || GET_CODE (XEXP (in, 0)) == SUBREG
5262
          || MEM_P (XEXP (in, 0)))
5263
      && (REG_P (XEXP (in, 1))
5264
          || GET_CODE (XEXP (in, 1)) == SUBREG
5265
          || CONSTANT_P (XEXP (in, 1))
5266
          || MEM_P (XEXP (in, 1))))
5267
    {
5268
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5269
      code = recog_memoized (insn);
5270
      result = false;
5271
 
5272
      if (code >= 0)
5273
        {
5274
          extract_insn (insn);
5275
          /* We want constrain_operands to treat this insn strictly in
5276
             its validity determination, i.e., the way it would after
5277
             reload has completed.  */
5278
          result = constrain_operands (1);
5279
        }
5280
 
5281
      delete_insns_since (last);
5282
    }
5283
 
5284
  /* Restore the original value at each changed address within R1.  */
5285
  while (!VEC_empty (rtx_p, substitute_stack))
5286
    {
5287
      rtx *where = VEC_pop (rtx_p, substitute_stack);
5288
      *where = rld[r2].in;
5289
    }
5290
 
5291
  return result;
5292
}
5293
 
5294
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5295
   Return 0 otherwise.
5296
 
5297
   This function uses the same algorithm as reload_reg_free_p above.  */
5298
 
5299
static int
5300
reloads_conflict (int r1, int r2)
5301
{
5302
  enum reload_type r1_type = rld[r1].when_needed;
5303
  enum reload_type r2_type = rld[r2].when_needed;
5304
  int r1_opnum = rld[r1].opnum;
5305
  int r2_opnum = rld[r2].opnum;
5306
 
5307
  /* RELOAD_OTHER conflicts with everything.  */
5308
  if (r2_type == RELOAD_OTHER)
5309
    return 1;
5310
 
5311
  /* Otherwise, check conflicts differently for each type.  */
5312
 
5313
  switch (r1_type)
5314
    {
5315
    case RELOAD_FOR_INPUT:
5316
      return (r2_type == RELOAD_FOR_INSN
5317
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5318
              || r2_type == RELOAD_FOR_OPADDR_ADDR
5319
              || r2_type == RELOAD_FOR_INPUT
5320
              || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5321
                   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5322
                  && r2_opnum > r1_opnum));
5323
 
5324
    case RELOAD_FOR_INPUT_ADDRESS:
5325
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5326
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5327
 
5328
    case RELOAD_FOR_INPADDR_ADDRESS:
5329
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5330
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5331
 
5332
    case RELOAD_FOR_OUTPUT_ADDRESS:
5333
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5334
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5335
 
5336
    case RELOAD_FOR_OUTADDR_ADDRESS:
5337
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5338
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5339
 
5340
    case RELOAD_FOR_OPERAND_ADDRESS:
5341
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5342
              || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5343
                  && (!reloads_unique_chain_p (r1, r2)
5344
                      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5345
 
5346
    case RELOAD_FOR_OPADDR_ADDR:
5347
      return (r2_type == RELOAD_FOR_INPUT
5348
              || r2_type == RELOAD_FOR_OPADDR_ADDR);
5349
 
5350
    case RELOAD_FOR_OUTPUT:
5351
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5352
              || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5353
                   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5354
                  && r2_opnum >= r1_opnum));
5355
 
5356
    case RELOAD_FOR_INSN:
5357
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5358
              || r2_type == RELOAD_FOR_INSN
5359
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5360
 
5361
    case RELOAD_FOR_OTHER_ADDRESS:
5362
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5363
 
5364
    case RELOAD_OTHER:
5365
      return 1;
5366
 
5367
    default:
5368
      gcc_unreachable ();
5369
    }
5370
}
5371
 
5372
/* Indexed by reload number, 1 if incoming value
5373
   inherited from previous insns.  */
5374
static char reload_inherited[MAX_RELOADS];
5375
 
5376
/* For an inherited reload, this is the insn the reload was inherited from,
5377
   if we know it.  Otherwise, this is 0.  */
5378
static rtx reload_inheritance_insn[MAX_RELOADS];
5379
 
5380
/* If nonzero, this is a place to get the value of the reload,
5381
   rather than using reload_in.  */
5382
static rtx reload_override_in[MAX_RELOADS];
5383
 
5384
/* For each reload, the hard register number of the register used,
5385
   or -1 if we did not need a register for this reload.  */
5386
static int reload_spill_index[MAX_RELOADS];
5387
 
5388
/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
5389
static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5390
 
5391
/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
5392
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5393
 
5394
/* Subroutine of free_for_value_p, used to check a single register.
5395
   START_REGNO is the starting regno of the full reload register
5396
   (possibly comprising multiple hard registers) that we are considering.  */
5397
 
5398
static int
5399
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5400
                             enum reload_type type, rtx value, rtx out,
5401
                             int reloadnum, int ignore_address_reloads)
5402
{
5403
  int time1;
5404
  /* Set if we see an input reload that must not share its reload register
5405
     with any new earlyclobber, but might otherwise share the reload
5406
     register with an output or input-output reload.  */
5407
  int check_earlyclobber = 0;
5408
  int i;
5409
  int copy = 0;
5410
 
5411
  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5412
    return 0;
5413
 
5414
  if (out == const0_rtx)
5415
    {
5416
      copy = 1;
5417
      out = NULL_RTX;
5418
    }
5419
 
5420
  /* We use some pseudo 'time' value to check if the lifetimes of the
5421
     new register use would overlap with that of a previous reload
5422
     that is not read-only or uses a different value.
5423
     The 'time' used doesn't have to be linear in any shape or form, just
5424
     monotonic.
5425
     Some reload types use different 'buckets' for each operand.
5426
     So there are MAX_RECOG_OPERANDS different time values for each
5427
     such reload type.
5428
     We compute TIME1 as the time when the register for the prospective
5429
     new reload ceases to be live, and TIME2 for each existing
5430
     reload as the time when the reload register of that reload
5431
     becomes live.
5432
     Where there is little to be gained by exact lifetime calculations,
5433
     we just make conservative assumptions, i.e. a longer lifetime;
5434
     this is done in the 'default:' cases.  */
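  /* Worked example, added for illustration and not part of the original
     comment: with the buckets used below, operand 1's reloads get
         RELOAD_FOR_INPADDR_ADDRESS   time = 1 * 4 + 2 = 6
         RELOAD_FOR_INPUT_ADDRESS     time = 1 * 4 + 3 = 7
         RELOAD_FOR_INPUT             time = 1 * 4 + 4 = 8
     (for TIME1 a non-copy RELOAD_FOR_INPUT instead uses
     MAX_RECOG_OPERANDS * 4 + 3, since the input stays live until the
     insn executes), so the stages of one operand get strictly increasing
     values that cannot collide with another operand's stages.  */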
5435
  switch (type)
5436
    {
5437
    case RELOAD_FOR_OTHER_ADDRESS:
5438
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5439
      time1 = copy ? 0 : 1;
5440
      break;
5441
    case RELOAD_OTHER:
5442
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5443
      break;
5444
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5445
         RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5446
         respectively, to the time values for these, we get distinct time
5447
         values.  To get distinct time values for each operand, we have to
5448
         multiply opnum by at least three.  We round that up to four because
5449
         multiply by four is often cheaper.  */
5450
    case RELOAD_FOR_INPADDR_ADDRESS:
5451
      time1 = opnum * 4 + 2;
5452
      break;
5453
    case RELOAD_FOR_INPUT_ADDRESS:
5454
      time1 = opnum * 4 + 3;
5455
      break;
5456
    case RELOAD_FOR_INPUT:
5457
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5458
         executes (inclusive).  */
5459
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5460
      break;
5461
    case RELOAD_FOR_OPADDR_ADDR:
5462
      /* opnum * 4 + 4
5463
         <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5464
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
5465
      break;
5466
    case RELOAD_FOR_OPERAND_ADDRESS:
5467
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5468
         is executed.  */
5469
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5470
      break;
5471
    case RELOAD_FOR_OUTADDR_ADDRESS:
5472
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5473
      break;
5474
    case RELOAD_FOR_OUTPUT_ADDRESS:
5475
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5476
      break;
5477
    default:
5478
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
5479
    }
5480
 
5481
  for (i = 0; i < n_reloads; i++)
5482
    {
5483
      rtx reg = rld[i].reg_rtx;
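      /* The unsigned subtraction in the test below is a compact range
         check: it holds exactly when REGNO lies within the span of hard
         registers occupied by REG, since a negative difference wraps
         around to a value larger than nregs - 1.  */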
5484
      if (reg && REG_P (reg)
5485
          && ((unsigned) regno - true_regnum (reg)
5486
              <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5487
          && i != reloadnum)
5488
        {
5489
          rtx other_input = rld[i].in;
5490
 
5491
          /* If the other reload loads the same input value, that
5492
             will not cause a conflict only if it's loading it into
5493
             the same register.  */
5494
          if (true_regnum (reg) != start_regno)
5495
            other_input = NULL_RTX;
5496
          if (! other_input || ! rtx_equal_p (other_input, value)
5497
              || rld[i].out || out)
5498
            {
5499
              int time2;
5500
              switch (rld[i].when_needed)
5501
                {
5502
                case RELOAD_FOR_OTHER_ADDRESS:
5503
                  time2 = 0;
5504
                  break;
5505
                case RELOAD_FOR_INPADDR_ADDRESS:
5506
                  /* find_reloads makes sure that a
5507
                     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5508
                     by at most one - the first -
5509
                     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5510
                     address reload is inherited, the address address reload
5511
                     goes away, so we can ignore this conflict.  */
5512
                  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5513
                      && ignore_address_reloads
5514
                      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5515
                         Then the address address is still needed to store
5516
                         back the new address.  */
5517
                      && ! rld[reloadnum].out)
5518
                    continue;
5519
                  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5520
                     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5521
                     reloads go away.  */
5522
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5523
                      && ignore_address_reloads
5524
                      /* Unless we are reloading an auto_inc expression.  */
5525
                      && ! rld[reloadnum].out)
5526
                    continue;
5527
                  time2 = rld[i].opnum * 4 + 2;
5528
                  break;
5529
                case RELOAD_FOR_INPUT_ADDRESS:
5530
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5531
                      && ignore_address_reloads
5532
                      && ! rld[reloadnum].out)
5533
                    continue;
5534
                  time2 = rld[i].opnum * 4 + 3;
5535
                  break;
5536
                case RELOAD_FOR_INPUT:
5537
                  time2 = rld[i].opnum * 4 + 4;
5538
                  check_earlyclobber = 1;
5539
                  break;
5540
                  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5541
                     == MAX_RECOG_OPERANDS * 4  */
5542
                case RELOAD_FOR_OPADDR_ADDR:
5543
                  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5544
                      && ignore_address_reloads
5545
                      && ! rld[reloadnum].out)
5546
                    continue;
5547
                  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5548
                  break;
5549
                case RELOAD_FOR_OPERAND_ADDRESS:
5550
                  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5551
                  check_earlyclobber = 1;
5552
                  break;
5553
                case RELOAD_FOR_INSN:
5554
                  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5555
                  break;
5556
                case RELOAD_FOR_OUTPUT:
5557
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5558
                     instruction is executed.  */
5559
                  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5560
                  break;
5561
                  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5562
                     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5563
                     value.  */
5564
                case RELOAD_FOR_OUTADDR_ADDRESS:
5565
                  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5566
                      && ignore_address_reloads
5567
                      && ! rld[reloadnum].out)
5568
                    continue;
5569
                  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5570
                  break;
5571
                case RELOAD_FOR_OUTPUT_ADDRESS:
5572
                  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5573
                  break;
5574
                case RELOAD_OTHER:
5575
                  /* If there is no conflict in the input part, handle this
5576
                     like an output reload.  */
5577
                  if (! rld[i].in || rtx_equal_p (other_input, value))
5578
                    {
5579
                      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5580
                      /* Earlyclobbered outputs must conflict with inputs.  */
5581
                      if (earlyclobber_operand_p (rld[i].out))
5582
                        time2 = MAX_RECOG_OPERANDS * 4 + 3;
5583
 
5584
                      break;
5585
                    }
5586
                  time2 = 1;
5587
                  /* RELOAD_OTHER might be live beyond instruction execution,
5588
                     but this is not obvious when we set time2 = 1.  So check
5589
                     here if there might be a problem with the new reload
5590
                     clobbering the register used by the RELOAD_OTHER.  */
5591
                  if (out)
5592
                    return 0;
5593
                  break;
5594
                default:
5595
                  return 0;
5596
                }
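              /* The new reload's register ceases to be live at TIME1 and
                 the existing reload's register becomes live at TIME2, so
                 their lifetimes overlap when TIME1 >= TIME2, unless both
                 merely read the same value into this register.  The second
                 clause additionally rejects any existing reload that is
                 live during or after the insn when the new reload has an
                 output part of its own.  */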
5597
              if ((time1 >= time2
5598
                   && (! rld[i].in || rld[i].out
5599
                       || ! rtx_equal_p (other_input, value)))
5600
                  || (out && rld[reloadnum].out_reg
5601
                      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5602
                return 0;
5603
            }
5604
        }
5605
    }
5606
 
5607
  /* Earlyclobbered outputs must conflict with inputs.  */
5608
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
5609
    return 0;
5610
 
5611
  return 1;
5612
}
5613
 
5614
/* Return 1 if the value in reload reg REGNO, as used by a reload
5615
   needed for the part of the insn specified by OPNUM and TYPE,
5616
   may be used to load VALUE into it.
5617
 
5618
   MODE is the mode in which the register is used, this is needed to
5619
   determine how many hard regs to test.
5620
 
5621
   Other read-only reloads with the same value do not conflict
5622
   unless OUT is nonzero and these other reloads have to live while
5623
   output reloads live.
5624
   If OUT is CONST0_RTX, this is a special case: it means that the
5625
   test should not be for using register REGNO as reload register, but
5626
   for copying from register REGNO into the reload register.
5627
 
5628
   RELOADNUM is the number of the reload we want to load this value for;
5629
   a reload does not conflict with itself.
5630
 
5631
   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5632
   reloads that load an address for the very reload we are considering.
5633
 
5634
   The caller has to make sure that there is no conflict with the return
5635
   register.  */
5636
 
5637
static int
5638
free_for_value_p (int regno, enum machine_mode mode, int opnum,
5639
                  enum reload_type type, rtx value, rtx out, int reloadnum,
5640
                  int ignore_address_reloads)
5641
{
5642
  int nregs = hard_regno_nregs[regno][mode];
5643
  while (nregs-- > 0)
5644
    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5645
                                       value, out, reloadnum,
5646
                                       ignore_address_reloads))
5647
      return 0;
5648
  return 1;
5649
}
5650
 
5651
/* Return nonzero if the rtx X is invariant over the current function.  */
5652
/* ??? Actually, the places where we use this expect exactly what is
5653
   tested here, and not everything that is function invariant.  In
5654
   particular, the frame pointer and arg pointer are special cased;
5655
   pic_offset_table_rtx is not, and we must not spill these things to
5656
   memory.  */
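/* For instance, (const_int 4), a SYMBOL_REF, frame_pointer_rtx and
   (plus frame_pointer_rtx (const_int 8)) all count as invariant here,
   while other registers, including pic_offset_table_rtx, do not.  */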
5657
 
5658
int
5659
function_invariant_p (const_rtx x)
5660
{
5661
  if (CONSTANT_P (x))
5662
    return 1;
5663
  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5664
    return 1;
5665
  if (GET_CODE (x) == PLUS
5666
      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5667
      && CONSTANT_P (XEXP (x, 1)))
5668
    return 1;
5669
  return 0;
5670
}
5671
 
5672
/* Determine whether the reload reg X overlaps any rtx'es used for
5673
   overriding inheritance.  Return nonzero if so.  */
5674
 
5675
static int
5676
conflicts_with_override (rtx x)
5677
{
5678
  int i;
5679
  for (i = 0; i < n_reloads; i++)
5680
    if (reload_override_in[i]
5681
        && reg_overlap_mentioned_p (x, reload_override_in[i]))
5682
      return 1;
5683
  return 0;
5684
}
5685
 
5686
/* Give an error message saying we failed to find a reload for INSN,
5687
   and clear out reload R.  */
5688
static void
5689
failed_reload (rtx insn, int r)
5690
{
5691
  if (asm_noperands (PATTERN (insn)) < 0)
5692
    /* It's the compiler's fault.  */
5693
    fatal_insn ("could not find a spill register", insn);
5694
 
5695
  /* It's the user's fault; the operand's mode and constraint
5696
     don't match.  Disable this reload so we don't crash in final.  */
5697
  error_for_asm (insn,
5698
                 "%<asm%> operand constraint incompatible with operand size");
5699
  rld[r].in = 0;
5700
  rld[r].out = 0;
5701
  rld[r].reg_rtx = 0;
5702
  rld[r].optional = 1;
5703
  rld[r].secondary_p = 1;
5704
}
5705
 
5706
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5707
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5708
   successful.  */
5709
static int
5710
set_reload_reg (int i, int r)
5711
{
5712
  int regno;
5713
  rtx reg = spill_reg_rtx[i];
5714
 
5715
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5716
    spill_reg_rtx[i] = reg
5717
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5718
 
5719
  regno = true_regnum (reg);
5720
 
5721
  /* Detect when the reload reg can't hold the reload mode.
5722
     This used to be one `if', but Sequent compiler can't handle that.  */
5723
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5724
    {
5725
      enum machine_mode test_mode = VOIDmode;
5726
      if (rld[r].in)
5727
        test_mode = GET_MODE (rld[r].in);
5728
      /* If rld[r].in has VOIDmode, it means we will load it
5729
         in whatever mode the reload reg has: to wit, rld[r].mode.
5730
         We have already tested that for validity.  */
5731
      /* Aside from that, we need to test that the expressions
5732
         to reload from or into have modes which are valid for this
5733
         reload register.  Otherwise the reload insns would be invalid.  */
5734
      if (! (rld[r].in != 0 && test_mode != VOIDmode
5735
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5736
        if (! (rld[r].out != 0
5737
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5738
          {
5739
            /* The reg is OK.  */
5740
            last_spill_reg = i;
5741
 
5742
            /* Mark as in use for this insn the reload regs we use
5743
               for this.  */
5744
            mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5745
                                    rld[r].when_needed, rld[r].mode);
5746
 
5747
            rld[r].reg_rtx = reg;
5748
            reload_spill_index[r] = spill_regs[i];
5749
            return 1;
5750
          }
5751
    }
5752
  return 0;
5753
}
5754
 
5755
/* Find a spill register to use as a reload register for reload R.
5756
   LAST_RELOAD is nonzero if this is the last reload for the insn being
5757
   processed.
5758
 
5759
   Set rld[R].reg_rtx to the register allocated.
5760
 
5761
   We return 1 if successful, or 0 if we couldn't find a spill reg and
5762
   we didn't change anything.  */
5763
 
5764
static int
5765
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5766
                     int last_reload)
5767
{
5768
  int i, pass, count;
5769
 
5770
  /* If we put this reload ahead, thinking it is a group,
5771
     then insist on finding a group.  Otherwise we can grab a
5772
     reg that some other reload needs.
5773
     (That can happen when we have a 68000 DATA_OR_FP_REG
5774
     which is a group of data regs or one fp reg.)
5775
     We need not be so restrictive if there are no more reloads
5776
     for this insn.
5777
 
5778
     ??? Really it would be nicer to have smarter handling
5779
     for that kind of reg class, where a problem like this is normal.
5780
     Perhaps those classes should be avoided for reloading
5781
     by use of more alternatives.  */
5782
 
5783
  int force_group = rld[r].nregs > 1 && ! last_reload;
5784
 
5785
  /* If we want a single register and haven't yet found one,
5786
     take any reg in the right class and not in use.
5787
     If we want a consecutive group, here is where we look for it.
5788
 
5789
     We use two passes so we can first look for reload regs to
5790
     reuse, which are already in use for other reloads in this insn,
5791
     and only then use additional registers.
5792
     I think that maximizing reuse is needed to make sure we don't
5793
     run out of reload regs.  Suppose we have three reloads, and
5794
     reloads A and B can share regs.  These need two regs.
5795
     Suppose A and B are given different regs.
5796
     That leaves none for C.  */
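  /* On the first pass we only consider spill registers that are already
     in use by some other reload of this insn and not reserved for an
     inherited value, so reloads share registers where they can; the
     second pass then accepts any otherwise free spill register.  */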
5797
  for (pass = 0; pass < 2; pass++)
5798
    {
5799
      /* I is the index in spill_regs.
5800
         We advance it round-robin between insns to use all spill regs
5801
         equally, so that inherited reloads have a chance
5802
         of leapfrogging each other.  */
5803
 
5804
      i = last_spill_reg;
5805
 
5806
      for (count = 0; count < n_spills; count++)
5807
        {
5808
          int rclass = (int) rld[r].rclass;
5809
          int regnum;
5810
 
5811
          i++;
5812
          if (i >= n_spills)
5813
            i -= n_spills;
5814
          regnum = spill_regs[i];
5815
 
5816
          if ((reload_reg_free_p (regnum, rld[r].opnum,
5817
                                  rld[r].when_needed)
5818
               || (rld[r].in
5819
                   /* We check reload_reg_used to make sure we
5820
                      don't clobber the return register.  */
5821
                   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5822
                   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5823
                                        rld[r].when_needed, rld[r].in,
5824
                                        rld[r].out, r, 1)))
5825
              && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
5826
              && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5827
              /* Look first for regs to share, then for unshared.  But
5828
                 don't share regs used for inherited reloads; they are
5829
                 the ones we want to preserve.  */
5830
              && (pass
5831
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5832
                                         regnum)
5833
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5834
                                              regnum))))
5835
            {
5836
              int nr = hard_regno_nregs[regnum][rld[r].mode];
5837
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5838
                 (on 68000) got us two FP regs.  If NR is 1,
5839
                 we would reject both of them.  */
5840
              if (force_group)
5841
                nr = rld[r].nregs;
5842
              /* If we need only one reg, we have already won.  */
5843
              if (nr == 1)
5844
                {
5845
                  /* But reject a single reg if we demand a group.  */
5846
                  if (force_group)
5847
                    continue;
5848
                  break;
5849
                }
5850
              /* Otherwise check that as many consecutive regs as we need
5851
                 are available here.  */
5852
              while (nr > 1)
5853
                {
5854
                  int regno = regnum + nr - 1;
5855
                  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
5856
                        && spill_reg_order[regno] >= 0
5857
                        && reload_reg_free_p (regno, rld[r].opnum,
5858
                                              rld[r].when_needed)))
5859
                    break;
5860
                  nr--;
5861
                }
5862
              if (nr == 1)
5863
                break;
5864
            }
5865
        }
5866
 
5867
      /* If we found something on pass 1, omit pass 2.  */
5868
      if (count < n_spills)
5869
        break;
5870
    }
5871
 
5872
  /* We should have found a spill register by now.  */
5873
  if (count >= n_spills)
5874
    return 0;
5875
 
5876
  /* I is the index in SPILL_REG_RTX of the reload register we are to
5877
     allocate.  Get an rtx for it and find its register number.  */
5878
 
5879
  return set_reload_reg (i, r);
5880
}
5881
 
5882
/* Initialize all the tables needed to allocate reload registers.
5883
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5884
   is the array we use to restore the reg_rtx field for every reload.  */
5885
 
5886
static void
5887
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5888
{
5889
  int i;
5890
 
5891
  for (i = 0; i < n_reloads; i++)
5892
    rld[i].reg_rtx = save_reload_reg_rtx[i];
5893
 
5894
  memset (reload_inherited, 0, MAX_RELOADS);
5895
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5896
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5897
 
5898
  CLEAR_HARD_REG_SET (reload_reg_used);
5899
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5900
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5901
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5902
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5903
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5904
 
5905
  CLEAR_HARD_REG_SET (reg_used_in_insn);
5906
  {
5907
    HARD_REG_SET tmp;
5908
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5909
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5910
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5911
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5912
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5913
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5914
  }
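  /* reg_used_in_insn now holds every hard register that is live through
     this insn or set or dying in it, including registers occupied by
     allocated pseudos; choose_reload_regs consults it before reusing such
     a register for a reload with an output part.  */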
5915
 
5916
  for (i = 0; i < reload_n_operands; i++)
5917
    {
5918
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5919
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5920
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5921
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5922
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5923
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5924
    }
5925
 
5926
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5927
 
5928
  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5929
 
5930
  for (i = 0; i < n_reloads; i++)
5931
    /* If we have already decided to use a certain register,
5932
       don't use it in another way.  */
5933
    if (rld[i].reg_rtx)
5934
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5935
                              rld[i].when_needed, rld[i].mode);
5936
}
5937
 
5938
/* Assign hard reg targets for the pseudo-registers we must reload
5939
   into hard regs for this insn.
5940
   Also output the instructions to copy them in and out of the hard regs.
5941
 
5942
   For machines with register classes, we are responsible for
5943
   finding a reload reg in the proper class.  */
5944
 
5945
static void
5946
choose_reload_regs (struct insn_chain *chain)
5947
{
5948
  rtx insn = chain->insn;
5949
  int i, j;
5950
  unsigned int max_group_size = 1;
5951
  enum reg_class group_class = NO_REGS;
5952
  int pass, win, inheritance;
5953
 
5954
  rtx save_reload_reg_rtx[MAX_RELOADS];
5955
 
5956
  /* In order to be certain of getting the registers we need,
5957
     we must sort the reloads into order of increasing register class.
5958
     Then our grabbing of reload registers will parallel the process
5959
     that provided the reload registers.
5960
 
5961
     Also note whether any of the reloads wants a consecutive group of regs.
5962
     If so, record the maximum size of the group desired and what
5963
     register class contains all the groups needed by this insn.  */
5964
 
5965
  for (j = 0; j < n_reloads; j++)
5966
    {
5967
      reload_order[j] = j;
5968
      if (rld[j].reg_rtx != NULL_RTX)
5969
        {
5970
          gcc_assert (REG_P (rld[j].reg_rtx)
5971
                      && HARD_REGISTER_P (rld[j].reg_rtx));
5972
          reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5973
        }
5974
      else
5975
        reload_spill_index[j] = -1;
5976
 
5977
      if (rld[j].nregs > 1)
5978
        {
5979
          max_group_size = MAX (rld[j].nregs, max_group_size);
5980
          group_class
5981
            = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
5982
        }
5983
 
5984
      save_reload_reg_rtx[j] = rld[j].reg_rtx;
5985
    }
5986
 
5987
  if (n_reloads > 1)
5988
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5989
 
5990
  /* If -O, try first with inheritance, then turning it off.
5991
     If not -O, don't do inheritance.
5992
     Using inheritance when not optimizing leads to paradoxes
5993
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5994
     because one side of the comparison might be inherited.  */
5995
  win = 0;
5996
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5997
    {
5998
      choose_reload_regs_init (chain, save_reload_reg_rtx);
5999
 
6000
      /* Process the reloads in order of preference just found.
6001
         Beyond this point, subregs can be found in reload_reg_rtx.
6002
 
6003
         This used to look for an existing reloaded home for all of the
6004
         reloads, and only then perform any new reloads.  But that could lose
6005
         if the reloads were done out of reg-class order because a later
6006
         reload with a looser constraint might have an old home in a register
6007
         needed by an earlier reload with a tighter constraint.
6008
 
6009
         To solve this, we make two passes over the reloads, in the order
6010
         described above.  In the first pass we try to inherit a reload
6011
         from a previous insn.  If there is a later reload that needs a
6012
         class that is a proper subset of the class being processed, we must
6013
         also allocate a spill register during the first pass.
6014
 
6015
         Then make a second pass over the reloads to allocate any reloads
6016
         that haven't been given registers yet.  */
6017
 
6018
      for (j = 0; j < n_reloads; j++)
6019
        {
6020
          int r = reload_order[j];
6021
          rtx search_equiv = NULL_RTX;
6022
 
6023
          /* Ignore reloads that got marked inoperative.  */
6024
          if (rld[r].out == 0 && rld[r].in == 0
6025
              && ! rld[r].secondary_p)
6026
            continue;
6027
 
6028
          /* If find_reloads chose to use reload_in or reload_out as a reload
6029
             register, we don't need to choose one.  Otherwise, try even if it
6030
             found one since we might save an insn if we find the value lying
6031
             around.
6032
             Try also when reload_in is a pseudo without a hard reg.  */
6033
          if (rld[r].in != 0 && rld[r].reg_rtx != 0
6034
              && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6035
                  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6036
                      && !MEM_P (rld[r].in)
6037
                      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6038
            continue;
6039
 
6040
#if 0 /* No longer needed for correct operation.
6041
         It might give better code, or might not; worth an experiment?  */
6042
          /* If this is an optional reload, we can't inherit from earlier insns
6043
             until we are sure that any non-optional reloads have been allocated.
6044
             The following code takes advantage of the fact that optional reloads
6045
             are at the end of reload_order.  */
6046
          if (rld[r].optional != 0)
6047
            for (i = 0; i < j; i++)
6048
              if ((rld[reload_order[i]].out != 0
6049
                   || rld[reload_order[i]].in != 0
6050
                   || rld[reload_order[i]].secondary_p)
6051
                  && ! rld[reload_order[i]].optional
6052
                  && rld[reload_order[i]].reg_rtx == 0)
6053
                allocate_reload_reg (chain, reload_order[i], 0);
6054
#endif
6055
 
6056
          /* First see if this pseudo is already available as reloaded
6057
             for a previous insn.  We cannot try to inherit for reloads
6058
             that are smaller than the maximum number of registers needed
6059
             for groups unless the register we would allocate cannot be used
6060
             for the groups.
6061
 
6062
             We could check here to see if this is a secondary reload for
6063
             an object that is already in a register of the desired class.
6064
             This would avoid the need for the secondary reload register.
6065
             But this is complex because we can't easily determine what
6066
             objects might want to be loaded via this reload.  So let a
6067
             register be allocated here.  In `emit_reload_insns' we suppress
6068
             one of the loads in the case described above.  */
6069
 
6070
          if (inheritance)
6071
            {
6072
              int byte = 0;
6073
              int regno = -1;
6074
              enum machine_mode mode = VOIDmode;
6075
 
6076
              if (rld[r].in == 0)
6077
                ;
6078
              else if (REG_P (rld[r].in))
6079
                {
6080
                  regno = REGNO (rld[r].in);
6081
                  mode = GET_MODE (rld[r].in);
6082
                }
6083
              else if (REG_P (rld[r].in_reg))
6084
                {
6085
                  regno = REGNO (rld[r].in_reg);
6086
                  mode = GET_MODE (rld[r].in_reg);
6087
                }
6088
              else if (GET_CODE (rld[r].in_reg) == SUBREG
6089
                       && REG_P (SUBREG_REG (rld[r].in_reg)))
6090
                {
6091
                  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6092
                  if (regno < FIRST_PSEUDO_REGISTER)
6093
                    regno = subreg_regno (rld[r].in_reg);
6094
                  else
6095
                    byte = SUBREG_BYTE (rld[r].in_reg);
6096
                  mode = GET_MODE (rld[r].in_reg);
6097
                }
6098
#ifdef AUTO_INC_DEC
6099
              else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6100
                       && REG_P (XEXP (rld[r].in_reg, 0)))
6101
                {
6102
                  regno = REGNO (XEXP (rld[r].in_reg, 0));
6103
                  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6104
                  rld[r].out = rld[r].in;
6105
                }
6106
#endif
6107
#if 0
6108
              /* This won't work, since REGNO can be a pseudo reg number.
6109
                 Also, it takes much more hair to keep track of all the things
6110
                 that can invalidate an inherited reload of part of a pseudoreg.  */
6111
              else if (GET_CODE (rld[r].in) == SUBREG
6112
                       && REG_P (SUBREG_REG (rld[r].in)))
6113
                regno = subreg_regno (rld[r].in);
6114
#endif
6115
 
6116
              if (regno >= 0
6117
                  && reg_last_reload_reg[regno] != 0
6118
#ifdef CANNOT_CHANGE_MODE_CLASS
6119
                  /* Verify that the register it's in can be used in
6120
                     mode MODE.  */
6121
                  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6122
                                                GET_MODE (reg_last_reload_reg[regno]),
6123
                                                mode)
6124
#endif
6125
                  )
6126
                {
6127
                  enum reg_class rclass = rld[r].rclass, last_class;
6128
                  rtx last_reg = reg_last_reload_reg[regno];
6129
                  enum machine_mode need_mode;
6130
 
6131
                  i = REGNO (last_reg);
6132
                  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6133
                  last_class = REGNO_REG_CLASS (i);
6134
 
6135
                  if (byte == 0)
6136
                    need_mode = mode;
6137
                  else
6138
                    need_mode
6139
                      = smallest_mode_for_size
6140
                        (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6141
                         GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6142
                         ? MODE_INT : GET_MODE_CLASS (mode));
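                   /* NEED_MODE is wide enough to cover both MODE and the
                      subreg's byte offset, so the size comparison below
                      checks whether the previously reloaded register can
                      supply the whole subreg.  */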
6143
 
6144
                  if ((GET_MODE_SIZE (GET_MODE (last_reg))
6145
                       >= GET_MODE_SIZE (need_mode))
6146
                      && reg_reloaded_contents[i] == regno
6147
                      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6148
                      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6149
                      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6150
                          /* Even if we can't use this register as a reload
6151
                             register, we might use it for reload_override_in,
6152
                             if copying it to the desired class is cheap
6153
                             enough.  */
6154
                          || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6155
                               < MEMORY_MOVE_COST (mode, rclass, 1))
6156
                              && (secondary_reload_class (1, rclass, mode,
6157
                                                          last_reg)
6158
                                  == NO_REGS)
6159
#ifdef SECONDARY_MEMORY_NEEDED
6160
                              && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6161
                                                            mode)
6162
#endif
6163
                              ))
6164
 
6165
                      && (rld[r].nregs == max_group_size
6166
                          || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6167
                                                  i))
6168
                      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6169
                                           rld[r].when_needed, rld[r].in,
6170
                                           const0_rtx, r, 1))
6171
                    {
6172
                      /* If a group is needed, verify that all the subsequent
6173
                         registers still have their values intact.  */
6174
                      int nr = hard_regno_nregs[i][rld[r].mode];
6175
                      int k;
6176
 
6177
                      for (k = 1; k < nr; k++)
6178
                        if (reg_reloaded_contents[i + k] != regno
6179
                            || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6180
                          break;
6181
 
6182
                      if (k == nr)
6183
                        {
6184
                          int i1;
6185
                          int bad_for_class;
6186
 
6187
                          last_reg = (GET_MODE (last_reg) == mode
6188
                                      ? last_reg : gen_rtx_REG (mode, i));
6189
 
6190
                          bad_for_class = 0;
6191
                          for (k = 0; k < nr; k++)
6192
                            bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6193
                                                                  i+k);
6194
 
6195
                          /* We found a register that contains the
6196
                             value we need.  If this register is the
6197
                             same as an `earlyclobber' operand of the
6198
                             current insn, just mark it as a place to
6199
                             reload from since we can't use it as the
6200
                             reload register itself.  */
6201
 
6202
                          for (i1 = 0; i1 < n_earlyclobbers; i1++)
6203
                            if (reg_overlap_mentioned_for_reload_p
6204
                                (reg_last_reload_reg[regno],
6205
                                 reload_earlyclobbers[i1]))
6206
                              break;
6207
 
6208
                          if (i1 != n_earlyclobbers
6209
                              || ! (free_for_value_p (i, rld[r].mode,
6210
                                                      rld[r].opnum,
6211
                                                      rld[r].when_needed, rld[r].in,
6212
                                                      rld[r].out, r, 1))
6213
                              /* Don't use it if we'd clobber a pseudo reg.  */
6214
                              || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6215
                                  && rld[r].out
6216
                                  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6217
                              /* Don't clobber the frame pointer.  */
6218
                              || (i == HARD_FRAME_POINTER_REGNUM
6219
                                  && frame_pointer_needed
6220
                                  && rld[r].out)
6221
                              /* Don't really use the inherited spill reg
6222
                                 if we need it wider than we've got it.  */
6223
                              || (GET_MODE_SIZE (rld[r].mode)
6224
                                  > GET_MODE_SIZE (mode))
6225
                              || bad_for_class
6226
 
6227
                              /* If find_reloads chose reload_out as reload
6228
                                 register, stay with it - that leaves the
6229
                                 inherited register for subsequent reloads.  */
6230
                              || (rld[r].out && rld[r].reg_rtx
6231
                                  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6232
                            {
6233
                              if (! rld[r].optional)
6234
                                {
6235
                                  reload_override_in[r] = last_reg;
6236
                                  reload_inheritance_insn[r]
6237
                                    = reg_reloaded_insn[i];
6238
                                }
6239
                            }
6240
                          else
6241
                            {
6242
                              int k;
6243
                              /* We can use this as a reload reg.  */
6244
                              /* Mark the register as in use for this part of
6245
                                 the insn.  */
6246
                              mark_reload_reg_in_use (i,
6247
                                                      rld[r].opnum,
6248
                                                      rld[r].when_needed,
6249
                                                      rld[r].mode);
6250
                              rld[r].reg_rtx = last_reg;
6251
                              reload_inherited[r] = 1;
6252
                              reload_inheritance_insn[r]
6253
                                = reg_reloaded_insn[i];
6254
                              reload_spill_index[r] = i;
6255
                              for (k = 0; k < nr; k++)
6256
                                SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6257
                                                  i + k);
6258
                            }
6259
                        }
6260
                    }
6261
                }
6262
            }
6263
 
6264
          /* Here's another way to see if the value is already lying around.  */
6265
          if (inheritance
6266
              && rld[r].in != 0
6267
              && ! reload_inherited[r]
6268
              && rld[r].out == 0
6269
              && (CONSTANT_P (rld[r].in)
6270
                  || GET_CODE (rld[r].in) == PLUS
6271
                  || REG_P (rld[r].in)
6272
                  || MEM_P (rld[r].in))
6273
              && (rld[r].nregs == max_group_size
6274
                  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6275
            search_equiv = rld[r].in;
6276
          /* If this is an output reload from a simple move insn, check
6277
             whether an equivalence for the input is available.  */
6278
          else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6279
            {
6280
              rtx set = single_set (insn);
6281
 
6282
              if (set
6283
                  && rtx_equal_p (rld[r].out, SET_DEST (set))
6284
                  && CONSTANT_P (SET_SRC (set)))
6285
                search_equiv = SET_SRC (set);
6286
            }
6287
 
6288
          if (search_equiv)
6289
            {
6290
              rtx equiv
6291
                = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6292
                                  -1, NULL, 0, rld[r].mode);
6293
              int regno = 0;
6294
 
6295
              if (equiv != 0)
6296
                {
6297
                  if (REG_P (equiv))
6298
                    regno = REGNO (equiv);
6299
                  else
6300
                    {
6301
                      /* This must be a SUBREG of a hard register.
6302
                         Make a new REG since this might be used in an
6303
                         address and not all machines support SUBREGs
6304
                         there.  */
6305
                      gcc_assert (GET_CODE (equiv) == SUBREG);
6306
                      regno = subreg_regno (equiv);
6307
                      equiv = gen_rtx_REG (rld[r].mode, regno);
6308
                      /* If we choose EQUIV as the reload register, but the
6309
                         loop below decides to cancel the inheritance, we'll
6310
                         end up reloading EQUIV in rld[r].mode, not the mode
6311
                         it had originally.  That isn't safe when EQUIV isn't
6312
                         available as a spill register since its value might
6313
                         still be live at this point.  */
6314
                      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6315
                        if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6316
                          equiv = 0;
6317
                    }
6318
                }
6319
 
6320
              /* If we found a spill reg, reject it unless it is free
6321
                 and of the desired class.  */
6322
              if (equiv != 0)
6323
                {
6324
                  int regs_used = 0;
6325
                  int bad_for_class = 0;
6326
                  int max_regno = regno + rld[r].nregs;
6327
 
6328
                  for (i = regno; i < max_regno; i++)
6329
                    {
6330
                      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6331
                                                      i);
6332
                      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6333
                                                           i);
6334
                    }
6335
 
6336
                  if ((regs_used
6337
                       && ! free_for_value_p (regno, rld[r].mode,
6338
                                              rld[r].opnum, rld[r].when_needed,
6339
                                              rld[r].in, rld[r].out, r, 1))
6340
                      || bad_for_class)
6341
                    equiv = 0;
6342
                }
6343
 
6344
              if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6345
                equiv = 0;
6346
 
6347
              /* We found a register that contains the value we need.
6348
                 If this register is the same as an `earlyclobber' operand
6349
                 of the current insn, just mark it as a place to reload from
6350
                 since we can't use it as the reload register itself.  */
6351
 
6352
              if (equiv != 0)
6353
                for (i = 0; i < n_earlyclobbers; i++)
6354
                  if (reg_overlap_mentioned_for_reload_p (equiv,
6355
                                                          reload_earlyclobbers[i]))
6356
                    {
6357
                      if (! rld[r].optional)
6358
                        reload_override_in[r] = equiv;
6359
                      equiv = 0;
6360
                      break;
6361
                    }
6362
 
6363
              /* If the equiv register we have found is explicitly clobbered
6364
                 in the current insn, it depends on the reload type if we
6365
                 can use it, use it for reload_override_in, or not at all.
6366
                 In particular, we then can't use EQUIV for a
6367
                 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6368
 
6369
              if (equiv != 0)
6370
                {
6371
                  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6372
                    switch (rld[r].when_needed)
6373
                      {
6374
                      case RELOAD_FOR_OTHER_ADDRESS:
6375
                      case RELOAD_FOR_INPADDR_ADDRESS:
6376
                      case RELOAD_FOR_INPUT_ADDRESS:
6377
                      case RELOAD_FOR_OPADDR_ADDR:
6378
                        break;
6379
                      case RELOAD_OTHER:
6380
                      case RELOAD_FOR_INPUT:
6381
                      case RELOAD_FOR_OPERAND_ADDRESS:
6382
                        if (! rld[r].optional)
6383
                          reload_override_in[r] = equiv;
6384
                        /* Fall through.  */
6385
                      default:
6386
                        equiv = 0;
6387
                        break;
6388
                      }
6389
                  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6390
                    switch (rld[r].when_needed)
6391
                      {
6392
                      case RELOAD_FOR_OTHER_ADDRESS:
6393
                      case RELOAD_FOR_INPADDR_ADDRESS:
6394
                      case RELOAD_FOR_INPUT_ADDRESS:
6395
                      case RELOAD_FOR_OPADDR_ADDR:
6396
                      case RELOAD_FOR_OPERAND_ADDRESS:
6397
                      case RELOAD_FOR_INPUT:
6398
                        break;
6399
                      case RELOAD_OTHER:
6400
                        if (! rld[r].optional)
6401
                          reload_override_in[r] = equiv;
6402
                        /* Fall through.  */
6403
                      default:
6404
                        equiv = 0;
6405
                        break;
6406
                      }
6407
                }
6408
 
6409
              /* If we found an equivalent reg, say no code need be generated
6410
                 to load it, and use it as our reload reg.  */
6411
              if (equiv != 0
6412
                  && (regno != HARD_FRAME_POINTER_REGNUM
6413
                      || !frame_pointer_needed))
6414
                {
6415
                  int nr = hard_regno_nregs[regno][rld[r].mode];
6416
                  int k;
6417
                  rld[r].reg_rtx = equiv;
6418
                  reload_spill_index[r] = regno;
6419
                  reload_inherited[r] = 1;
6420
 
6421
                  /* If reg_reloaded_valid is not set for this register,
6422
                     there might be a stale spill_reg_store lying around.
6423
                     We must clear it, since otherwise emit_reload_insns
6424
                     might delete the store.  */
6425
                  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6426
                    spill_reg_store[regno] = NULL_RTX;
6427
                  /* If any of the hard registers in EQUIV are spill
6428
                     registers, mark them as in use for this insn.  */
6429
                  for (k = 0; k < nr; k++)
6430
                    {
6431
                      i = spill_reg_order[regno + k];
6432
                      if (i >= 0)
6433
                        {
6434
                          mark_reload_reg_in_use (regno, rld[r].opnum,
6435
                                                  rld[r].when_needed,
6436
                                                  rld[r].mode);
6437
                          SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6438
                                            regno + k);
6439
                        }
6440
                    }
6441
                }
6442
            }
6443
 
6444
          /* If we found a register to use already, or if this is an optional
6445
             reload, we are done.  */
6446
          if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6447
            continue;
6448
 
6449
#if 0
6450
          /* No longer needed for correct operation.  Might or might
6451
             not give better code on the average.  Want to experiment?  */
6452
 
6453
          /* See if there is a later reload that has a class different from our
6454
             class that intersects our class or that requires fewer registers
6455
             than our reload.  If so, we must allocate a register to this
6456
             reload now, since that reload might inherit a previous reload
6457
             and take the only available register in our class.  Don't do this
6458
             for optional reloads since they will force all previous reloads
6459
             to be allocated.  Also don't do this for reloads that have been
6460
             turned off.  */
6461
 
6462
          for (i = j + 1; i < n_reloads; i++)
6463
            {
6464
              int s = reload_order[i];
6465
 
6466
              if ((rld[s].in == 0 && rld[s].out == 0
6467
                   && ! rld[s].secondary_p)
6468
                  || rld[s].optional)
6469
                continue;
6470
 
6471
              if ((rld[s].rclass != rld[r].rclass
6472
                   && reg_classes_intersect_p (rld[r].rclass,
6473
                                               rld[s].rclass))
6474
                  || rld[s].nregs < rld[r].nregs)
6475
                break;
6476
            }
6477
 
6478
          if (i == n_reloads)
6479
            continue;
6480
 
6481
          allocate_reload_reg (chain, r, j == n_reloads - 1);
6482
#endif
6483
        }
6484
 
6485
      /* Now allocate reload registers for anything non-optional that
6486
         didn't get one yet.  */
6487
      for (j = 0; j < n_reloads; j++)
6488
        {
6489
          int r = reload_order[j];
6490
 
6491
          /* Ignore reloads that got marked inoperative.  */
6492
          if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6493
            continue;
6494
 
6495
          /* Skip reloads that already have a register allocated or are
6496
             optional.  */
6497
          if (rld[r].reg_rtx != 0 || rld[r].optional)
6498
            continue;
6499
 
6500
          if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6501
            break;
6502
        }
6503
 
6504
      /* If that loop got all the way, we have won.  */
6505
      if (j == n_reloads)
6506
        {
6507
          win = 1;
6508
          break;
6509
        }
6510
 
6511
      /* Loop around and try without any inheritance.  */
6512
    }
6513
 
6514
  if (! win)
6515
    {
6516
      /* First undo everything done by the failed attempt
6517
         to allocate with inheritance.  */
6518
      choose_reload_regs_init (chain, save_reload_reg_rtx);
6519
 
6520
      /* Some sanity tests to verify that the reloads found in the first
6521
         pass are identical to the ones we have now.  */
6522
      gcc_assert (chain->n_reloads == n_reloads);
6523
 
6524
      for (i = 0; i < n_reloads; i++)
6525
        {
6526
          if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6527
            continue;
6528
          gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6529
          for (j = 0; j < n_spills; j++)
6530
            if (spill_regs[j] == chain->rld[i].regno)
6531
              if (! set_reload_reg (j, i))
6532
                failed_reload (chain->insn, i);
6533
        }
6534
    }
6535
 
6536
  /* If we thought we could inherit a reload, because it seemed that
6537
     nothing else wanted the same reload register earlier in the insn,
6538
     verify that assumption, now that all reloads have been assigned.
6539
     Likewise for reloads where reload_override_in has been set.  */
6540
 
6541
  /* If doing expensive optimizations, do one preliminary pass that doesn't
6542
     cancel any inheritance, but removes reloads that have been needed only
6543
     for reloads that we know can be inherited.  */
6544
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6545
    {
6546
      for (j = 0; j < n_reloads; j++)
6547
        {
6548
          int r = reload_order[j];
6549
          rtx check_reg;
6550
          if (reload_inherited[r] && rld[r].reg_rtx)
6551
            check_reg = rld[r].reg_rtx;
6552
          else if (reload_override_in[r]
6553
                   && (REG_P (reload_override_in[r])
6554
                       || GET_CODE (reload_override_in[r]) == SUBREG))
6555
            check_reg = reload_override_in[r];
6556
          else
6557
            continue;
6558
          if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6559
                                  rld[r].opnum, rld[r].when_needed, rld[r].in,
6560
                                  (reload_inherited[r]
6561
                                   ? rld[r].out : const0_rtx),
6562
                                  r, 1))
6563
            {
6564
              if (pass)
6565
                continue;
6566
              reload_inherited[r] = 0;
6567
              reload_override_in[r] = 0;
6568
            }
6569
          /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6570
             reload_override_in, then we do not need its related
6571
             RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6572
             likewise for other reload types.
6573
             We handle this by removing a reload when its only replacement
6574
             is mentioned in reload_in of the reload we are going to inherit.
6575
             A special case is auto_inc expressions; even if the input is
6576
             inherited, we still need the address for the output.  We can
6577
             recognize them because they have RELOAD_OUT set to RELOAD_IN.
6578
             If we succeeded removing some reload and we are doing a preliminary
6579
             pass just to remove such reloads, make another pass, since the
6580
             removal of one reload might allow us to inherit another one.  */
6581
          else if (rld[r].in
6582
                   && rld[r].out != rld[r].in
6583
                   && remove_address_replacements (rld[r].in) && pass)
6584
            pass = 2;
6585
        }
6586
    }
6587
 
6588
  /* Now that reload_override_in is known valid,
6589
     actually override reload_in.  */
6590
  for (j = 0; j < n_reloads; j++)
6591
    if (reload_override_in[j])
6592
      rld[j].in = reload_override_in[j];
6593
 
6594
  /* If this reload won't be done because it has been canceled or is
6595
     optional and not inherited, clear reload_reg_rtx so other
6596
     routines (such as subst_reloads) don't get confused.  */
6597
  for (j = 0; j < n_reloads; j++)
6598
    if (rld[j].reg_rtx != 0
6599
        && ((rld[j].optional && ! reload_inherited[j])
6600
            || (rld[j].in == 0 && rld[j].out == 0
6601
                && ! rld[j].secondary_p)))
6602
      {
6603
        int regno = true_regnum (rld[j].reg_rtx);
6604
 
6605
        if (spill_reg_order[regno] >= 0)
6606
          clear_reload_reg_in_use (regno, rld[j].opnum,
6607
                                   rld[j].when_needed, rld[j].mode);
6608
        rld[j].reg_rtx = 0;
6609
        reload_spill_index[j] = -1;
6610
      }
6611
 
6612
  /* Record which pseudos and which spill regs have output reloads.  */
6613
  for (j = 0; j < n_reloads; j++)
6614
    {
6615
      int r = reload_order[j];
6616
 
6617
      i = reload_spill_index[r];
6618
 
6619
      /* I is nonneg if this reload uses a register.
6620
         If rld[r].reg_rtx is 0, this is an optional reload
6621
         that we opted to ignore.  */
6622
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6623
          && rld[r].reg_rtx != 0)
6624
        {
6625
          int nregno = REGNO (rld[r].out_reg);
6626
          int nr = 1;
6627
 
6628
          if (nregno < FIRST_PSEUDO_REGISTER)
6629
            nr = hard_regno_nregs[nregno][rld[r].mode];
6630
 
6631
          while (--nr >= 0)
6632
            SET_REGNO_REG_SET (&reg_has_output_reload,
6633
                               nregno + nr);
6634
 
6635
          if (i >= 0)
6636
            {
6637
              nr = hard_regno_nregs[i][rld[r].mode];
6638
              while (--nr >= 0)
6639
                SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6640
            }
6641
 
6642
          gcc_assert (rld[r].when_needed == RELOAD_OTHER
6643
                      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6644
                      || rld[r].when_needed == RELOAD_FOR_INSN);
6645
        }
6646
    }
6647
}
6648
 
6649
/* Deallocate the reload register for reload R.  This is called from
6650
   remove_address_replacements.  */
6651
 
6652
void
6653
deallocate_reload_reg (int r)
6654
{
6655
  int regno;
6656
 
6657
  if (! rld[r].reg_rtx)
6658
    return;
6659
  regno = true_regnum (rld[r].reg_rtx);
6660
  rld[r].reg_rtx = 0;
6661
  if (spill_reg_order[regno] >= 0)
6662
    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6663
                             rld[r].mode);
6664
  reload_spill_index[r] = -1;
6665
}
6666
 
6667
/* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6668
   reloads of the same item for fear that we might not have enough reload
6669
   registers. However, normally they will get the same reload register
6670
   and hence actually need not be loaded twice.
6671
 
6672
   Here we check for the most common case of this phenomenon: when we have
6673
   a number of reloads for the same object, each of which was allocated
6674
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6675
   reload, and is not modified in the insn itself.  If we find such,
6676
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
6677
   This will not increase the number of spill registers needed and will
6678
   prevent redundant code.  */
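/* For instance, if the same pseudo value is reloaded both as an input
   operand and as part of an address, and both reloads were handed the
   same reload register, the two can be collapsed into a single
   RELOAD_OTHER reload that loads the value only once.  */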
6679
 
6680
static void
6681
merge_assigned_reloads (rtx insn)
6682
{
6683
  int i, j;
6684
 
6685
  /* Scan all the reloads looking for ones that only load values and
6686
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6687
     assigned and not modified by INSN.  */
6688
 
6689
  for (i = 0; i < n_reloads; i++)
6690
    {
6691
      int conflicting_input = 0;
6692
      int max_input_address_opnum = -1;
6693
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6694
 
6695
      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6696
          || rld[i].out != 0 || rld[i].reg_rtx == 0
6697
          || reg_set_p (rld[i].reg_rtx, insn))
6698
        continue;
6699
 
6700
      /* Look at all other reloads.  Ensure that the only use of this
6701
         reload_reg_rtx is in a reload that just loads the same value
6702
         as we do.  Note that any secondary reloads must be of the identical
6703
         class since the values, modes, and result registers are the
6704
         same, so we need not do anything with any secondary reloads.  */
6705
 
6706
      for (j = 0; j < n_reloads; j++)
6707
        {
6708
          if (i == j || rld[j].reg_rtx == 0
6709
              || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6710
                                            rld[i].reg_rtx))
6711
            continue;
6712
 
6713
          if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6714
              && rld[j].opnum > max_input_address_opnum)
6715
            max_input_address_opnum = rld[j].opnum;
6716
 
6717
          /* If the reload regs aren't exactly the same (e.g., different modes)
6718
             or if the values are different, we can't merge this reload.
6719
             But if it is an input reload, we might still merge
6720
             RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */
6721
 
6722
          if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6723
              || rld[j].out != 0 || rld[j].in == 0
6724
              || ! rtx_equal_p (rld[i].in, rld[j].in))
6725
            {
6726
              if (rld[j].when_needed != RELOAD_FOR_INPUT
6727
                  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6728
                       || rld[i].opnum > rld[j].opnum)
6729
                      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6730
                break;
6731
              conflicting_input = 1;
6732
              if (min_conflicting_input_opnum > rld[j].opnum)
6733
                min_conflicting_input_opnum = rld[j].opnum;
6734
            }
6735
        }
6736
 
6737
      /* If all is OK (J reached N_RELOADS, so no conflict was found above),
6738
         merge the reloads.  Only set this to RELOAD_OTHER if we, in fact,
         found any matching reloads.  */
6739
 
6740
      if (j == n_reloads
6741
          && max_input_address_opnum <= min_conflicting_input_opnum)
6742
        {
6743
          gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6744
 
6745
          for (j = 0; j < n_reloads; j++)
6746
            if (i != j && rld[j].reg_rtx != 0
6747
                && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6748
                && (! conflicting_input
6749
                    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6750
                    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6751
              {
6752
                rld[i].when_needed = RELOAD_OTHER;
6753
                rld[j].in = 0;
6754
                reload_spill_index[j] = -1;
6755
                transfer_replacements (i, j);
6756
              }
6757
 
6758
          /* If this is now RELOAD_OTHER, look for any reloads that
6759
             load parts of this operand and set them to
6760
             RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
6761
             RELOAD_OTHER for outputs.  Note that this test is
6762
             equivalent to looking for reloads for this operand
6763
             number.
6764
 
6765
             We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
6766
             it may share registers with a RELOAD_FOR_INPUT, so we can
6767
             not change it to RELOAD_FOR_OTHER_ADDRESS.  We should
6768
             never need to, since we do not modify RELOAD_FOR_OUTPUT.
6769
 
6770
             It is possible that the RELOAD_FOR_OPERAND_ADDRESS
6771
             instruction is assigned the same register as the earlier
6772
             RELOAD_FOR_OTHER_ADDRESS instruction.  Merging these two
6773
             instructions will cause the RELOAD_FOR_OTHER_ADDRESS
6774
             instruction to be deleted later on.  */
6775
 
6776
          if (rld[i].when_needed == RELOAD_OTHER)
6777
            for (j = 0; j < n_reloads; j++)
6778
              if (rld[j].in != 0
6779
                  && rld[j].when_needed != RELOAD_OTHER
6780
                  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6781
                  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6782
                  && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
6783
                  && (! conflicting_input
6784
                      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6785
                      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6786
                  && reg_overlap_mentioned_for_reload_p (rld[j].in,
6787
                                                         rld[i].in))
6788
                {
6789
                  int k;
6790
 
6791
                  rld[j].when_needed
6792
                    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6793
                        || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6794
                       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6795
 
6796
                  /* Check to see if we accidentally converted two
6797
                     reloads that use the same reload register with
6798
                     different inputs to the same type.  If so, the
6799
                     resulting code won't work.  */
6800
                  if (rld[j].reg_rtx)
6801
                    for (k = 0; k < j; k++)
6802
                      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6803
                                  || rld[k].when_needed != rld[j].when_needed
6804
                                  || !rtx_equal_p (rld[k].reg_rtx,
6805
                                                   rld[j].reg_rtx)
6806
                                  || rtx_equal_p (rld[k].in,
6807
                                                  rld[j].in));
6808
                }
6809
        }
6810
    }
6811
}
6812
 
6813
/* These arrays are filled by emit_reload_insns and its subroutines.  */
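/* Each array holds the insn sequence of that kind of reload generated for
   the insn currently being processed; emit_reload_insns later emits these
   sequences before or after the insn in the required order.  */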
6814
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6815
static rtx other_input_address_reload_insns = 0;
6816
static rtx other_input_reload_insns = 0;
6817
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6818
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6819
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6820
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6821
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6822
static rtx operand_reload_insns = 0;
6823
static rtx other_operand_reload_insns = 0;
6824
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6825
 
6826
/* Values to be put in spill_reg_store are put here first.  */
6827
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
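/* Hard regs noted by emit_output_reload_insns as dying in the insn being
   reloaded (they carry a REG_DEAD note); cleared for each new insn.  */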
6828
static HARD_REG_SET reg_reloaded_died;
6829
 
6830
/* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6831
   of class NEW_CLASS with mode NEW_MODE.  Alternatively, if ALT_RELOAD_REG
6832
   is nonzero, check whether that register is suitable.  On success,
   change *RELOAD_REG to the
6833
   adjusted register, and return true.  Otherwise, return false.  */
6834
static bool
6835
reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6836
                            enum reg_class new_class,
6837
                            enum machine_mode new_mode)
6838
 
6839
{
6840
  rtx reg;
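  /* Try *RELOAD_REG itself first; if it is unsuitable, fall back to
     ALT_RELOAD_REG (the loop update clears it, so it is tried at most once).  */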
6841
 
6842
  for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6843
    {
6844
      unsigned regno = REGNO (reg);
6845
 
6846
      if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6847
        continue;
6848
      if (GET_MODE (reg) != new_mode)
6849
        {
6850
          if (!HARD_REGNO_MODE_OK (regno, new_mode))
6851
            continue;
6852
          if (hard_regno_nregs[regno][new_mode]
6853
              > hard_regno_nregs[regno][GET_MODE (reg)])
6854
            continue;
6855
          reg = reload_adjust_reg_for_mode (reg, new_mode);
6856
        }
6857
      *reload_reg = reg;
6858
      return true;
6859
    }
6860
  return false;
6861
}
6862
 
6863
/* Check if *RELOAD_REG is suitable as a scratch register for the reload
6864
   pattern with insn_code ICODE.  Alternatively, if ALT_RELOAD_REG is
6865
   nonzero, check whether that register is suitable.  On success,
   change *RELOAD_REG to the
6866
   adjusted register, and return true.  Otherwise, return false.  */
6867
static bool
6868
reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6869
                             enum insn_code icode)
6870
 
6871
{
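  /* Take the class and mode the scratch must have from the reload
     pattern; its operand 2 is the scratch operand.  */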
6872
  enum reg_class new_class = scratch_reload_class (icode);
6873
  enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6874
 
6875
  return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6876
                                     new_class, new_mode);
6877
}
6878
 
6879
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6880
   has the number J.  OLD contains the value to be used as input.  */
6881
 
6882
static void
6883
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6884
                         rtx old, int j)
6885
{
6886
  rtx insn = chain->insn;
6887
  rtx reloadreg;
6888
  rtx oldequiv_reg = 0;
6889
  rtx oldequiv = 0;
6890
  int special = 0;
6891
  enum machine_mode mode;
6892
  rtx *where;
6893
 
6894
  /* delete_output_reload is only invoked properly if old contains
6895
     the original pseudo register.  Since this is replaced with a
6896
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6897
     find the pseudo in RELOAD_IN_REG.  */
6898
  if (reload_override_in[j]
6899
      && REG_P (rl->in_reg))
6900
    {
6901
      oldequiv = old;
6902
      old = rl->in_reg;
6903
    }
6904
  if (oldequiv == 0)
6905
    oldequiv = old;
6906
  else if (REG_P (oldequiv))
6907
    oldequiv_reg = oldequiv;
6908
  else if (GET_CODE (oldequiv) == SUBREG)
6909
    oldequiv_reg = SUBREG_REG (oldequiv);
6910
 
6911
  reloadreg = reload_reg_rtx_for_input[j];
6912
  mode = GET_MODE (reloadreg);
6913
 
6914
  /* If we are reloading from a register that was recently stored in
6915
     with an output-reload, see if we can prove there was
6916
     actually no need to store the old value in it.  */
6917
 
6918
  if (optimize && REG_P (oldequiv)
6919
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6920
      && spill_reg_store[REGNO (oldequiv)]
6921
      && REG_P (old)
6922
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6923
          || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6924
                          rl->out_reg)))
6925
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6926
 
6927
  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6928
     OLDEQUIV.  */
6929
 
6930
  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6931
    oldequiv = SUBREG_REG (oldequiv);
6932
  if (GET_MODE (oldequiv) != VOIDmode
6933
      && mode != GET_MODE (oldequiv))
6934
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6935
 
6936
  /* Switch to the right place to emit the reload insns.  */
6937
  switch (rl->when_needed)
6938
    {
6939
    case RELOAD_OTHER:
6940
      where = &other_input_reload_insns;
6941
      break;
6942
    case RELOAD_FOR_INPUT:
6943
      where = &input_reload_insns[rl->opnum];
6944
      break;
6945
    case RELOAD_FOR_INPUT_ADDRESS:
6946
      where = &input_address_reload_insns[rl->opnum];
6947
      break;
6948
    case RELOAD_FOR_INPADDR_ADDRESS:
6949
      where = &inpaddr_address_reload_insns[rl->opnum];
6950
      break;
6951
    case RELOAD_FOR_OUTPUT_ADDRESS:
6952
      where = &output_address_reload_insns[rl->opnum];
6953
      break;
6954
    case RELOAD_FOR_OUTADDR_ADDRESS:
6955
      where = &outaddr_address_reload_insns[rl->opnum];
6956
      break;
6957
    case RELOAD_FOR_OPERAND_ADDRESS:
6958
      where = &operand_reload_insns;
6959
      break;
6960
    case RELOAD_FOR_OPADDR_ADDR:
6961
      where = &other_operand_reload_insns;
6962
      break;
6963
    case RELOAD_FOR_OTHER_ADDRESS:
6964
      where = &other_input_address_reload_insns;
6965
      break;
6966
    default:
6967
      gcc_unreachable ();
6968
    }
6969
 
6970
  push_to_sequence (*where);
6971
 
6972
  /* Auto-increment addresses must be reloaded in a special way.  */
6973
  if (rl->out && ! rl->out_reg)
6974
    {
6975
      /* We are not going to bother supporting the case where an
6976
         incremented register can't be copied directly from
6977
         OLDEQUIV since this seems highly unlikely.  */
6978
      gcc_assert (rl->secondary_in_reload < 0);
6979
 
6980
      if (reload_inherited[j])
6981
        oldequiv = reloadreg;
6982
 
6983
      old = XEXP (rl->in_reg, 0);
6984
 
6985
      if (optimize && REG_P (oldequiv)
6986
          && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6987
          && spill_reg_store[REGNO (oldequiv)]
6988
          && REG_P (old)
6989
          && (dead_or_set_p (insn,
6990
                             spill_reg_stored_to[REGNO (oldequiv)])
6991
              || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6992
                              old)))
6993
        delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6994
 
6995
      /* Prevent normal processing of this reload.  */
6996
      special = 1;
6997
      /* Output a special code sequence for this case.  */
6998
      new_spill_reg_store[REGNO (reloadreg)]
6999
        = inc_for_reload (reloadreg, oldequiv, rl->out,
7000
                          rl->inc);
7001
    }
7002
 
7003
  /* If we are reloading a pseudo-register that was set by the previous
7004
     insn, see if we can get rid of that pseudo-register entirely
7005
     by redirecting the previous insn into our reload register.  */
7006
 
7007
  else if (optimize && REG_P (old)
7008
           && REGNO (old) >= FIRST_PSEUDO_REGISTER
7009
           && dead_or_set_p (insn, old)
7010
           /* This is unsafe if some other reload
7011
              uses the same reg first.  */
7012
           && ! conflicts_with_override (reloadreg)
7013
           && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7014
                                rl->when_needed, old, rl->out, j, 0))
7015
    {
7016
      rtx temp = PREV_INSN (insn);
7017
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7018
        temp = PREV_INSN (temp);
7019
      if (temp
7020
          && NONJUMP_INSN_P (temp)
7021
          && GET_CODE (PATTERN (temp)) == SET
7022
          && SET_DEST (PATTERN (temp)) == old
7023
          /* Make sure we can access insn_operand_constraint.  */
7024
          && asm_noperands (PATTERN (temp)) < 0
7025
          /* This is unsafe if operand occurs more than once in current
7026
             insn.  Perhaps some occurrences aren't reloaded.  */
7027
          && count_occurrences (PATTERN (insn), old, 0) == 1)
7028
        {
7029
          rtx old = SET_DEST (PATTERN (temp));
7030
          /* Store into the reload register instead of the pseudo.  */
7031
          SET_DEST (PATTERN (temp)) = reloadreg;
7032
 
7033
          /* Verify that resulting insn is valid.  */
7034
          extract_insn (temp);
7035
          if (constrain_operands (1))
7036
            {
7037
              /* If the previous insn is an output reload, the source is
7038
                 a reload register, and its spill_reg_store entry will
7039
                 contain the previous destination.  This is now
7040
                 invalid.  */
7041
              if (REG_P (SET_SRC (PATTERN (temp)))
7042
                  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7043
                {
7044
                  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7045
                  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7046
                }
7047
 
7048
              /* If these are the only uses of the pseudo reg,
7049
                 pretend for GDB it lives in the reload reg we used.  */
7050
              if (REG_N_DEATHS (REGNO (old)) == 1
7051
                  && REG_N_SETS (REGNO (old)) == 1)
7052
                {
7053
                  reg_renumber[REGNO (old)] = REGNO (reloadreg);
7054
                  if (ira_conflicts_p)
7055
                    /* Inform IRA about the change.  */
7056
                    ira_mark_allocation_change (REGNO (old));
7057
                  alter_reg (REGNO (old), -1, false);
7058
                }
7059
              special = 1;
7060
 
7061
              /* Adjust any debug insns between temp and insn.  */
7062
              while ((temp = NEXT_INSN (temp)) != insn)
7063
                if (DEBUG_INSN_P (temp))
7064
                  replace_rtx (PATTERN (temp), old, reloadreg);
7065
                else
7066
                  gcc_assert (NOTE_P (temp));
7067
            }
7068
          else
7069
            {
7070
              SET_DEST (PATTERN (temp)) = old;
7071
            }
7072
        }
7073
    }
7074
 
7075
  /* We can't do that, so output an insn to load RELOADREG.  */
7076
 
7077
  /* If we have a secondary reload, pick up the secondary register
7078
     and icode, if any.  If OLDEQUIV and OLD are different or
7079
     if this is an in-out reload, recompute whether or not we
7080
     still need a secondary register and what the icode should
7081
     be.  If we still need a secondary register and the class or
7082
     icode is different, go back to reloading from OLD if using
7083
     OLDEQUIV means that we got the wrong type of register.  We
7084
     cannot have different class or icode due to an in-out reload
7085
     because we don't make such reloads when both the input and
7086
     output need secondary reload registers.  */
7087
 
7088
  if (! special && rl->secondary_in_reload >= 0)
7089
    {
7090
      rtx second_reload_reg = 0;
7091
      rtx third_reload_reg = 0;
7092
      int secondary_reload = rl->secondary_in_reload;
7093
      rtx real_oldequiv = oldequiv;
7094
      rtx real_old = old;
7095
      rtx tmp;
7096
      enum insn_code icode;
7097
      enum insn_code tertiary_icode = CODE_FOR_nothing;
7098
 
7099
      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7100
         and similarly for OLD.
7101
         See comments in get_secondary_reload in reload.c.  */
7102
      /* If it is a pseudo that cannot be replaced with its
7103
         equivalent MEM, we must fall back to reload_in, which
7104
         will have all the necessary substitutions registered.
7105
         Likewise for a pseudo that can't be replaced with its
7106
         equivalent constant.
7107
 
7108
         Take extra care for subregs of such pseudos.  Note that
7109
         we cannot use reg_equiv_mem in this case because it is
7110
         not in the right mode.  */
7111
 
7112
      tmp = oldequiv;
7113
      if (GET_CODE (tmp) == SUBREG)
7114
        tmp = SUBREG_REG (tmp);
7115
      if (REG_P (tmp)
7116
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7117
          && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7118
              || reg_equiv_constant[REGNO (tmp)] != 0))
7119
        {
7120
          if (! reg_equiv_mem[REGNO (tmp)]
7121
              || num_not_at_initial_offset
7122
              || GET_CODE (oldequiv) == SUBREG)
7123
            real_oldequiv = rl->in;
7124
          else
7125
            real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7126
        }
7127
 
7128
      tmp = old;
7129
      if (GET_CODE (tmp) == SUBREG)
7130
        tmp = SUBREG_REG (tmp);
7131
      if (REG_P (tmp)
7132
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7133
          && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7134
              || reg_equiv_constant[REGNO (tmp)] != 0))
7135
        {
7136
          if (! reg_equiv_mem[REGNO (tmp)]
7137
              || num_not_at_initial_offset
7138
              || GET_CODE (old) == SUBREG)
7139
            real_old = rl->in;
7140
          else
7141
            real_old = reg_equiv_mem[REGNO (tmp)];
7142
        }
7143
 
7144
      second_reload_reg = rld[secondary_reload].reg_rtx;
7145
      if (rld[secondary_reload].secondary_in_reload >= 0)
7146
        {
7147
          int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7148
 
7149
          third_reload_reg = rld[tertiary_reload].reg_rtx;
7150
          tertiary_icode = rld[secondary_reload].secondary_in_icode;
7151
          /* We'd have to add more code for quaternary reloads.  */
7152
          gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7153
        }
7154
      icode = rl->secondary_in_icode;
7155
 
7156
      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7157
          || (rl->in != 0 && rl->out != 0))
7158
        {
7159
          secondary_reload_info sri, sri2;
7160
          enum reg_class new_class, new_t_class;
7161
 
7162
          sri.icode = CODE_FOR_nothing;
7163
          sri.prev_sri = NULL;
7164
          new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7165
                                                mode, &sri);
7166
 
7167
          if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7168
            second_reload_reg = 0;
7169
          else if (new_class == NO_REGS)
7170
            {
7171
              if (reload_adjust_reg_for_icode (&second_reload_reg,
7172
                                               third_reload_reg,
7173
                                               (enum insn_code) sri.icode))
7174
                {
7175
                  icode = (enum insn_code) sri.icode;
7176
                  third_reload_reg = 0;
7177
                }
7178
              else
7179
                {
7180
                  oldequiv = old;
7181
                  real_oldequiv = real_old;
7182
                }
7183
            }
7184
          else if (sri.icode != CODE_FOR_nothing)
7185
            /* We currently lack a way to express this in reloads.  */
7186
            gcc_unreachable ();
7187
          else
7188
            {
7189
              sri2.icode = CODE_FOR_nothing;
7190
              sri2.prev_sri = &sri;
7191
              new_t_class = targetm.secondary_reload (1, real_oldequiv,
7192
                                                      new_class, mode, &sri);
7193
              if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7194
                {
7195
                  if (reload_adjust_reg_for_temp (&second_reload_reg,
7196
                                                  third_reload_reg,
7197
                                                  new_class, mode))
7198
                    {
7199
                      third_reload_reg = 0;
7200
                      tertiary_icode = (enum insn_code) sri2.icode;
7201
                    }
7202
                  else
7203
                    {
7204
                      oldequiv = old;
7205
                      real_oldequiv = real_old;
7206
                    }
7207
                }
7208
              else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7209
                {
7210
                  rtx intermediate = second_reload_reg;
7211
 
7212
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7213
                                                  new_class, mode)
7214
                      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7215
                                                      ((enum insn_code)
7216
                                                       sri2.icode)))
7217
                    {
7218
                      second_reload_reg = intermediate;
7219
                      tertiary_icode = (enum insn_code) sri2.icode;
7220
                    }
7221
                  else
7222
                    {
7223
                      oldequiv = old;
7224
                      real_oldequiv = real_old;
7225
                    }
7226
                }
7227
              else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7228
                {
7229
                  rtx intermediate = second_reload_reg;
7230
 
7231
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7232
                                                  new_class, mode)
7233
                      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7234
                                                      new_t_class, mode))
7235
                    {
7236
                      second_reload_reg = intermediate;
7237
                      tertiary_icode = (enum insn_code) sri2.icode;
7238
                    }
7239
                  else
7240
                    {
7241
                      oldequiv = old;
7242
                      real_oldequiv = real_old;
7243
                    }
7244
                }
7245
              else
7246
                {
7247
                  /* This could be handled more intelligently too.  */
7248
                  oldequiv = old;
7249
                  real_oldequiv = real_old;
7250
                }
7251
            }
7252
        }
7253
 
7254
      /* If we still need a secondary reload register, check
7255
         to see if it is being used as a scratch or intermediate
7256
         register and generate code appropriately.  If we need
7257
         a scratch register, use REAL_OLDEQUIV since the form of
7258
         the insn may depend on the actual address if it is
7259
         a MEM.  */
7260
 
7261
      if (second_reload_reg)
7262
        {
7263
          if (icode != CODE_FOR_nothing)
7264
            {
7265
              /* We'd have to add extra code to handle this case.  */
7266
              gcc_assert (!third_reload_reg);
7267
 
7268
              emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7269
                                          second_reload_reg));
7270
              special = 1;
7271
            }
7272
          else
7273
            {
7274
              /* See if we need a scratch register to load the
7275
                 intermediate register (a tertiary reload).  */
7276
              if (tertiary_icode != CODE_FOR_nothing)
7277
                {
7278
                  emit_insn ((GEN_FCN (tertiary_icode)
7279
                              (second_reload_reg, real_oldequiv,
7280
                               third_reload_reg)));
7281
                }
7282
              else if (third_reload_reg)
7283
                {
7284
                  gen_reload (third_reload_reg, real_oldequiv,
7285
                              rl->opnum,
7286
                              rl->when_needed);
7287
                  gen_reload (second_reload_reg, third_reload_reg,
7288
                              rl->opnum,
7289
                              rl->when_needed);
7290
                }
7291
              else
7292
                gen_reload (second_reload_reg, real_oldequiv,
7293
                            rl->opnum,
7294
                            rl->when_needed);
7295
 
7296
              oldequiv = second_reload_reg;
7297
            }
7298
        }
7299
    }
7300
 
7301
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7302
    {
7303
      rtx real_oldequiv = oldequiv;
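      /* If OLDEQUIV is a pseudo (or a SUBREG of one) that has an equivalent
         MEM or constant, or a constant that the reload register's class
         cannot accept, fall back to RL->in, which carries the necessary
         substitutions (see the corresponding test above).  */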
7304
 
7305
      if ((REG_P (oldequiv)
7306
           && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7307
           && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7308
               || reg_equiv_constant[REGNO (oldequiv)] != 0))
7309
          || (GET_CODE (oldequiv) == SUBREG
7310
              && REG_P (SUBREG_REG (oldequiv))
7311
              && (REGNO (SUBREG_REG (oldequiv))
7312
                  >= FIRST_PSEUDO_REGISTER)
7313
              && ((reg_equiv_memory_loc
7314
                   [REGNO (SUBREG_REG (oldequiv))] != 0)
7315
                  || (reg_equiv_constant
7316
                      [REGNO (SUBREG_REG (oldequiv))] != 0)))
7317
          || (CONSTANT_P (oldequiv)
7318
              && (PREFERRED_RELOAD_CLASS (oldequiv,
7319
                                          REGNO_REG_CLASS (REGNO (reloadreg)))
7320
                  == NO_REGS)))
7321
        real_oldequiv = rl->in;
7322
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
7323
                  rl->when_needed);
7324
    }
7325
 
7326
  if (flag_non_call_exceptions)
7327
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7328
 
7329
  /* End this sequence.  */
7330
  *where = get_insns ();
7331
  end_sequence ();
7332
 
7333
  /* Update reload_override_in so that delete_address_reloads_1
7334
     can see the actual register usage.  */
7335
  if (oldequiv_reg)
7336
    reload_override_in[j] = oldequiv;
7337
}
7338
 
7339
/* Generate insns for the output reload RL, which is for the insn described
7340
   by CHAIN and has the number J.  */
7341
static void
7342
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7343
                          int j)
7344
{
7345
  rtx reloadreg;
7346
  rtx insn = chain->insn;
7347
  int special = 0;
7348
  rtx old = rl->out;
7349
  enum machine_mode mode;
7350
  rtx p;
7351
  rtx rl_reg_rtx;
7352
 
7353
  if (rl->when_needed == RELOAD_OTHER)
7354
    start_sequence ();
7355
  else
7356
    push_to_sequence (output_reload_insns[rl->opnum]);
7357
 
7358
  rl_reg_rtx = reload_reg_rtx_for_output[j];
7359
  mode = GET_MODE (rl_reg_rtx);
7360
 
7361
  reloadreg = rl_reg_rtx;
7362
 
7363
  /* If we need two reload regs, set RELOADREG to the intermediate
7364
     one, since it will be stored into OLD.  We might need a secondary
7365
     register only for an input reload, so check again here.  */
7366
 
7367
  if (rl->secondary_out_reload >= 0)
7368
    {
7369
      rtx real_old = old;
7370
      int secondary_reload = rl->secondary_out_reload;
7371
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7372
 
7373
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7374
          && reg_equiv_mem[REGNO (old)] != 0)
7375
        real_old = reg_equiv_mem[REGNO (old)];
7376
 
7377
      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7378
        {
7379
          rtx second_reloadreg = reloadreg;
7380
          reloadreg = rld[secondary_reload].reg_rtx;
7381
 
7382
          /* See if RELOADREG is to be used as a scratch register
7383
             or as an intermediate register.  */
7384
          if (rl->secondary_out_icode != CODE_FOR_nothing)
7385
            {
7386
              /* We'd have to add extra code to handle this case.  */
7387
              gcc_assert (tertiary_reload < 0);
7388
 
7389
              emit_insn ((GEN_FCN (rl->secondary_out_icode)
7390
                          (real_old, second_reloadreg, reloadreg)));
7391
              special = 1;
7392
            }
7393
          else
7394
            {
7395
              /* See if we need both a scratch and intermediate reload
7396
                 register.  */
7397
 
7398
              enum insn_code tertiary_icode
7399
                = rld[secondary_reload].secondary_out_icode;
7400
 
7401
              /* We'd have to add more code for quaternary reloads.  */
7402
              gcc_assert (tertiary_reload < 0
7403
                          || rld[tertiary_reload].secondary_out_reload < 0);
7404
 
7405
              if (GET_MODE (reloadreg) != mode)
7406
                reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7407
 
7408
              if (tertiary_icode != CODE_FOR_nothing)
7409
                {
7410
                  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7411
                  rtx tem;
7412
 
7413
                  /* Copy the primary reload reg to the secondary reload reg,
7414
                     then copy the secondary reload reg to OLD using our insn.
7415
                     (Note that these have been swapped above.)  */
7416
 
7417
                  /* If REAL_OLD is a paradoxical SUBREG, remove it
7418
                     and try to put the opposite SUBREG on
7419
                     RELOADREG.  */
7420
                  if (GET_CODE (real_old) == SUBREG
7421
                      && (GET_MODE_SIZE (GET_MODE (real_old))
7422
                          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7423
                      && 0 != (tem = gen_lowpart_common
7424
                               (GET_MODE (SUBREG_REG (real_old)),
7425
                                reloadreg)))
7426
                    real_old = SUBREG_REG (real_old), reloadreg = tem;
7427
 
7428
                  gen_reload (reloadreg, second_reloadreg,
7429
                              rl->opnum, rl->when_needed);
7430
                  emit_insn ((GEN_FCN (tertiary_icode)
7431
                              (real_old, reloadreg, third_reloadreg)));
7432
                  special = 1;
7433
                }
7434
 
7435
              else
7436
                {
7437
                  /* Copy between the reload regs here and then to
7438
                     OUT later.  */
7439
 
7440
                  gen_reload (reloadreg, second_reloadreg,
7441
                              rl->opnum, rl->when_needed);
7442
                  if (tertiary_reload >= 0)
7443
                    {
7444
                      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7445
 
7446
                      gen_reload (third_reloadreg, reloadreg,
7447
                                  rl->opnum, rl->when_needed);
7448
                      reloadreg = third_reloadreg;
7449
                    }
7450
                }
7451
            }
7452
        }
7453
    }
7454
 
7455
  /* Output the last reload insn.  */
7456
  if (! special)
7457
    {
7458
      rtx set;
7459
 
7460
      /* Don't output the last reload if OLD is not the dest of
7461
         INSN but is in the src and is clobbered by INSN.  */
7462
      if (! flag_expensive_optimizations
7463
          || !REG_P (old)
7464
          || !(set = single_set (insn))
7465
          || rtx_equal_p (old, SET_DEST (set))
7466
          || !reg_mentioned_p (old, SET_SRC (set))
7467
          || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7468
               && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7469
        gen_reload (old, reloadreg, rl->opnum,
7470
                    rl->when_needed);
7471
    }
7472
 
7473
  /* Look at all insns we emitted, just to be safe.  */
7474
  for (p = get_insns (); p; p = NEXT_INSN (p))
7475
    if (INSN_P (p))
7476
      {
7477
        rtx pat = PATTERN (p);
7478
 
7479
        /* If this output reload doesn't come from a spill reg,
7480
           clear any memory of reloaded copies of the pseudo reg.
7481
           If this output reload comes from a spill reg,
7482
           reg_has_output_reload will make this do nothing.  */
7483
        note_stores (pat, forget_old_reloads_1, NULL);
7484
 
7485
        if (reg_mentioned_p (rl_reg_rtx, pat))
7486
          {
7487
            rtx set = single_set (insn);
7488
            if (reload_spill_index[j] < 0
7489
                && set
7490
                && SET_SRC (set) == rl_reg_rtx)
7491
              {
7492
                int src = REGNO (SET_SRC (set));
7493
 
7494
                reload_spill_index[j] = src;
7495
                SET_HARD_REG_BIT (reg_is_output_reload, src);
7496
                if (find_regno_note (insn, REG_DEAD, src))
7497
                  SET_HARD_REG_BIT (reg_reloaded_died, src);
7498
              }
7499
            if (HARD_REGISTER_P (rl_reg_rtx))
7500
              {
7501
                int s = rl->secondary_out_reload;
7502
                set = single_set (p);
7503
                /* If this reload copies only to the secondary reload
7504
                   register, the secondary reload does the actual
7505
                   store.  */
7506
                if (s >= 0 && set == NULL_RTX)
7507
                  /* We can't tell what function the secondary reload
7508
                     has and where the actual store to the pseudo is
7509
                     made; leave new_spill_reg_store alone.  */
7510
                  ;
7511
                else if (s >= 0
7512
                         && SET_SRC (set) == rl_reg_rtx
7513
                         && SET_DEST (set) == rld[s].reg_rtx)
7514
                  {
7515
                    /* Usually the next instruction will be the
7516
                       secondary reload insn;  if we can confirm
7517
                       that it is, setting new_spill_reg_store to
7518
                       that insn will allow an extra optimization.  */
7519
                    rtx s_reg = rld[s].reg_rtx;
7520
                    rtx next = NEXT_INSN (p);
7521
                    rld[s].out = rl->out;
7522
                    rld[s].out_reg = rl->out_reg;
7523
                    set = single_set (next);
7524
                    if (set && SET_SRC (set) == s_reg
7525
                        && ! new_spill_reg_store[REGNO (s_reg)])
7526
                      {
7527
                        SET_HARD_REG_BIT (reg_is_output_reload,
7528
                                          REGNO (s_reg));
7529
                        new_spill_reg_store[REGNO (s_reg)] = next;
7530
                      }
7531
                  }
7532
                else
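                  /* P is the insn that stores from this reload register;
                     remember it as the pending spill_reg_store entry.  */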
7533
                  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7534
              }
7535
          }
7536
      }
7537
 
7538
  if (rl->when_needed == RELOAD_OTHER)
7539
    {
7540
      emit_insn (other_output_reload_insns[rl->opnum]);
7541
      other_output_reload_insns[rl->opnum] = get_insns ();
7542
    }
7543
  else
7544
    output_reload_insns[rl->opnum] = get_insns ();
7545
 
7546
  if (flag_non_call_exceptions)
7547
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7548
 
7549
  end_sequence ();
7550
}
7551
 
7552
/* Do input reloading for reload RL, which is for the insn described by CHAIN
7553
   and has the number J.  */
7554
static void
7555
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7556
{
7557
  rtx insn = chain->insn;
7558
  rtx old = (rl->in && MEM_P (rl->in)
7559
             ? rl->in_reg : rl->in);
7560
  rtx reg_rtx = rl->reg_rtx;
7561
 
7562
  if (old && reg_rtx)
7563
    {
7564
      enum machine_mode mode;
7565
 
7566
      /* Determine the mode to reload in.
7567
         This is very tricky because we have three to choose from.
7568
         There is the mode the insn operand wants (rl->inmode).
7569
         There is the mode of the reload register RELOADREG.
7570
         There is the intrinsic mode of the operand, which we could find
7571
         by stripping some SUBREGs.
7572
         It turns out that RELOADREG's mode is irrelevant:
7573
         we can change that arbitrarily.
7574
 
7575
         Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7576
         then the reload reg may not support QImode moves, so use SImode.
7577
         If foo is in memory due to spilling a pseudo reg, this is safe,
7578
         because the QImode value is in the least significant part of a
7579
         slot big enough for a SImode.  If foo is some other sort of
7580
         memory reference, then it is impossible to reload this case,
7581
         so previous passes had better make sure this never happens.
7582
 
7583
         Then consider a one-word union which has SImode and one of its
7584
         members is a float, being fetched as (SUBREG:SF union:SI).
7585
         We must fetch that as SFmode because we could be loading into
7586
         a float-only register.  In this case OLD's mode is correct.
7587
 
7588
         Consider an immediate integer: it has VOIDmode.  Here we need
7589
         to get a mode from something else.
7590
 
7591
         In some cases, there is a fourth mode, the operand's
7592
         containing mode.  If the insn specifies a containing mode for
7593
         this operand, it overrides all others.
7594
 
7595
         I am not sure whether the algorithm here is always right,
7596
         but it does the right things in those cases.  */
7597
 
7598
      mode = GET_MODE (old);
7599
      if (mode == VOIDmode)
7600
        mode = rl->inmode;
7601
 
7602
      /* We cannot use gen_lowpart_common since it can do the wrong thing
7603
         when REG_RTX has a multi-word mode.  Note that REG_RTX must
7604
         always be a REG here.  */
7605
      if (GET_MODE (reg_rtx) != mode)
7606
        reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7607
    }
7608
  reload_reg_rtx_for_input[j] = reg_rtx;
7609
 
7610
  if (old != 0
7611
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
7612
         AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7613
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7614
      && ! rtx_equal_p (reg_rtx, old)
7615
      && reg_rtx != 0)
7616
    emit_input_reload_insns (chain, rld + j, old, j);
7617
 
7618
  /* When inheriting a wider reload, we have a MEM in rl->in,
7619
     e.g. inheriting a SImode output reload for
7620
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7621
  if (optimize && reload_inherited[j] && rl->in
7622
      && MEM_P (rl->in)
7623
      && MEM_P (rl->in_reg)
7624
      && reload_spill_index[j] >= 0
7625
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7626
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7627
 
7628
  /* If we are reloading a register that was recently stored in with an
7629
     output-reload, see if we can prove there was
7630
     actually no need to store the old value in it.  */
7631
 
7632
  if (optimize
7633
      && (reload_inherited[j] || reload_override_in[j])
7634
      && reg_rtx
7635
      && REG_P (reg_rtx)
7636
      && spill_reg_store[REGNO (reg_rtx)] != 0
7637
#if 0
7638
      /* There doesn't seem to be any reason to restrict this to pseudos
7639
         and doing so loses in the case where we are copying from a
7640
         register of the wrong class.  */
7641
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7642
#endif
7643
      /* The insn might already have some references to stack slots
7644
         replaced by MEMs, while reload_out_reg still names the
7645
         original pseudo.  */
7646
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7647
          || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7648
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7649
}
7650
 
7651
/* Do output reloading for reload RL, which is for the insn described by
7652
   CHAIN and has the number J.
7653
   ??? At some point we need to support handling output reloads of
7654
   JUMP_INSNs or insns that set cc0.  */
7655
static void
7656
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7657
{
7658
  rtx note, old;
7659
  rtx insn = chain->insn;
7660
  /* If this is an output reload that stores something that is
7661
     not loaded in this same reload, see if we can eliminate a previous
7662
     store.  */
7663
  rtx pseudo = rl->out_reg;
7664
  rtx reg_rtx = rl->reg_rtx;
7665
 
7666
  if (rl->out && reg_rtx)
7667
    {
7668
      enum machine_mode mode;
7669
 
7670
      /* Determine the mode to reload in.
7671
         See comments above (for input reloading).  */
7672
      mode = GET_MODE (rl->out);
7673
      if (mode == VOIDmode)
7674
        {
7675
          /* VOIDmode should never happen for an output.  */
7676
          if (asm_noperands (PATTERN (insn)) < 0)
7677
            /* It's the compiler's fault.  */
7678
            fatal_insn ("VOIDmode on an output", insn);
7679
          error_for_asm (insn, "output operand is constant in %<asm%>");
7680
          /* Prevent crash--use something we know is valid.  */
7681
          mode = word_mode;
7682
          rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7683
        }
7684
      if (GET_MODE (reg_rtx) != mode)
7685
        reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7686
    }
7687
  reload_reg_rtx_for_output[j] = reg_rtx;
7688
 
7689
  if (pseudo
7690
      && optimize
7691
      && REG_P (pseudo)
7692
      && ! rtx_equal_p (rl->in_reg, pseudo)
7693
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7694
      && reg_last_reload_reg[REGNO (pseudo)])
7695
    {
7696
      int pseudo_no = REGNO (pseudo);
7697
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7698
 
7699
      /* We don't need to test full validity of last_regno for
7700
         inherit here; we only want to know if the store actually
7701
         matches the pseudo.  */
7702
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7703
          && reg_reloaded_contents[last_regno] == pseudo_no
7704
          && spill_reg_store[last_regno]
7705
          && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7706
        delete_output_reload (insn, j, last_regno, reg_rtx);
7707
    }
7708
 
7709
  old = rl->out_reg;
7710
  if (old == 0
7711
      || reg_rtx == 0
7712
      || rtx_equal_p (old, reg_rtx))
7713
    return;
7714
 
7715
  /* An output operand that dies right away does need a reload,
7716
     but need not be copied from the reload register.  Show the new
7717
     location in the REG_UNUSED note.  */
7718
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7719
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7720
    {
7721
      XEXP (note, 0) = reg_rtx;
7722
      return;
7723
    }
7724
  /* Likewise for a SUBREG of an operand that dies.  */
7725
  else if (GET_CODE (old) == SUBREG
7726
           && REG_P (SUBREG_REG (old))
7727
           && 0 != (note = find_reg_note (insn, REG_UNUSED,
7728
                                          SUBREG_REG (old))))
7729
    {
7730
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7731
      return;
7732
    }
7733
  else if (GET_CODE (old) == SCRATCH)
7734
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
7735
       but we don't want to make an output reload.  */
7736
    return;
7737
 
7738
  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
7739
  gcc_assert (NONJUMP_INSN_P (insn));
7740
 
7741
  emit_output_reload_insns (chain, rld + j, j);
7742
}
7743
 
7744
/* A reload copies values of MODE from register SRC to register DEST.
7745
   Return true if it can be treated for inheritance purposes like a
7746
   group of reloads, each one reloading a single hard register.  The
7747
   caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7748
   occupy the same number of hard registers.  */
7749
 
7750
static bool
7751
inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7752
                     int src ATTRIBUTE_UNUSED,
7753
                     enum machine_mode mode ATTRIBUTE_UNUSED)
7754
{
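  /* Inheriting the registers one at a time is only safe if neither DEST nor
     SRC would be invalidated by a mode change between MODE and its raw mode.  */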
7755
#ifdef CANNOT_CHANGE_MODE_CLASS
7756
  return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7757
          && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7758
#else
7759
  return true;
7760
#endif
7761
}
7762
 
7763
/* Output insns to reload values in and out of the chosen reload regs.  */
7764
 
7765
static void
7766
emit_reload_insns (struct insn_chain *chain)
7767
{
7768
  rtx insn = chain->insn;
7769
 
7770
  int j;
7771
 
7772
  CLEAR_HARD_REG_SET (reg_reloaded_died);
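  /* Likewise clear the reload insn sequences left over from the previous
     insn.  */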
7773
 
7774
  for (j = 0; j < reload_n_operands; j++)
7775
    input_reload_insns[j] = input_address_reload_insns[j]
7776
      = inpaddr_address_reload_insns[j]
7777
      = output_reload_insns[j] = output_address_reload_insns[j]
7778
      = outaddr_address_reload_insns[j]
7779
      = other_output_reload_insns[j] = 0;
7780
  other_input_address_reload_insns = 0;
7781
  other_input_reload_insns = 0;
7782
  operand_reload_insns = 0;
7783
  other_operand_reload_insns = 0;
7784
 
7785
  /* Dump reloads into the dump file.  */
7786
  if (dump_file)
7787
    {
7788
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7789
      debug_reload_to_stream (dump_file);
7790
    }
7791
 
7792
  /* Now output the instructions to copy the data into and out of the
7793
     reload registers.  Do these in the order that the reloads were reported,
7794
     since reloads of base and index registers precede reloads of operands
7795
     and the operands may need the base and index registers reloaded.  */
7796
 
7797
  for (j = 0; j < n_reloads; j++)
7798
    {
7799
      if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7800
        {
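          /* Reset the pending new_spill_reg_store entries for the hard regs
             this reload register occupies before new stores are recorded.  */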
7801
          unsigned int i;
7802
 
7803
          for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7804
            new_spill_reg_store[i] = 0;
7805
        }
7806
 
7807
      do_input_reload (chain, rld + j, j);
7808
      do_output_reload (chain, rld + j, j);
7809
    }
7810
 
7811
  /* Now write all the insns we made for reloads in the order expected by
7812
     the allocation functions.  Prior to the insn being reloaded, we write
7813
     the following reloads:
7814
 
7815
     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7816
 
7817
     RELOAD_OTHER reloads.
7818
 
7819
     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7820
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7821
     RELOAD_FOR_INPUT reload for the operand.
7822
 
7823
     RELOAD_FOR_OPADDR_ADDRS reloads.
7824
 
7825
     RELOAD_FOR_OPERAND_ADDRESS reloads.
7826
 
7827
     After the insn being reloaded, we write the following:
7828
 
7829
     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7830
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7831
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7832
     reloads for the operand.  The RELOAD_OTHER output reloads are
7833
     output in descending order by reload number.  */
7834
 
7835
  emit_insn_before (other_input_address_reload_insns, insn);
7836
  emit_insn_before (other_input_reload_insns, insn);
7837
 
7838
  for (j = 0; j < reload_n_operands; j++)
7839
    {
7840
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
7841
      emit_insn_before (input_address_reload_insns[j], insn);
7842
      emit_insn_before (input_reload_insns[j], insn);
7843
    }
7844
 
7845
  emit_insn_before (other_operand_reload_insns, insn);
7846
  emit_insn_before (operand_reload_insns, insn);
7847
 
7848
  for (j = 0; j < reload_n_operands; j++)
7849
    {
7850
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7851
      x = emit_insn_after (output_address_reload_insns[j], x);
7852
      x = emit_insn_after (output_reload_insns[j], x);
7853
      emit_insn_after (other_output_reload_insns[j], x);
7854
    }
7855
 
7856
  /* For all the spill regs newly reloaded in this instruction,
7857
     record what they were reloaded from, so subsequent instructions
7858
     can inherit the reloads.
7859
 
7860
     Update spill_reg_store for the reloads of this insn.
7861
     Copy the elements that were updated in the loop above.  */
7862
 
7863
  for (j = 0; j < n_reloads; j++)
7864
    {
7865
      int r = reload_order[j];
7866
      int i = reload_spill_index[r];
7867
 
7868
      /* If this is a non-inherited input reload from a pseudo, we must
7869
         clear any memory of a previous store to the same pseudo.  Only do
7870
         something if there will not be an output reload for the pseudo
7871
         being reloaded.  */
7872
      if (rld[r].in_reg != 0
7873
          && ! (reload_inherited[r] || reload_override_in[r]))
7874
        {
7875
          rtx reg = rld[r].in_reg;
7876
 
7877
          if (GET_CODE (reg) == SUBREG)
7878
            reg = SUBREG_REG (reg);
7879
 
7880
          if (REG_P (reg)
7881
              && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7882
              && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7883
            {
7884
              int nregno = REGNO (reg);
7885
 
7886
              if (reg_last_reload_reg[nregno])
7887
                {
7888
                  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7889
 
7890
                  if (reg_reloaded_contents[last_regno] == nregno)
7891
                    spill_reg_store[last_regno] = 0;
7892
                }
7893
            }
7894
        }
7895
 
7896
      /* I is nonneg if this reload used a register.
7897
         If rld[r].reg_rtx is 0, this is an optional reload
7898
         that we opted to ignore.  */
7899
 
7900
      if (i >= 0 && rld[r].reg_rtx != 0)
7901
        {
7902
          int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7903
          int k;
7904
 
7905
          /* For a multi register reload, we need to check if all or part
7906
             of the value lives to the end.  */
7907
          for (k = 0; k < nr; k++)
7908
            if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7909
                                          rld[r].when_needed))
7910
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7911
 
7912
          /* Maybe the spill reg contains a copy of reload_out.  */
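          /* If so, record that fact so that subsequent insns can inherit
             the value instead of reloading it again.  */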
7913
          if (rld[r].out != 0
7914
              && (REG_P (rld[r].out)
7915
#ifdef AUTO_INC_DEC
7916
                  || ! rld[r].out_reg
7917
#endif
7918
                  || REG_P (rld[r].out_reg)))
7919
            {
7920
              rtx reg;
7921
              enum machine_mode mode;
7922
              int regno, nregs;
7923
 
7924
              reg = reload_reg_rtx_for_output[r];
7925
              mode = GET_MODE (reg);
7926
              regno = REGNO (reg);
7927
              nregs = hard_regno_nregs[regno][mode];
7928
              if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7929
                                           rld[r].when_needed))
7930
                {
7931
                  rtx out = (REG_P (rld[r].out)
7932
                             ? rld[r].out
7933
                             : rld[r].out_reg
7934
                             ? rld[r].out_reg
7935
/* AUTO_INC */               : XEXP (rld[r].in_reg, 0));
7936
                  int out_regno = REGNO (out);
7937
                  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
7938
                                   : hard_regno_nregs[out_regno][mode]);
7939
                  bool piecemeal;
7940
 
7941
                  spill_reg_store[regno] = new_spill_reg_store[regno];
7942
                  spill_reg_stored_to[regno] = out;
7943
                  reg_last_reload_reg[out_regno] = reg;
7944
 
7945
                  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
7946
                               && nregs == out_nregs
7947
                               && inherit_piecemeal_p (out_regno, regno, mode));
7948
 
7949
                  /* If OUT_REGNO is a hard register, it may occupy more than
7950
                     one register.  If it does, say what is in the
7951
                     rest of the registers assuming that both registers
7952
                     agree on how many words the object takes.  If not,
7953
                     invalidate the subsequent registers.  */
7954
 
7955
                  if (HARD_REGISTER_NUM_P (out_regno))
7956
                    for (k = 1; k < out_nregs; k++)
7957
                      reg_last_reload_reg[out_regno + k]
7958
                        = (piecemeal ? regno_reg_rtx[regno + k] : 0);
7959
 
7960
                  /* Now do the inverse operation: record, for each hard reg
                     of the reload register, whose value it now contains.  */
7961
                  for (k = 0; k < nregs; k++)
7962
                    {
7963
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7964
                      reg_reloaded_contents[regno + k]
7965
                        = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
7966
                           ? out_regno
7967
                           : out_regno + k);
7968
                      reg_reloaded_insn[regno + k] = insn;
7969
                      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7970
                      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7971
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7972
                                          regno + k);
7973
                      else
7974
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7975
                                            regno + k);
7976
                    }
7977
                }
7978
            }
7979
          /* Maybe the spill reg contains a copy of reload_in.  Only do
7980
             something if there will not be an output reload for
7981
             the register being reloaded.  */
7982
          else if (rld[r].out_reg == 0
7983
                   && rld[r].in != 0
7984
                   && ((REG_P (rld[r].in)
7985
                        && !HARD_REGISTER_P (rld[r].in)
7986
                        && !REGNO_REG_SET_P (&reg_has_output_reload,
7987
                                             REGNO (rld[r].in)))
7988
                       || (REG_P (rld[r].in_reg)
7989
                           && !REGNO_REG_SET_P (&reg_has_output_reload,
7990
                                                REGNO (rld[r].in_reg))))
7991
                   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
7992
            {
7993
              rtx reg;
7994
              enum machine_mode mode;
7995
              int regno, nregs;
7996
 
7997
              reg = reload_reg_rtx_for_input[r];
7998
              mode = GET_MODE (reg);
7999
              regno = REGNO (reg);
8000
              nregs = hard_regno_nregs[regno][mode];
8001
              if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8002
                                           rld[r].when_needed))
8003
                {
8004
                  int in_regno;
8005
                  int in_nregs;
8006
                  rtx in;
8007
                  bool piecemeal;
8008
 
8009
                  if (REG_P (rld[r].in)
8010
                      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8011
                    in = rld[r].in;
8012
                  else if (REG_P (rld[r].in_reg))
8013
                    in = rld[r].in_reg;
8014
                  else
8015
                    in = XEXP (rld[r].in_reg, 0);
8016
                  in_regno = REGNO (in);
8017
 
8018
                  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8019
                              : hard_regno_nregs[in_regno][mode]);
8020
 
8021
                  reg_last_reload_reg[in_regno] = reg;
8022
 
8023
                  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8024
                               && nregs == in_nregs
8025
                               && inherit_piecemeal_p (regno, in_regno, mode));
8026
 
8027
                  if (HARD_REGISTER_NUM_P (in_regno))
8028
                    for (k = 1; k < in_nregs; k++)
8029
                      reg_last_reload_reg[in_regno + k]
8030
                        = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8031
 
8032
                  /* Unless we inherited this reload, show we haven't
8033
                     recently done a store.
8034
                     Previous stores of inherited auto_inc expressions
8035
                     also have to be discarded.  */
8036
                  if (! reload_inherited[r]
8037
                      || (rld[r].out && ! rld[r].out_reg))
8038
                    spill_reg_store[regno] = 0;
8039
 
8040
                  for (k = 0; k < nregs; k++)
8041
                    {
8042
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8043
                      reg_reloaded_contents[regno + k]
8044
                        = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8045
                           ? in_regno
8046
                           : in_regno + k);
8047
                      reg_reloaded_insn[regno + k] = insn;
8048
                      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8049
                      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8050
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8051
                                          regno + k);
8052
                      else
8053
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8054
                                            regno + k);
8055
                    }
8056
                }
8057
            }
8058
        }
8059
 
8060
      /* The following if-statement was #if 0'd in 1.34 (or before...).
8061
         It's reenabled in 1.35 because supposedly nothing else
8062
         deals with this problem.  */
8063
 
8064
      /* If a register gets output-reloaded from a non-spill register,
8065
         that invalidates any previous reloaded copy of it.
8066
         But forget_old_reloads_1 won't get to see it, because
8067
         it thinks only about the original insn.  So invalidate it here.
8068
         Also do the same thing for RELOAD_OTHER constraints where the
8069
         output is discarded.  */
8070
      if (i < 0
8071
          && ((rld[r].out != 0
8072
               && (REG_P (rld[r].out)
8073
                   || (MEM_P (rld[r].out)
8074
                       && REG_P (rld[r].out_reg))))
8075
              || (rld[r].out == 0 && rld[r].out_reg
8076
                  && REG_P (rld[r].out_reg))))
8077
        {
8078
          rtx out = ((rld[r].out && REG_P (rld[r].out))
8079
                     ? rld[r].out : rld[r].out_reg);
8080
          int out_regno = REGNO (out);
8081
          enum machine_mode mode = GET_MODE (out);
8082
 
8083
          /* REG_RTX is now set or clobbered by the main instruction.
8084
             As the comment above explains, forget_old_reloads_1 only
8085
             sees the original instruction, and there is no guarantee
8086
             that the original instruction also clobbered REG_RTX.
8087
             For example, if find_reloads sees that the input side of
8088
             a matched operand pair dies in this instruction, it may
8089
             use the input register as the reload register.
8090
 
8091
             Calling forget_old_reloads_1 is a waste of effort if
8092
             REG_RTX is also the output register.
8093
 
8094
             If we know that REG_RTX holds the value of a pseudo
8095
             register, the code after the call will record that fact.  */
8096
          if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8097
            forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8098
 
8099
          if (!HARD_REGISTER_NUM_P (out_regno))
8100
            {
8101
              rtx src_reg, store_insn = NULL_RTX;
8102
 
8103
              reg_last_reload_reg[out_regno] = 0;
8104
 
8105
              /* If we can find a hard register that is stored, record
8106
                 the storing insn so that we may delete this insn with
8107
                 delete_output_reload.  */
8108
              src_reg = reload_reg_rtx_for_output[r];
8109
 
8110
              /* If this is an optional reload, try to find the source reg
8111
                 from an input reload.  */
8112
              if (! src_reg)
8113
                {
8114
                  rtx set = single_set (insn);
8115
                  if (set && SET_DEST (set) == rld[r].out)
8116
                    {
8117
                      int k;
8118
 
8119
                      src_reg = SET_SRC (set);
8120
                      store_insn = insn;
8121
                      for (k = 0; k < n_reloads; k++)
8122
                        {
8123
                          if (rld[k].in == src_reg)
8124
                            {
8125
                              src_reg = reload_reg_rtx_for_input[k];
8126
                              break;
8127
                            }
8128
                        }
8129
                    }
8130
                }
8131
              else
8132
                store_insn = new_spill_reg_store[REGNO (src_reg)];
8133
              if (src_reg && REG_P (src_reg)
8134
                  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8135
                {
8136
                  int src_regno, src_nregs, k;
8137
                  rtx note;
8138
 
8139
                  gcc_assert (GET_MODE (src_reg) == mode);
8140
                  src_regno = REGNO (src_reg);
8141
                  src_nregs = hard_regno_nregs[src_regno][mode];
8142
                  /* The place to find a death note varies with
8143
                     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
8144
                     necessarily checked exactly in the code that moves
8145
                     notes, so just check both locations.  */
8146
                  note = find_regno_note (insn, REG_DEAD, src_regno);
8147
                  if (! note && store_insn)
8148
                    note = find_regno_note (store_insn, REG_DEAD, src_regno);
8149
                  for (k = 0; k < src_nregs; k++)
8150
                    {
8151
                      spill_reg_store[src_regno + k] = store_insn;
8152
                      spill_reg_stored_to[src_regno + k] = out;
8153
                      reg_reloaded_contents[src_regno + k] = out_regno;
8154
                      reg_reloaded_insn[src_regno + k] = store_insn;
8155
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8156
                      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8157
                      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8158
                                                          mode))
8159
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8160
                                          src_regno + k);
8161
                      else
8162
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8163
                                            src_regno + k);
8164
                      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8165
                      if (note)
8166
                        SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8167
                      else
8168
                        CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8169
                    }
8170
                  reg_last_reload_reg[out_regno] = src_reg;
8171
                  /* We have to set reg_has_output_reload here, or else
8172
                     forget_old_reloads_1 will clear reg_last_reload_reg
8173
                     right away.  */
8174
                  SET_REGNO_REG_SET (&reg_has_output_reload,
8175
                                     out_regno);
8176
                }
8177
            }
8178
          else
8179
            {
8180
              int k, out_nregs = hard_regno_nregs[out_regno][mode];
8181
 
8182
              for (k = 0; k < out_nregs; k++)
8183
                reg_last_reload_reg[out_regno + k] = 0;
8184
            }
8185
        }
8186
    }
8187
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8188
}
8189
 
8190
/* Go through the motions to emit INSN and test if it is strictly valid.
8191
   Return the emitted insn if valid, else return NULL.  */
8192
 
8193
static rtx
8194
emit_insn_if_valid_for_reload (rtx insn)
8195
{
8196
  rtx last = get_last_insn ();
8197
  int code;
8198
 
8199
  insn = emit_insn (insn);
8200
  code = recog_memoized (insn);
8201
 
8202
  if (code >= 0)
8203
    {
8204
      extract_insn (insn);
8205
      /* We want constrain_operands to treat this insn strictly in its
8206
         validity determination, i.e., the way it would after reload has
8207
         completed.  */
8208
      if (constrain_operands (1))
8209
        return insn;
8210
    }
8211
 
8212
  delete_insns_since (last);
8213
  return NULL;
8214
}
8215
 
8216
/* Emit code to perform a reload from IN (which may be a reload register) to
8217
   OUT (which may also be a reload register).  IN or OUT is from operand
8218
   OPNUM with reload type TYPE.
8219
 
8220
   Returns first insn emitted.  */
8221
 
8222
static rtx
8223
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8224
{
8225
  rtx last = get_last_insn ();
8226
  rtx tem;
8227
 
8228
  /* If IN is a paradoxical SUBREG, remove it and try to put the
8229
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
8230
  if (GET_CODE (in) == SUBREG
8231
      && (GET_MODE_SIZE (GET_MODE (in))
8232
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8233
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8234
    in = SUBREG_REG (in), out = tem;
8235
  else if (GET_CODE (out) == SUBREG
8236
           && (GET_MODE_SIZE (GET_MODE (out))
8237
               > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8238
           && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8239
    out = SUBREG_REG (out), in = tem;
8240
 
8241
  /* How to do this reload can get quite tricky.  Normally, we are being
8242
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8243
     register that didn't get a hard register.  In that case we can just
8244
     call emit_move_insn.
8245
 
8246
     We can also be asked to reload a PLUS that adds a register or a MEM to
8247
     another register, constant or MEM.  This can occur during frame pointer
8248
     elimination and while reloading addresses.  This case is handled by
8249
     trying to emit a single insn to perform the add.  If it is not valid,
8250
     we use a two insn sequence.
8251
 
8252
     Or we can be asked to reload a unary operand that was a fragment of
8253
     an addressing mode, into a register.  If it isn't recognized as-is,
8254
     we try making the unop operand and the reload-register the same:
8255
     (set reg:X (unop:X expr:Y))
8256
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8257
 
8258
     Finally, we could be called to handle an 'o' constraint by putting
8259
     an address into a register.  In that case, we first try to do this
8260
     with a named pattern of "reload_load_address".  If no such pattern
8261
     exists, we just emit a SET insn and hope for the best (it will normally
8262
     be valid on machines that use 'o').
8263
 
8264
     This entire process is made complex because reload will never
8265
     process the insns we generate here, so we must ensure that they
8266
     will fit their constraints, and also because parts of IN might be
8267
     being reloaded separately and replaced with spill registers.
8268
     Because of this, we are, in some sense, just guessing the right approach
8269
     here.  The one listed above seems to work.
8270
 
8271
     ??? At some point, this whole thing needs to be rethought.  */
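  /* As a concrete sketch (the register names are only illustrative): for
     IN = (plus:SI (reg:SI fp) (const_int 64)) and a reload register r3,
     the code below first tries the single insn

         (set (reg:SI r3) (plus:SI (reg:SI fp) (const_int 64)))

     and, if that is not recognized or fails its constraints, falls back to
     copying one operand into r3 and adding the other to it, e.g.

         (set (reg:SI r3) (const_int 64))
         (set (reg:SI r3) (plus:SI (reg:SI r3) (reg:SI fp)))

     with a REG_EQUIV note for the original PLUS on the final insn.  */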
8272
 
8273
  if (GET_CODE (in) == PLUS
8274
      && (REG_P (XEXP (in, 0))
8275
          || GET_CODE (XEXP (in, 0)) == SUBREG
8276
          || MEM_P (XEXP (in, 0)))
8277
      && (REG_P (XEXP (in, 1))
8278
          || GET_CODE (XEXP (in, 1)) == SUBREG
8279
          || CONSTANT_P (XEXP (in, 1))
8280
          || MEM_P (XEXP (in, 1))))
8281
    {
8282
      /* We need to compute the sum of a register or a MEM and another
8283
         register, constant, or MEM, and put it into the reload
8284
         register.  The best possible way of doing this is if the machine
8285
         has a three-operand ADD insn that accepts the required operands.
8286
 
8287
         The simplest approach is to try to generate such an insn and see if it
8288
         is recognized and matches its constraints.  If so, it can be used.
8289
 
8290
         It might be better not to actually emit the insn unless it is valid,
8291
         but we need to pass the insn as an operand to `recog' and
8292
         `extract_insn' and it is simpler to emit and then delete the insn if
8293
         not valid than to dummy things up.  */
8294
 
8295
      rtx op0, op1, tem, insn;
8296
      int code;
8297
 
8298
      op0 = find_replacement (&XEXP (in, 0));
8299
      op1 = find_replacement (&XEXP (in, 1));
8300
 
8301
      /* Since constraint checking is strict, commutativity won't be
8302
         checked, so we need to do that here to avoid spurious failure
8303
         if the add instruction is two-address and the second operand
8304
         of the add is the same as the reload reg, which is frequently
8305
         the case.  If the insn would be A = B + A, rearrange it so
8306
         it will be A = A + B as constrain_operands expects.  */
8307
 
8308
      if (REG_P (XEXP (in, 1))
8309
          && REGNO (out) == REGNO (XEXP (in, 1)))
8310
        tem = op0, op0 = op1, op1 = tem;
8311
 
8312
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8313
        in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8314
 
8315
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8316
      if (insn)
8317
        return insn;
8318
 
8319
      /* If that failed, we must use a conservative two-insn sequence.
8320
 
8321
         Use a move to copy one operand into the reload register.  Prefer
8322
         to reload a constant, MEM or pseudo since the move patterns can
8323
         handle an arbitrary operand.  If OP1 is not a constant, MEM or
8324
         pseudo and OP1 is not a valid operand for an add instruction, then
8325
         reload OP1.
8326
 
8327
         After reloading one of the operands into the reload register, add
8328
         the reload register to the output register.
8329
 
8330
         If there is another way to do this for a specific machine, a
8331
         DEFINE_PEEPHOLE should be specified that recognizes the sequence
8332
         we emit below.  */
8333
 
8334
      code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8335
 
8336
      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8337
          || (REG_P (op1)
8338
              && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8339
          || (code != CODE_FOR_nothing
8340
              && ! ((*insn_data[code].operand[2].predicate)
8341
                    (op1, insn_data[code].operand[2].mode))))
8342
        tem = op0, op0 = op1, op1 = tem;
8343
 
8344
      gen_reload (out, op0, opnum, type);
8345
 
8346
      /* If OP0 and OP1 are the same, we can use OUT for OP1.
8347
         This fixes a problem on the 32K where the stack pointer cannot
8348
         be used as an operand of an add insn.  */
8349
 
8350
      if (rtx_equal_p (op0, op1))
8351
        op1 = out;
8352
 
8353
      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8354
      if (insn)
8355
        {
8356
          /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
8357
          set_unique_reg_note (insn, REG_EQUIV, in);
8358
          return insn;
8359
        }
8360
 
8361
      /* If that failed, copy the address register to the reload register.
8362
         Then add the constant to the reload register.  */
8363
 
8364
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
8365
      gen_reload (out, op1, opnum, type);
8366
      insn = emit_insn (gen_add2_insn (out, op0));
8367
      set_unique_reg_note (insn, REG_EQUIV, in);
8368
    }
8369
 
8370
#ifdef SECONDARY_MEMORY_NEEDED
8371
  /* If we need a memory location to do the move, do it that way.  */
8372
  else if ((REG_P (in)
8373
            || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8374
           && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8375
           && (REG_P (out)
8376
               || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8377
           && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8378
           && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8379
                                       REGNO_REG_CLASS (reg_or_subregno (out)),
8380
                                       GET_MODE (out)))
8381
    {
8382
      /* Get the memory to use and rewrite both registers to its mode.  */
8383
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8384
 
8385
      if (GET_MODE (loc) != GET_MODE (out))
8386
        out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8387
 
8388
      if (GET_MODE (loc) != GET_MODE (in))
8389
        in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8390
 
8391
      gen_reload (loc, in, opnum, type);
8392
      gen_reload (out, loc, opnum, type);
8393
    }
8394
#endif
8395
  else if (REG_P (out) && UNARY_P (in))
8396
    {
8397
      rtx insn;
8398
      rtx op1;
8399
      rtx out_moded;
8400
      rtx set;
8401
 
8402
      op1 = find_replacement (&XEXP (in, 0));
8403
      if (op1 != XEXP (in, 0))
8404
        in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8405
 
8406
      /* First, try a plain SET.  */
8407
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8408
      if (set)
8409
        return set;
8410
 
8411
      /* If that failed, move the inner operand to the reload
8412
         register, and try the same unop with the inner expression
8413
         replaced with the reload register.  */
8414
 
8415
      if (GET_MODE (op1) != GET_MODE (out))
8416
        out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8417
      else
8418
        out_moded = out;
8419
 
8420
      gen_reload (out_moded, op1, opnum, type);
8421
 
8422
      insn
8423
        = gen_rtx_SET (VOIDmode, out,
8424
                       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8425
                                      out_moded));
8426
      insn = emit_insn_if_valid_for_reload (insn);
8427
      if (insn)
8428
        {
8429
          set_unique_reg_note (insn, REG_EQUIV, in);
8430
          return insn;
8431
        }
8432
 
8433
      fatal_insn ("Failure trying to reload:", set);
8434
    }
8435
  /* If IN is a simple operand, use gen_move_insn.  */
8436
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8437
    {
8438
      tem = emit_insn (gen_move_insn (out, in));
8439
      /* IN may contain a LABEL_REF; if so, add a REG_LABEL_OPERAND note.  */
8440
      mark_jump_label (in, tem, 0);
8441
    }
8442
 
8443
#ifdef HAVE_reload_load_address
8444
  else if (HAVE_reload_load_address)
8445
    emit_insn (gen_reload_load_address (out, in));
8446
#endif
8447
 
8448
  /* Otherwise, just write (set OUT IN) and hope for the best.  */
8449
  else
8450
    emit_insn (gen_rtx_SET (VOIDmode, out, in));
8451
 
8452
  /* Return the first insn emitted.
8453
     We cannot just return get_last_insn, because there may have
8454
     been multiple instructions emitted.  Also note that gen_move_insn may
8455
     emit more than one insn itself, so we cannot assume that there is one
8456
     insn emitted per emit_insn_before call.  */
8457
 
8458
  return last ? NEXT_INSN (last) : get_insns ();
8459
}
8460
 
8461
/* Delete a previously made output-reload whose result we now believe
8462
   is not needed.  First we double-check.
8463
 
8464
   INSN is the insn now being processed.
8465
   LAST_RELOAD_REG is the hard register number for which we want to delete
8466
   the last output reload.
8467
   J is the reload-number that originally used REG.  The caller has made
8468
   certain that reload J doesn't use REG any longer for input.
8469
   NEW_RELOAD_REG is the reload register that reload J is using for REG.  */
8470
 
8471
static void
8472
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8473
{
8474
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
8475
  rtx reg = spill_reg_stored_to[last_reload_reg];
8476
  int k;
8477
  int n_occurrences;
8478
  int n_inherited = 0;
8479
  rtx i1;
8480
  rtx substed;
8481
 
8482
  /* It is possible that this reload has only been used to set another reload
8483
     we eliminated earlier and thus deleted this instruction too.  */
8484
  if (INSN_DELETED_P (output_reload_insn))
8485
    return;
8486
 
8487
  /* Get the raw pseudo-register referred to.  */
8488
 
8489
  while (GET_CODE (reg) == SUBREG)
8490
    reg = SUBREG_REG (reg);
8491
  substed = reg_equiv_memory_loc[REGNO (reg)];
8492
 
8493
  /* This is unsafe if the operand occurs more often in the current
8494
     insn than the number of inherited uses accounts for.  */
8495
  for (k = n_reloads - 1; k >= 0; k--)
8496
    {
8497
      rtx reg2 = rld[k].in;
8498
      if (! reg2)
8499
        continue;
8500
      if (MEM_P (reg2) || reload_override_in[k])
8501
        reg2 = rld[k].in_reg;
8502
#ifdef AUTO_INC_DEC
8503
      if (rld[k].out && ! rld[k].out_reg)
8504
        reg2 = XEXP (rld[k].in_reg, 0);
8505
#endif
8506
      while (GET_CODE (reg2) == SUBREG)
8507
        reg2 = SUBREG_REG (reg2);
8508
      if (rtx_equal_p (reg2, reg))
8509
        {
8510
          if (reload_inherited[k] || reload_override_in[k] || k == j)
8511
            n_inherited++;
8512
          else
8513
            return;
8514
        }
8515
    }
8516
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8517
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8518
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8519
                                        reg, 0);
8520
  if (substed)
8521
    n_occurrences += count_occurrences (PATTERN (insn),
8522
                                        eliminate_regs (substed, VOIDmode,
8523
                                                        NULL_RTX), 0);
8524
  for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8525
    {
8526
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8527
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8528
    }
8529
  if (n_occurrences > n_inherited)
8530
    return;
8531
 
8532
  /* If the pseudo-reg we are reloading is no longer referenced
8533
     anywhere between the store into it and here,
8534
     and we're within the same basic block, then the value can only
8535
     pass through the reload reg and end up here.
8536
     Otherwise, give up--return.  */
8537
  for (i1 = NEXT_INSN (output_reload_insn);
8538
       i1 != insn; i1 = NEXT_INSN (i1))
8539
    {
8540
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
8541
        return;
8542
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8543
          && reg_mentioned_p (reg, PATTERN (i1)))
8544
        {
8545
          /* If this is a USE in front of INSN, we only have to check that
8546
             there are no more references than accounted for by inheritance.  */
8547
          while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8548
            {
8549
              n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8550
              i1 = NEXT_INSN (i1);
8551
            }
8552
          if (n_occurrences <= n_inherited && i1 == insn)
8553
            break;
8554
          return;
8555
        }
8556
    }
8557
 
8558
  /* We will be deleting the insn.  Remove the spill reg information.  */
8559
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8560
    {
8561
      spill_reg_store[last_reload_reg + k] = 0;
8562
      spill_reg_stored_to[last_reload_reg + k] = 0;
8563
    }
8564
 
8565
  /* The caller has already checked that REG dies or is set in INSN.
8566
     It has also checked that we are optimizing, and thus some
8567
     inaccuracies in the debugging information are acceptable.
8568
     So we could just delete output_reload_insn.  But in some cases
8569
     we can improve the debugging information without sacrificing
8570
     optimization - maybe even improving the code: See if the pseudo
8571
     reg has been completely replaced with reload regs.  If so, delete
8572
     the store insn and forget we had a stack slot for the pseudo.  */
8573
  if (rld[j].out != rld[j].in
8574
      && REG_N_DEATHS (REGNO (reg)) == 1
8575
      && REG_N_SETS (REGNO (reg)) == 1
8576
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8577
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8578
    {
8579
      rtx i2;
8580
 
8581
      /* We know that it was used only between here and the beginning of
8582
         the current basic block.  (We also know that the last use before
8583
         INSN was the output reload we are thinking of deleting, but never
8584
         mind that.)  Search that range; see if any ref remains.  */
8585
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8586
        {
8587
          rtx set = single_set (i2);
8588
 
8589
          /* Uses which just store in the pseudo don't count,
8590
             since if they are the only uses, they are dead.  */
8591
          if (set != 0 && SET_DEST (set) == reg)
8592
            continue;
8593
          if (LABEL_P (i2)
8594
              || JUMP_P (i2))
8595
            break;
8596
          if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8597
              && reg_mentioned_p (reg, PATTERN (i2)))
8598
            {
8599
              /* Some other ref remains; just delete the output reload we
8600
                 know to be dead.  */
8601
              delete_address_reloads (output_reload_insn, insn);
8602
              delete_insn (output_reload_insn);
8603
              return;
8604
            }
8605
        }
8606
 
8607
      /* Delete the now-dead stores into this pseudo.  Note that this
8608
         loop also takes care of deleting output_reload_insn.  */
8609
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8610
        {
8611
          rtx set = single_set (i2);
8612
 
8613
          if (set != 0 && SET_DEST (set) == reg)
8614
            {
8615
              delete_address_reloads (i2, insn);
8616
              delete_insn (i2);
8617
            }
8618
          if (LABEL_P (i2)
8619
              || JUMP_P (i2))
8620
            break;
8621
        }
8622
 
8623
      /* For the debugging info, say the pseudo lives in this reload reg.  */
8624
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8625
      if (ira_conflicts_p)
8626
        /* Inform IRA about the change.  */
8627
        ira_mark_allocation_change (REGNO (reg));
8628
      alter_reg (REGNO (reg), -1, false);
8629
    }
8630
  else
8631
    {
8632
      delete_address_reloads (output_reload_insn, insn);
8633
      delete_insn (output_reload_insn);
8634
    }
8635
}
8636
 
8637
/* We are going to delete DEAD_INSN.  Recursively delete loads of
8638
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8639
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8640
static void
8641
delete_address_reloads (rtx dead_insn, rtx current_insn)
8642
{
8643
  rtx set = single_set (dead_insn);
8644
  rtx set2, dst, prev, next;
8645
  if (set)
8646
    {
8647
      rtx dst = SET_DEST (set);
8648
      if (MEM_P (dst))
8649
        delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8650
    }
8651
  /* If we deleted the store from a reloaded post_{in,de}c expression,
8652
     we can delete the matching adds.  */
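  /* Such a store is bracketed by a matched pair of add insns of the form
     (set reg (plus reg (const_int N))) and (set reg (plus reg (const_int -N)))
     (this is the shape inc_for_reload emits for a postincrement); the
     checks below recognize exactly that pattern before deleting them.  */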
8653
  prev = PREV_INSN (dead_insn);
8654
  next = NEXT_INSN (dead_insn);
8655
  if (! prev || ! next)
8656
    return;
8657
  set = single_set (next);
8658
  set2 = single_set (prev);
8659
  if (! set || ! set2
8660
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8661
      || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8662
      || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8663
    return;
8664
  dst = SET_DEST (set);
8665
  if (! rtx_equal_p (dst, SET_DEST (set2))
8666
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8667
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8668
      || (INTVAL (XEXP (SET_SRC (set), 1))
8669
          != -INTVAL (XEXP (SET_SRC (set2), 1))))
8670
    return;
8671
  delete_related_insns (prev);
8672
  delete_related_insns (next);
8673
}
8674
 
8675
/* Subfunction of delete_address_reloads: process registers found in X.  */
8676
static void
8677
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8678
{
8679
  rtx prev, set, dst, i2;
8680
  int i, j;
8681
  enum rtx_code code = GET_CODE (x);
8682
 
8683
  if (code != REG)
8684
    {
8685
      const char *fmt = GET_RTX_FORMAT (code);
8686
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8687
        {
8688
          if (fmt[i] == 'e')
8689
            delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8690
          else if (fmt[i] == 'E')
8691
            {
8692
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8693
                delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8694
                                          current_insn);
8695
            }
8696
        }
8697
      return;
8698
    }
8699
 
8700
  if (spill_reg_order[REGNO (x)] < 0)
8701
    return;
8702
 
8703
  /* Scan backwards for the insn that sets X.  This might be quite far back due
8704
     to inheritance.  */
8705
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8706
    {
8707
      code = GET_CODE (prev);
8708
      if (code == CODE_LABEL || code == JUMP_INSN)
8709
        return;
8710
      if (!INSN_P (prev))
8711
        continue;
8712
      if (reg_set_p (x, PATTERN (prev)))
8713
        break;
8714
      if (reg_referenced_p (x, PATTERN (prev)))
8715
        return;
8716
    }
8717
  if (! prev || INSN_UID (prev) < reload_first_uid)
8718
    return;
8719
  /* Check that PREV only sets the reload register.  */
8720
  set = single_set (prev);
8721
  if (! set)
8722
    return;
8723
  dst = SET_DEST (set);
8724
  if (!REG_P (dst)
8725
      || ! rtx_equal_p (dst, x))
8726
    return;
8727
  if (! reg_set_p (dst, PATTERN (dead_insn)))
8728
    {
8729
      /* Check if DST was used in a later insn -
8730
         it might have been inherited.  */
8731
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8732
        {
8733
          if (LABEL_P (i2))
8734
            break;
8735
          if (! INSN_P (i2))
8736
            continue;
8737
          if (reg_referenced_p (dst, PATTERN (i2)))
8738
            {
8739
              /* If there is a reference to the register in the current insn,
8740
                 it might be loaded in a non-inherited reload.  If no other
8741
                 reload uses it, that means the register is set before
8742
                 referenced.  */
8743
              if (i2 == current_insn)
8744
                {
8745
                  for (j = n_reloads - 1; j >= 0; j--)
8746
                    if ((rld[j].reg_rtx == dst && reload_inherited[j])
8747
                        || reload_override_in[j] == dst)
8748
                      return;
8749
                  for (j = n_reloads - 1; j >= 0; j--)
8750
                    if (rld[j].in && rld[j].reg_rtx == dst)
8751
                      break;
8752
                  if (j >= 0)
8753
                    break;
8754
                }
8755
              return;
8756
            }
8757
          if (JUMP_P (i2))
8758
            break;
8759
          /* If DST is still live at CURRENT_INSN, check if it is used for
8760
             any reload.  Note that even if CURRENT_INSN sets DST, we still
8761
             have to check the reloads.  */
8762
          if (i2 == current_insn)
8763
            {
8764
              for (j = n_reloads - 1; j >= 0; j--)
8765
                if ((rld[j].reg_rtx == dst && reload_inherited[j])
8766
                    || reload_override_in[j] == dst)
8767
                  return;
8768
              /* ??? We can't finish the loop here, because dst might be
8769
                 allocated to a pseudo in this block if no reload in this
8770
                 block needs any of the classes containing DST - see
8771
                 spill_hard_reg.  There is no easy way to tell this, so we
8772
                 have to scan till the end of the basic block.  */
8773
            }
8774
          if (reg_set_p (dst, PATTERN (i2)))
8775
            break;
8776
        }
8777
    }
8778
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8779
  reg_reloaded_contents[REGNO (dst)] = -1;
8780
  delete_insn (prev);
8781
}
8782
 
8783
/* Output reload-insns to reload VALUE into RELOADREG.
8784
   VALUE is an autoincrement or autodecrement RTX whose operand
8785
   is a register or memory location;
8786
   so reloading involves incrementing that location.
8787
   IN is either identical to VALUE, or some cheaper place to reload from.
8788
 
8789
   INC_AMOUNT is the number to increment or decrement by (always positive).
8790
   This cannot be deduced from VALUE.
8791
 
8792
   Return the instruction that stores into RELOADREG.  */
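/* As an illustrative sketch (register names are hypothetical): for
   VALUE = (post_inc:SI (reg:SI r5)), INC_AMOUNT = 4 and IN == VALUE,
   when the location cannot be incremented in place the code below emits
   roughly

       (set (reg:SI reloadreg) (reg:SI r5))
       (set (reg:SI reloadreg) (plus:SI (reg:SI reloadreg) (const_int 4)))
       (set (reg:SI r5) (reg:SI reloadreg))
       (set (reg:SI reloadreg) (plus:SI (reg:SI reloadreg) (const_int -4)))

   leaving RELOADREG with the pre-increment value while R5 itself has
   been incremented.  */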
8793
 
8794
static rtx
8795
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8796
{
8797
  /* REG or MEM to be copied and incremented.  */
8798
  rtx incloc = find_replacement (&XEXP (value, 0));
8799
  /* Nonzero if increment after copying.  */
8800
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8801
              || GET_CODE (value) == POST_MODIFY);
8802
  rtx last;
8803
  rtx inc;
8804
  rtx add_insn;
8805
  int code;
8806
  rtx store;
8807
  rtx real_in = in == value ? incloc : in;
8808
 
8809
  /* No hard register is equivalent to this register after
8810
     the inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
8811
     we could inc/dec that register as well (maybe even using it for
8812
     the source), but I'm not sure it's worth worrying about.  */
8813
  if (REG_P (incloc))
8814
    reg_last_reload_reg[REGNO (incloc)] = 0;
8815
 
8816
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8817
    {
8818
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8819
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8820
    }
8821
  else
8822
    {
8823
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8824
        inc_amount = -inc_amount;
8825
 
8826
      inc = GEN_INT (inc_amount);
8827
    }
8828
 
8829
  /* If this is post-increment, first copy the location to the reload reg.  */
8830
  if (post && real_in != reloadreg)
8831
    emit_insn (gen_move_insn (reloadreg, real_in));
8832
 
8833
  if (in == value)
8834
    {
8835
      /* See if we can directly increment INCLOC.  Use a method similar to
8836
         that in gen_reload.  */
8837
 
8838
      last = get_last_insn ();
8839
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8840
                                         gen_rtx_PLUS (GET_MODE (incloc),
8841
                                                       incloc, inc)));
8842
 
8843
      code = recog_memoized (add_insn);
8844
      if (code >= 0)
8845
        {
8846
          extract_insn (add_insn);
8847
          if (constrain_operands (1))
8848
            {
8849
              /* If this is a pre-increment and we have incremented the value
8850
                 where it lives, copy the incremented value to RELOADREG to
8851
                 be used as an address.  */
8852
 
8853
              if (! post)
8854
                emit_insn (gen_move_insn (reloadreg, incloc));
8855
 
8856
              return add_insn;
8857
            }
8858
        }
8859
      delete_insns_since (last);
8860
    }
8861
 
8862
  /* If we couldn't do the increment directly, we must increment in RELOADREG.
8863
     The way we do this depends on whether this is pre- or post-increment.
8864
     For pre-increment, copy INCLOC to the reload register, increment it
8865
     there, then save back.  */
8866
 
8867
  if (! post)
8868
    {
8869
      if (in != reloadreg)
8870
        emit_insn (gen_move_insn (reloadreg, real_in));
8871
      emit_insn (gen_add2_insn (reloadreg, inc));
8872
      store = emit_insn (gen_move_insn (incloc, reloadreg));
8873
    }
8874
  else
8875
    {
8876
      /* Postincrement.
8877
         Because this might be a jump insn or a compare, and because RELOADREG
8878
         may not be available after the insn in an input reload, we must do
8879
         the incrementation before the insn being reloaded for.
8880
 
8881
         We have already copied IN to RELOADREG.  Increment the copy in
8882
         RELOADREG, save that back, then decrement RELOADREG so it has
8883
         the original value.  */
8884
 
8885
      emit_insn (gen_add2_insn (reloadreg, inc));
8886
      store = emit_insn (gen_move_insn (incloc, reloadreg));
8887
      if (CONST_INT_P (inc))
8888
        emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8889
      else
8890
        emit_insn (gen_sub2_insn (reloadreg, inc));
8891
    }
8892
 
8893
  return store;
8894
}
8895
 
8896
#ifdef AUTO_INC_DEC
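/* Recursively scan X, part of the pattern of INSN, and add a REG_INC
   note to INSN for the register in each autoincrement or autodecrement
   address found inside a MEM.  */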
8897
static void
8898
add_auto_inc_notes (rtx insn, rtx x)
8899
{
8900
  enum rtx_code code = GET_CODE (x);
8901
  const char *fmt;
8902
  int i, j;
8903
 
8904
  if (code == MEM && auto_inc_p (XEXP (x, 0)))
8905
    {
8906
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
8907
      return;
8908
    }
8909
 
8910
  /* Scan all the operand sub-expressions.  */
8911
  fmt = GET_RTX_FORMAT (code);
8912
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8913
    {
8914
      if (fmt[i] == 'e')
8915
        add_auto_inc_notes (insn, XEXP (x, i));
8916
      else if (fmt[i] == 'E')
8917
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8918
          add_auto_inc_notes (insn, XVECEXP (x, i, j));
8919
    }
8920
}
8921
#endif
8922
 
8923
/* This is used by the reload pass, which emits some instructions after
8924
   abnormal calls that move the basic block end, when in fact it wants to
8925
   emit them on the edge.  Look for abnormal call edges, find the proper
8926
   call backwards and fix the damage.
8927
 
8928
   Instructions that throw exceptions internally are handled similarly.  */
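/* Roughly, the strategy below is: find the last real call or trapping insn
   in the block, truncate the block right after it, and queue the trailing
   instructions on the outgoing fallthru edge with insert_insn_on_edge;
   commit_edge_insertions then materializes them on that edge.  */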
8929
void
8930
fixup_abnormal_edges (void)
8931
{
8932
  bool inserted = false;
8933
  basic_block bb;
8934
 
8935
  FOR_EACH_BB (bb)
8936
    {
8937
      edge e;
8938
      edge_iterator ei;
8939
 
8940
      /* Look for cases we are interested in - calls or instructions causing
8941
         exceptions.  */
8942
      FOR_EACH_EDGE (e, ei, bb->succs)
8943
        {
8944
          if (e->flags & EDGE_ABNORMAL_CALL)
8945
            break;
8946
          if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8947
              == (EDGE_ABNORMAL | EDGE_EH))
8948
            break;
8949
        }
8950
      if (e && !CALL_P (BB_END (bb))
8951
          && !can_throw_internal (BB_END (bb)))
8952
        {
8953
          rtx insn;
8954
 
8955
          /* Get past the new insns generated.  Allow notes, as the insns
8956
             may already be deleted.  */
8957
          insn = BB_END (bb);
8958
          while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8959
                 && !can_throw_internal (insn)
8960
                 && insn != BB_HEAD (bb))
8961
            insn = PREV_INSN (insn);
8962
 
8963
          if (CALL_P (insn) || can_throw_internal (insn))
8964
            {
8965
              rtx stop, next;
8966
 
8967
              stop = NEXT_INSN (BB_END (bb));
8968
              BB_END (bb) = insn;
8969
              insn = NEXT_INSN (insn);
8970
 
8971
              FOR_EACH_EDGE (e, ei, bb->succs)
8972
                if (e->flags & EDGE_FALLTHRU)
8973
                  break;
8974
 
8975
              while (insn && insn != stop)
8976
                {
8977
                  next = NEXT_INSN (insn);
8978
                  if (INSN_P (insn))
8979
                    {
8980
                      delete_insn (insn);
8981
 
8982
                      /* Sometimes there's still the return value USE.
8983
                         If it's placed after a trapping call (i.e. that
8984
                         call is the last insn anyway), we have no fallthru
8985
                         edge.  Simply delete this use and don't try to insert
8986
                         on the non-existent edge.  */
8987
                      if (GET_CODE (PATTERN (insn)) != USE)
8988
                        {
8989
                          /* We're not deleting it, we're moving it.  */
8990
                          INSN_DELETED_P (insn) = 0;
8991
                          PREV_INSN (insn) = NULL_RTX;
8992
                          NEXT_INSN (insn) = NULL_RTX;
8993
 
8994
                          insert_insn_on_edge (insn, e);
8995
                          inserted = true;
8996
                        }
8997
                    }
8998
                  else if (!BARRIER_P (insn))
8999
                    set_block_for_insn (insn, NULL);
9000
                  insn = next;
9001
                }
9002
            }
9003
 
9004
          /* It may be that we don't find any such trapping insn.  In this
9005
             case we discovered quite late that the insn that had been
9006
             marked as can_throw_internal in fact couldn't trap at all.
9007
             So we should in fact delete the EH edges out of the block.  */
9008
          else
9009
            purge_dead_edges (bb);
9010
        }
9011
    }
9012
 
9013
  /* We've possibly turned a single trapping insn into multiple ones.  */
9014
  if (flag_non_call_exceptions)
9015
    {
9016
      sbitmap blocks;
9017
      blocks = sbitmap_alloc (last_basic_block);
9018
      sbitmap_ones (blocks);
9019
      find_many_sub_basic_blocks (blocks);
9020
      sbitmap_free (blocks);
9021
    }
9022
 
9023
  if (inserted)
9024
    commit_edge_insertions ();
9025
 
9026
#ifdef ENABLE_CHECKING
9027
  /* Verify that we didn't turn one trapping insn into many, and that
9028
     we found and corrected all of the problems wrt fixups on the
9029
     fallthru edge.  */
9030
  verify_flow_info ();
9031
#endif
9032
}
