OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

openrisc/trunk/gnu-old/gcc-4.2.2/gcc/reload1.c - Blame information for rev 825


Line No. Rev Author Line
1 38 julius
/* Reload pseudo regs into hard regs for insns that require hard regs.
2
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
 
27
#include "machmode.h"
28
#include "hard-reg-set.h"
29
#include "rtl.h"
30
#include "tm_p.h"
31
#include "obstack.h"
32
#include "insn-config.h"
33
#include "flags.h"
34
#include "function.h"
35
#include "expr.h"
36
#include "optabs.h"
37
#include "regs.h"
38
#include "addresses.h"
39
#include "basic-block.h"
40
#include "reload.h"
41
#include "recog.h"
42
#include "output.h"
43
#include "real.h"
44
#include "toplev.h"
45
#include "except.h"
46
#include "tree.h"
47
#include "target.h"
48
 
49
/* This file contains the reload pass of the compiler, which is
50
   run after register allocation has been done.  It checks that
51
   each insn is valid (operands required to be in registers really
52
   are in registers of the proper class) and fixes up invalid ones
53
   by copying values temporarily into registers for the insns
54
   that need them.
55
 
56
   The results of register allocation are described by the vector
57
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
58
   can be used to find which hard reg, if any, a pseudo reg is in.
59
 
60
   The technique we always use is to free up a few hard regs that are
61
   called ``reload regs'', and for each place where a pseudo reg
62
   must be in a hard reg, copy it temporarily into one of the reload regs.
63
 
64
   Reload regs are allocated locally for every instruction that needs
65
   reloads.  When there are pseudos which are allocated to a register that
66
   has been chosen as a reload reg, such pseudos must be ``spilled''.
67
   This means that they go to other hard regs, or to stack slots if no other
68
   available hard regs can be found.  Spilling can invalidate more
69
   insns, requiring additional need for reloads, so we must keep checking
70
   until the process stabilizes.
71
 
72
   For machines with different classes of registers, we must keep track
73
   of the register class needed for each reload, and make sure that
74
   we allocate enough reload registers of each class.
75
 
76
   The file reload.c contains the code that checks one insn for
77
   validity and reports the reloads that it needs.  This file
78
   is in charge of scanning the entire rtl code, accumulating the
79
   reload needs, spilling, assigning reload registers to use for
80
   fixing up each insn, and generating the new insns to copy values
81
   into the reload registers.  */
82
 
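/* Illustrative sketch (not part of the original file): for a pseudo such
   as (reg:SI 100) that received no hard register and lives in a stack
   slot, reload rewrites an insn like

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   into roughly

       (set (reg:SI 3) (mem:SI (plus:SI (reg:SI fp) (const_int -8))))
       (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
       (set (mem:SI (plus:SI (reg:SI fp) (const_int -8))) (reg:SI 3))

   where hard register 3 acts as the reload register for this insn and the
   frame-pointer offset is the spill slot assigned by alter_reg.  The
   register numbers and the offset are hypothetical.  */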
83
/* During reload_as_needed, element N contains a REG rtx for the hard reg
84
   into which reg N has been reloaded (perhaps for a previous insn).  */
85
static rtx *reg_last_reload_reg;
86
 
87
/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
88
   for an output reload that stores into reg N.  */
89
static regset_head reg_has_output_reload;
90
 
91
/* Indicates which hard regs are reload-registers for an output reload
92
   in the current insn.  */
93
static HARD_REG_SET reg_is_output_reload;
94
 
95
/* Element N is the constant value to which pseudo reg N is equivalent,
96
   or zero if pseudo reg N is not equivalent to a constant.
97
   find_reloads looks at this in order to replace pseudo reg N
98
   with the constant it stands for.  */
99
rtx *reg_equiv_constant;
100
 
101
/* Element N is an invariant value to which pseudo reg N is equivalent.
102
   eliminate_regs_in_insn uses this to replace pseudos in particular
103
   contexts.  */
104
rtx *reg_equiv_invariant;
105
 
106
/* Element N is a memory location to which pseudo reg N is equivalent,
107
   prior to any register elimination (such as frame pointer to stack
108
   pointer).  Depending on whether or not it is a valid address, this value
109
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
110
rtx *reg_equiv_memory_loc;
111
 
112
/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
113
   collector can keep track of what is inside.  */
114
VEC(rtx,gc) *reg_equiv_memory_loc_vec;
115
 
116
/* Element N is the address of stack slot to which pseudo reg N is equivalent.
117
   This is used when the address is not valid as a memory address
118
   (because its displacement is too big for the machine.)  */
119
rtx *reg_equiv_address;
120
 
121
/* Element N is the memory slot to which pseudo reg N is equivalent,
122
   or zero if pseudo reg N is not equivalent to a memory slot.  */
123
rtx *reg_equiv_mem;
124
 
125
/* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
126
   alternate representations of the location of pseudo reg N.  */
127
rtx *reg_equiv_alt_mem_list;
128
 
129
/* Widest width in which each pseudo reg is referred to (via subreg).  */
130
static unsigned int *reg_max_ref_width;
131
 
132
/* Element N is the list of insns that initialized reg N from its equivalent
133
   constant or memory slot.  */
134
rtx *reg_equiv_init;
135
int reg_equiv_init_size;
136
 
137
/* Vector to remember old contents of reg_renumber before spilling.  */
138
static short *reg_old_renumber;
139
 
140
/* During reload_as_needed, element N contains the last pseudo regno reloaded
141
   into hard register N.  If that pseudo reg occupied more than one register,
142
   reg_reloaded_contents points to that pseudo for each spill register in
143
   use; all of these must remain set for an inheritance to occur.  */
144
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
145
 
146
/* During reload_as_needed, element N contains the insn for which
147
   hard register N was last used.   Its contents are significant only
148
   when reg_reloaded_valid is set for this register.  */
149
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
150
 
151
/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
152
static HARD_REG_SET reg_reloaded_valid;
153
/* Indicate if the register was dead at the end of the reload.
154
   This is only valid if reg_reloaded_contents is set and valid.  */
155
static HARD_REG_SET reg_reloaded_dead;
156
 
157
/* Indicate whether the register's current value is one that is not
158
   safe to retain across a call, even for registers that are normally
159
   call-saved.  */
160
static HARD_REG_SET reg_reloaded_call_part_clobbered;
161
 
162
/* Number of spill-regs so far; number of valid elements of spill_regs.  */
163
static int n_spills;
164
 
165
/* In parallel with spill_regs, contains REG rtx's for those regs.
166
   Holds the last rtx used for any given reg, or 0 if it has never
167
   been used for spilling yet.  This rtx is reused, provided it has
168
   the proper mode.  */
169
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
170
 
171
/* In parallel with spill_regs, contains nonzero for a spill reg
172
   that was stored after the last time it was used.
173
   The precise value is the insn generated to do the store.  */
174
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
175
 
176
/* This is the register that was stored with spill_reg_store.  This is a
177
   copy of reload_out / reload_out_reg when the value was stored; if
178
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
179
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
180
 
181
/* This table is the inverse mapping of spill_regs:
182
   indexed by hard reg number,
183
   it contains the position of that reg in spill_regs,
184
   or -1 for something that is not in spill_regs.
185
 
186
   ?!?  This is no longer accurate.  */
187
static short spill_reg_order[FIRST_PSEUDO_REGISTER];
188
 
189
/* This reg set indicates registers that can't be used as spill registers for
190
   the currently processed insn.  These are the hard registers which are live
191
   during the insn, but not allocated to pseudos, as well as fixed
192
   registers.  */
193
static HARD_REG_SET bad_spill_regs;
194
 
195
/* These are the hard registers that can't be used as spill register for any
196
   insn.  This includes registers used for user variables and registers that
197
   we can't eliminate.  A register that appears in this set also can't be used
198
   to retry register allocation.  */
199
static HARD_REG_SET bad_spill_regs_global;
200
 
201
/* Describes order of use of registers for reloading
202
   of spilled pseudo-registers.  `n_spills' is the number of
203
   elements that are actually valid; new ones are added at the end.
204
 
205
   Both spill_regs and spill_reg_order are used on two occasions:
206
   once during find_reload_regs, where they keep track of the spill registers
207
   for a single insn, but also during reload_as_needed where they show all
208
   the registers ever used by reload.  For the latter case, the information
209
   is calculated during finish_spills.  */
210
static short spill_regs[FIRST_PSEUDO_REGISTER];
211
 
212
/* This vector of reg sets indicates, for each pseudo, which hard registers
213
   may not be used for retrying global allocation because the register was
214
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
215
   a register that it was already allocated to, reload might not
216
   terminate.  */
217
static HARD_REG_SET *pseudo_previous_regs;
218
 
219
/* This vector of reg sets indicates, for each pseudo, which hard
220
   registers may not be used for retrying global allocation because they
221
   are used as spill registers during one of the insns in which the
222
   pseudo is live.  */
223
static HARD_REG_SET *pseudo_forbidden_regs;
224
 
225
/* All hard regs that have been used as spill registers for any insn are
226
   marked in this set.  */
227
static HARD_REG_SET used_spill_regs;
228
 
229
/* Index of last register assigned as a spill register.  We allocate in
230
   a round-robin fashion.  */
231
static int last_spill_reg;
232
 
233
/* Nonzero if indirect addressing is supported on the machine; this means
234
   that spilling (REG n) does not require reloading it into a register in
235
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
236
   value indicates the level of indirect addressing supported, e.g., two
237
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
238
   a hard register.  */
239
static char spill_indirect_levels;
240
 
241
/* Nonzero if indirect addressing is supported when the innermost MEM is
242
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
243
   which these are valid is the same as spill_indirect_levels, above.  */
244
char indirect_symref_ok;
245
 
246
/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
247
char double_reg_address_ok;
248
 
249
/* Record the stack slot for each spilled hard register.  */
250
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
251
 
252
/* Width allocated so far for that stack slot.  */
253
static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
254
 
255
/* Record which pseudos needed to be spilled.  */
256
static regset_head spilled_pseudos;
257
 
258
/* Used for communication between order_regs_for_reload and count_pseudo.
259
   Used to avoid counting one pseudo twice.  */
260
static regset_head pseudos_counted;
261
 
262
/* First uid used by insns created by reload in this function.
263
   Used in find_equiv_reg.  */
264
int reload_first_uid;
265
 
266
/* Flag set by local-alloc or global-alloc if anything is live in
267
   a call-clobbered reg across calls.  */
268
int caller_save_needed;
269
 
270
/* Set to 1 while reload_as_needed is operating.
271
   Required by some machines to handle any generated moves differently.  */
272
int reload_in_progress = 0;
273
 
274
/* These arrays record the insn_code of insns that may be needed to
275
   perform input and output reloads of special objects.  They provide a
276
   place to pass a scratch register.  */
277
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278
enum insn_code reload_out_optab[NUM_MACHINE_MODES];
279
 
280
/* This obstack is used for allocation of rtl during register elimination.
281
   The allocated storage can be freed once find_reloads has processed the
282
   insn.  */
283
static struct obstack reload_obstack;
284
 
285
/* Points to the beginning of the reload_obstack.  All insn_chain structures
286
   are allocated first.  */
287
static char *reload_startobj;
288
 
289
/* The point after all insn_chain structures.  Used to quickly deallocate
290
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
291
static char *reload_firstobj;
292
 
293
/* This points before all local rtl generated by register elimination.
294
   Used to quickly free all memory after processing one insn.  */
295
static char *reload_insn_firstobj;
296
 
297
/* List of insn_chain instructions, one for every insn that reload needs to
298
   examine.  */
299
struct insn_chain *reload_insn_chain;
300
 
301
/* List of all insns needing reloads.  */
302
static struct insn_chain *insns_need_reload;
303
 
304
/* This structure is used to record information about register eliminations.
305
   Each array entry describes one possible way of eliminating a register
306
   in favor of another.   If there is more than one way of eliminating a
307
   particular register, the most preferred should be specified first.  */
308
 
309
struct elim_table
310
{
311
  int from;                     /* Register number to be eliminated.  */
312
  int to;                       /* Register number used as replacement.  */
313
  HOST_WIDE_INT initial_offset; /* Initial difference between values.  */
314
  int can_eliminate;            /* Nonzero if this elimination can be done.  */
315
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
316
                                   insns made by reload.  */
317
  HOST_WIDE_INT offset;         /* Current offset between the two regs.  */
318
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
319
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
320
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
321
                                   We cannot simply compare the number since
322
                                   we might then spuriously replace a hard
323
                                   register corresponding to a pseudo
324
                                   assigned to the reg to be eliminated.  */
325
  rtx to_rtx;                   /* REG rtx for the replacement.  */
326
};
327
 
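/* Illustrative sketch (not part of the original file): when the entry
   eliminating FRAME_POINTER_REGNUM in favor of STACK_POINTER_REGNUM can
   be used, an address such as

       (plus (reg frame_pointer) (const_int 8))

   is rewritten by eliminate_regs into

       (plus (reg stack_pointer) (const_int 8 + offset))

   where `offset' is this entry's current offset field, i.e. the distance
   between the two registers at the insn being processed.  The constant 8
   is just an example.  */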
328
static struct elim_table *reg_eliminate = 0;
329
 
330
/* This is an intermediate structure to initialize the table.  It has
331
   exactly the members provided by ELIMINABLE_REGS.  */
332
static const struct elim_table_1
333
{
334
  const int from;
335
  const int to;
336
} reg_eliminate_1[] =
337
 
338
/* If a set of eliminable registers was specified, define the table from it.
339
   Otherwise, default to the normal case of the frame pointer being
340
   replaced by the stack pointer.  */
341
 
342
#ifdef ELIMINABLE_REGS
343
  ELIMINABLE_REGS;
344
#else
345
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
346
#endif
347
 
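/* Illustrative sketch (not part of this file): a target that supports
   several eliminations defines ELIMINABLE_REGS in its target header as an
   initializer for the table above, listing the preferred elimination for
   each register first, for example:

       #define ELIMINABLE_REGS                                   \
         {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },        \
          { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },   \
          { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },        \
          { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   The exact pairs are target-specific; this shape is only an example.  */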
348
#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
349
 
350
/* Record the number of pending eliminations that have an offset not equal
351
   to their initial offset.  If nonzero, we use a new copy of each
352
   replacement result in any insns encountered.  */
353
int num_not_at_initial_offset;
354
 
355
/* Count the number of registers that we may be able to eliminate.  */
356
static int num_eliminable;
357
/* And the number of registers that are equivalent to a constant that
358
   can be eliminated to frame_pointer / arg_pointer + constant.  */
359
static int num_eliminable_invariants;
360
 
361
/* For each label, we record the offset of each elimination.  If we reach
362
   a label by more than one path and an offset differs, we cannot do the
363
   elimination.  This information is indexed by the difference of the
364
   number of the label and the first label number.  We can't offset the
365
   pointer itself as this can cause problems on machines with segmented
366
   memory.  The first table is an array of flags that records whether we
367
   have yet encountered a label and the second table is an array of arrays,
368
   one entry in the latter array for each elimination.  */
369
 
370
static int first_label_num;
371
static char *offsets_known_at;
372
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
373
 
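/* Illustrative sketch (not part of the original file): with the layout
   described above, the offset recorded for elimination I at label L is
   accessed as

       offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I]

   while offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num] says
   whether label L has been reached at all yet.  */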
374
/* Number of labels in the current function.  */
375
 
376
static int num_labels;
377
 
378
static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
379
static void maybe_fix_stack_asms (void);
380
static void copy_reloads (struct insn_chain *);
381
static void calculate_needs_all_insns (int);
382
static int find_reg (struct insn_chain *, int);
383
static void find_reload_regs (struct insn_chain *);
384
static void select_reload_regs (void);
385
static void delete_caller_save_insns (void);
386
 
387
static void spill_failure (rtx, enum reg_class);
388
static void count_spilled_pseudo (int, int, int);
389
static void delete_dead_insn (rtx);
390
static void alter_reg (int, int);
391
static void set_label_offsets (rtx, rtx, int);
392
static void check_eliminable_occurrences (rtx);
393
static void elimination_effects (rtx, enum machine_mode);
394
static int eliminate_regs_in_insn (rtx, int);
395
static void update_eliminable_offsets (void);
396
static void mark_not_eliminable (rtx, rtx, void *);
397
static void set_initial_elim_offsets (void);
398
static bool verify_initial_elim_offsets (void);
399
static void set_initial_label_offsets (void);
400
static void set_offsets_for_label (rtx);
401
static void init_elim_table (void);
402
static void update_eliminables (HARD_REG_SET *);
403
static void spill_hard_reg (unsigned int, int);
404
static int finish_spills (int);
405
static void scan_paradoxical_subregs (rtx);
406
static void count_pseudo (int);
407
static void order_regs_for_reload (struct insn_chain *);
408
static void reload_as_needed (int);
409
static void forget_old_reloads_1 (rtx, rtx, void *);
410
static void forget_marked_reloads (regset);
411
static int reload_reg_class_lower (const void *, const void *);
412
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
413
                                    enum machine_mode);
414
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
415
                                     enum machine_mode);
416
static int reload_reg_free_p (unsigned int, int, enum reload_type);
417
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
418
                                        rtx, rtx, int, int);
419
static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
420
                             rtx, rtx, int, int);
421
static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
422
static int allocate_reload_reg (struct insn_chain *, int, int);
423
static int conflicts_with_override (rtx);
424
static void failed_reload (rtx, int);
425
static int set_reload_reg (int, int);
426
static void choose_reload_regs_init (struct insn_chain *, rtx *);
427
static void choose_reload_regs (struct insn_chain *);
428
static void merge_assigned_reloads (rtx);
429
static void emit_input_reload_insns (struct insn_chain *, struct reload *,
430
                                     rtx, int);
431
static void emit_output_reload_insns (struct insn_chain *, struct reload *,
432
                                      int);
433
static void do_input_reload (struct insn_chain *, struct reload *, int);
434
static void do_output_reload (struct insn_chain *, struct reload *, int);
435
static bool inherit_piecemeal_p (int, int);
436
static void emit_reload_insns (struct insn_chain *);
437
static void delete_output_reload (rtx, int, int);
438
static void delete_address_reloads (rtx, rtx);
439
static void delete_address_reloads_1 (rtx, rtx, rtx);
440
static rtx inc_for_reload (rtx, rtx, rtx, int);
441
#ifdef AUTO_INC_DEC
442
static void add_auto_inc_notes (rtx, rtx);
443
#endif
444
static void copy_eh_notes (rtx, rtx);
445
static int reloads_conflict (int, int);
446
static rtx gen_reload (rtx, rtx, int, enum reload_type);
447
static rtx emit_insn_if_valid_for_reload (rtx);
448
 
449
/* Initialize the reload pass once per compilation.  */
450
 
451
void
452
init_reload (void)
453
{
454
  int i;
455
 
456
  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
457
     Set spill_indirect_levels to the number of levels such addressing is
458
     permitted, zero if it is not permitted at all.  */
459
 
460
  rtx tem
461
    = gen_rtx_MEM (Pmode,
462
                   gen_rtx_PLUS (Pmode,
463
                                 gen_rtx_REG (Pmode,
464
                                              LAST_VIRTUAL_REGISTER + 1),
465
                                 GEN_INT (4)));
466
  spill_indirect_levels = 0;
467
 
468
  while (memory_address_p (QImode, tem))
469
    {
470
      spill_indirect_levels++;
471
      tem = gen_rtx_MEM (Pmode, tem);
472
    }
473
 
474
  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
475
 
476
  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
477
  indirect_symref_ok = memory_address_p (QImode, tem);
478
 
479
  /* See if reg+reg is a valid (and offsettable) address.  */
480
 
481
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
482
    {
483
      tem = gen_rtx_PLUS (Pmode,
484
                          gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
485
                          gen_rtx_REG (Pmode, i));
486
 
487
      /* This way, we make sure that reg+reg is an offsettable address.  */
488
      tem = plus_constant (tem, 4);
489
 
490
      if (memory_address_p (QImode, tem))
491
        {
492
          double_reg_address_ok = 1;
493
          break;
494
        }
495
    }
496
 
497
  /* Initialize obstack for our rtl allocation.  */
498
  gcc_obstack_init (&reload_obstack);
499
  reload_startobj = obstack_alloc (&reload_obstack, 0);
500
 
501
  INIT_REG_SET (&spilled_pseudos);
502
  INIT_REG_SET (&pseudos_counted);
503
}
504
 
505
/* List of insn chains that are currently unused.  */
506
static struct insn_chain *unused_insn_chains = 0;
507
 
508
/* Allocate an empty insn_chain structure.  */
509
struct insn_chain *
510
new_insn_chain (void)
511
{
512
  struct insn_chain *c;
513
 
514
  if (unused_insn_chains == 0)
515
    {
516
      c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
517
      INIT_REG_SET (&c->live_throughout);
518
      INIT_REG_SET (&c->dead_or_set);
519
    }
520
  else
521
    {
522
      c = unused_insn_chains;
523
      unused_insn_chains = c->next;
524
    }
525
  c->is_caller_save_insn = 0;
526
  c->need_operand_change = 0;
527
  c->need_reload = 0;
528
  c->need_elim = 0;
529
  return c;
530
}
531
 
532
/* Small utility function to set all regs in hard reg set TO which are
533
   allocated to pseudos in regset FROM.  */
534
 
535
void
536
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
537
{
538
  unsigned int regno;
539
  reg_set_iterator rsi;
540
 
541
  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
542
    {
543
      int r = reg_renumber[regno];
544
      int nregs;
545
 
546
      if (r < 0)
547
        {
548
          /* reload_combine uses the information from
549
             BASIC_BLOCK->global_live_at_start, which might still
550
             contain registers that have not actually been allocated
551
             since they have an equivalence.  */
552
          gcc_assert (reload_completed);
553
        }
554
      else
555
        {
556
          nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
557
          while (nregs-- > 0)
558
            SET_HARD_REG_BIT (*to, r + nregs);
559
        }
560
    }
561
}
562
 
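/* Illustrative usage sketch (hypothetical, not part of the original file):

       HARD_REG_SET live_hard;
       CLEAR_HARD_REG_SET (live_hard);
       compute_use_by_pseudos (&live_hard, &chain->live_throughout);

   afterwards live_hard has a bit set for every hard register occupied by
   a pseudo that is live throughout CHAIN's insn.  */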
563
/* Replace all pseudos found in LOC with their corresponding
564
   equivalences.  */
565
 
566
static void
567
replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
568
{
569
  rtx x = *loc;
570
  enum rtx_code code;
571
  const char *fmt;
572
  int i, j;
573
 
574
  if (! x)
575
    return;
576
 
577
  code = GET_CODE (x);
578
  if (code == REG)
579
    {
580
      unsigned int regno = REGNO (x);
581
 
582
      if (regno < FIRST_PSEUDO_REGISTER)
583
        return;
584
 
585
      x = eliminate_regs (x, mem_mode, usage);
586
      if (x != *loc)
587
        {
588
          *loc = x;
589
          replace_pseudos_in (loc, mem_mode, usage);
590
          return;
591
        }
592
 
593
      if (reg_equiv_constant[regno])
594
        *loc = reg_equiv_constant[regno];
595
      else if (reg_equiv_mem[regno])
596
        *loc = reg_equiv_mem[regno];
597
      else if (reg_equiv_address[regno])
598
        *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
599
      else
600
        {
601
          gcc_assert (!REG_P (regno_reg_rtx[regno])
602
                      || REGNO (regno_reg_rtx[regno]) != regno);
603
          *loc = regno_reg_rtx[regno];
604
        }
605
 
606
      return;
607
    }
608
  else if (code == MEM)
609
    {
610
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
611
      return;
612
    }
613
 
614
  /* Process each of our operands recursively.  */
615
  fmt = GET_RTX_FORMAT (code);
616
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
617
    if (*fmt == 'e')
618
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
619
    else if (*fmt == 'E')
620
      for (j = 0; j < XVECLEN (x, i); j++)
621
        replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
622
}
623
 
624
 
625
/* Global variables used by reload and its subroutines.  */
626
 
627
/* Set during calculate_needs if an insn needs register elimination.  */
628
static int something_needs_elimination;
629
/* Set during calculate_needs if an insn needs an operand changed.  */
630
static int something_needs_operands_changed;
631
 
632
/* Nonzero means we couldn't get enough spill regs.  */
633
static int failure;
634
 
635
/* Main entry point for the reload pass.
636
 
637
   FIRST is the first insn of the function being compiled.
638
 
639
   GLOBAL nonzero means we were called from global_alloc
640
   and should attempt to reallocate any pseudoregs that we
641
   displace from hard regs we will use for reloads.
642
   If GLOBAL is zero, we do not have enough information to do that,
643
   so any pseudo reg that is spilled must go to the stack.
644
 
645
   Return value is nonzero if reload failed
646
   and we must not do any more for this function.  */
647
 
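/* Illustrative sketch (not part of this file): the register allocators
   invoke this entry point roughly as

       failed = reload (get_insns (), global_p);

   passing the first insn of the current function and a flag saying
   whether global allocation information is available; a nonzero return
   means code generation for this function must be abandoned.  The
   variable names here are placeholders.  */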
648
int
649
reload (rtx first, int global)
650
{
651
  int i;
652
  rtx insn;
653
  struct elim_table *ep;
654
  basic_block bb;
655
 
656
  /* Make sure even insns with volatile mem refs are recognizable.  */
657
  init_recog ();
658
 
659
  failure = 0;
660
 
661
  reload_firstobj = obstack_alloc (&reload_obstack, 0);
662
 
663
  /* Make sure that the last insn in the chain
664
     is not something that needs reloading.  */
665
  emit_note (NOTE_INSN_DELETED);
666
 
667
  /* Enable find_equiv_reg to distinguish insns made by reload.  */
668
  reload_first_uid = get_max_uid ();
669
 
670
#ifdef SECONDARY_MEMORY_NEEDED
671
  /* Initialize the secondary memory table.  */
672
  clear_secondary_mem ();
673
#endif
674
 
675
  /* We don't have a stack slot for any spill reg yet.  */
676
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
677
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
678
 
679
  /* Initialize the save area information for caller-save, in case some
680
     are needed.  */
681
  init_save_areas ();
682
 
683
  /* Compute which hard registers are now in use
684
     as homes for pseudo registers.
685
     This is done here rather than (eg) in global_alloc
686
     because this point is reached even if not optimizing.  */
687
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
688
    mark_home_live (i);
689
 
690
  /* A function that receives a nonlocal goto must save all call-saved
691
     registers.  */
692
  if (current_function_has_nonlocal_label)
693
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
694
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
695
        regs_ever_live[i] = 1;
696
 
697
  /* Find all the pseudo registers that didn't get hard regs
698
     but do have known equivalent constants or memory slots.
699
     These include parameters (known equivalent to parameter slots)
700
     and cse'd or loop-moved constant memory addresses.
701
 
702
     Record constant equivalents in reg_equiv_constant
703
     so they will be substituted by find_reloads.
704
     Record memory equivalents in reg_mem_equiv so they can
705
     be substituted eventually by altering the REG-rtx's.  */
706
 
707
  reg_equiv_constant = XCNEWVEC (rtx, max_regno);
708
  reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
709
  reg_equiv_mem = XCNEWVEC (rtx, max_regno);
710
  reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
711
  reg_equiv_address = XCNEWVEC (rtx, max_regno);
712
  reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
713
  reg_old_renumber = XCNEWVEC (short, max_regno);
714
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
715
  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
716
  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
717
 
718
  CLEAR_HARD_REG_SET (bad_spill_regs_global);
719
 
720
  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
721
     to.  Also find all paradoxical subregs and find largest such for
722
     each pseudo.  */
723
 
724
  num_eliminable_invariants = 0;
725
  for (insn = first; insn; insn = NEXT_INSN (insn))
726
    {
727
      rtx set = single_set (insn);
728
 
729
      /* We may introduce USEs that we want to remove at the end, so
730
         we'll mark them with QImode.  Make sure there are no
731
         previously-marked insns left by say regmove.  */
732
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
733
          && GET_MODE (insn) != VOIDmode)
734
        PUT_MODE (insn, VOIDmode);
735
 
736
      if (INSN_P (insn))
737
        scan_paradoxical_subregs (PATTERN (insn));
738
 
739
      if (set != 0 && REG_P (SET_DEST (set)))
740
        {
741
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
742
          rtx x;
743
 
744
          if (! note)
745
            continue;
746
 
747
          i = REGNO (SET_DEST (set));
748
          x = XEXP (note, 0);
749
 
750
          if (i <= LAST_VIRTUAL_REGISTER)
751
            continue;
752
 
753
          if (! function_invariant_p (x)
754
              || ! flag_pic
755
              /* A function invariant is often CONSTANT_P but may
756
                 include a register.  We promise to only pass
757
                 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
758
              || (CONSTANT_P (x)
759
                  && LEGITIMATE_PIC_OPERAND_P (x)))
760
            {
761
              /* It can happen that a REG_EQUIV note contains a MEM
762
                 that is not a legitimate memory operand.  As later
763
                 stages of reload assume that all addresses found
764
                 in the reg_equiv_* arrays were originally legitimate,
765
                 we ignore such REG_EQUIV notes.  */
766
              if (memory_operand (x, VOIDmode))
767
                {
768
                  /* Always unshare the equivalence, so we can
769
                     substitute into this insn without touching the
770
                       equivalence.  */
771
                  reg_equiv_memory_loc[i] = copy_rtx (x);
772
                }
773
              else if (function_invariant_p (x))
774
                {
775
                  if (GET_CODE (x) == PLUS)
776
                    {
777
                      /* This is PLUS of frame pointer and a constant,
778
                         and might be shared.  Unshare it.  */
779
                      reg_equiv_invariant[i] = copy_rtx (x);
780
                      num_eliminable_invariants++;
781
                    }
782
                  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
783
                    {
784
                      reg_equiv_invariant[i] = x;
785
                      num_eliminable_invariants++;
786
                    }
787
                  else if (LEGITIMATE_CONSTANT_P (x))
788
                    reg_equiv_constant[i] = x;
789
                  else
790
                    {
791
                      reg_equiv_memory_loc[i]
792
                        = force_const_mem (GET_MODE (SET_DEST (set)), x);
793
                      if (! reg_equiv_memory_loc[i])
794
                        reg_equiv_init[i] = NULL_RTX;
795
                    }
796
                }
797
              else
798
                {
799
                  reg_equiv_init[i] = NULL_RTX;
800
                  continue;
801
                }
802
            }
803
          else
804
            reg_equiv_init[i] = NULL_RTX;
805
        }
806
    }
807
 
808
  if (dump_file)
809
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
810
      if (reg_equiv_init[i])
811
        {
812
          fprintf (dump_file, "init_insns for %u: ", i);
813
          print_inline_rtx (dump_file, reg_equiv_init[i], 20);
814
          fprintf (dump_file, "\n");
815
        }
816
 
817
  init_elim_table ();
818
 
819
  first_label_num = get_first_label_num ();
820
  num_labels = max_label_num () - first_label_num;
821
 
822
  /* Allocate the tables used to store offset information at labels.  */
823
  /* We used to use alloca here, but the size of what it would try to
824
     allocate would occasionally cause it to exceed the stack limit and
825
     cause a core dump.  */
826
  offsets_known_at = XNEWVEC (char, num_labels);
827
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
828
 
829
  /* Alter each pseudo-reg rtx to contain its hard reg number.
830
     Assign stack slots to the pseudos that lack hard regs or equivalents.
831
     Do not touch virtual registers.  */
832
 
833
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
834
    alter_reg (i, -1);
835
 
836
  /* If we have some registers we think can be eliminated, scan all insns to
837
     see if there is an insn that sets one of these registers to something
838
     other than itself plus a constant.  If so, the register cannot be
839
     eliminated.  Doing this scan here eliminates an extra pass through the
840
     main reload loop in the most common case where register elimination
841
     cannot be done.  */
842
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
843
    if (INSN_P (insn))
844
      note_stores (PATTERN (insn), mark_not_eliminable, NULL);
845
 
846
  maybe_fix_stack_asms ();
847
 
848
  insns_need_reload = 0;
849
  something_needs_elimination = 0;
850
 
851
  /* Initialize to -1, which means take the first spill register.  */
852
  last_spill_reg = -1;
853
 
854
  /* Spill any hard regs that we know we can't eliminate.  */
855
  CLEAR_HARD_REG_SET (used_spill_regs);
856
  /* There can be multiple ways to eliminate a register;
857
     they should be listed adjacently.
858
     Elimination for any register fails only if all possible ways fail.  */
859
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
860
    {
861
      int from = ep->from;
862
      int can_eliminate = 0;
863
      do
864
        {
865
          can_eliminate |= ep->can_eliminate;
866
          ep++;
867
        }
868
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
869
      if (! can_eliminate)
870
        spill_hard_reg (from, 1);
871
    }
872
 
873
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
874
  if (frame_pointer_needed)
875
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
876
#endif
877
  finish_spills (global);
878
 
879
  /* From now on, we may need to generate moves differently.  We may also
880
     allow modifications of insns which cause them to not be recognized.
881
     Any such modifications will be cleaned up during reload itself.  */
882
  reload_in_progress = 1;
883
 
884
  /* This loop scans the entire function each go-round
885
     and repeats until one repetition spills no additional hard regs.  */
886
  for (;;)
887
    {
888
      int something_changed;
889
      int did_spill;
890
 
891
      HOST_WIDE_INT starting_frame_size;
892
 
893
      /* Round size of stack frame to stack_alignment_needed.  This must be done
894
         here because the stack size may be a part of the offset computation
895
         for register elimination, and there might have been new stack slots
896
         created in the last iteration of this loop.  */
897
      if (cfun->stack_alignment_needed)
898
        assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
899
 
900
      starting_frame_size = get_frame_size ();
901
 
902
      set_initial_elim_offsets ();
903
      set_initial_label_offsets ();
904
 
905
      /* For each pseudo register that has an equivalent location defined,
906
         try to eliminate any eliminable registers (such as the frame pointer)
907
         assuming initial offsets for the replacement register, which
908
         is the normal case.
909
 
910
         If the resulting location is directly addressable, substitute
911
         the MEM we just got directly for the old REG.
912
 
913
         If it is not addressable but is a constant or the sum of a hard reg
914
         and constant, it is probably not addressable because the constant is
915
         out of range, in that case record the address; we will generate
916
         hairy code to compute the address in a register each time it is
917
         needed.  Similarly if it is a hard register, but one that is not
918
         valid as an address register.
919
 
920
         If the location is not addressable, but does not have one of the
921
         above forms, assign a stack slot.  We have to do this to avoid the
922
         potential of producing lots of reloads if, e.g., a location involves
923
         a pseudo that didn't get a hard register and has an equivalent memory
924
         location that also involves a pseudo that didn't get a hard register.
925
 
926
         Perhaps at some point we will improve reload_when_needed handling
927
         so this problem goes away.  But that's very hairy.  */
928
 
929
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
930
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
931
          {
932
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
933
 
934
            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
935
                                         XEXP (x, 0)))
936
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
937
            else if (CONSTANT_P (XEXP (x, 0))
938
                     || (REG_P (XEXP (x, 0))
939
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
940
                     || (GET_CODE (XEXP (x, 0)) == PLUS
941
                         && REG_P (XEXP (XEXP (x, 0), 0))
942
                         && (REGNO (XEXP (XEXP (x, 0), 0))
943
                             < FIRST_PSEUDO_REGISTER)
944
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
945
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
946
            else
947
              {
948
                /* Make a new stack slot.  Then indicate that something
949
                   changed so we go back and recompute offsets for
950
                   eliminable registers because the allocation of memory
951
                   below might change some offset.  reg_equiv_{mem,address}
952
                   will be set up for this pseudo on the next pass around
953
                   the loop.  */
954
                reg_equiv_memory_loc[i] = 0;
955
                reg_equiv_init[i] = 0;
956
                alter_reg (i, -1);
957
              }
958
          }
959
 
960
      if (caller_save_needed)
961
        setup_save_areas ();
962
 
963
      /* If we allocated another stack slot, redo elimination bookkeeping.  */
964
      if (starting_frame_size != get_frame_size ())
965
        continue;
966
 
967
      if (caller_save_needed)
968
        {
969
          save_call_clobbered_regs ();
970
          /* That might have allocated new insn_chain structures.  */
971
          reload_firstobj = obstack_alloc (&reload_obstack, 0);
972
        }
973
 
974
      calculate_needs_all_insns (global);
975
 
976
      CLEAR_REG_SET (&spilled_pseudos);
977
      did_spill = 0;
978
 
979
      something_changed = 0;
980
 
981
      /* If we allocated any new memory locations, make another pass
982
         since it might have changed elimination offsets.  */
983
      if (starting_frame_size != get_frame_size ())
984
        something_changed = 1;
985
 
986
      /* Even if the frame size remained the same, we might still have
987
         changed elimination offsets, e.g. if find_reloads called
988
         force_const_mem requiring the back end to allocate a constant
989
         pool base register that needs to be saved on the stack.  */
990
      else if (!verify_initial_elim_offsets ())
991
        something_changed = 1;
992
 
993
      {
994
        HARD_REG_SET to_spill;
995
        CLEAR_HARD_REG_SET (to_spill);
996
        update_eliminables (&to_spill);
997
        AND_COMPL_HARD_REG_SET(used_spill_regs, to_spill);
998
 
999
        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1000
          if (TEST_HARD_REG_BIT (to_spill, i))
1001
            {
1002
              spill_hard_reg (i, 1);
1003
              did_spill = 1;
1004
 
1005
              /* Regardless of the state of spills, if we previously had
1006
                 a register that we thought we could eliminate, but now can
1007
                 not eliminate, we must run another pass.
1008
 
1009
                 Consider pseudos which have an entry in reg_equiv_* which
1010
                 reference an eliminable register.  We must make another pass
1011
                 to update reg_equiv_* so that we do not substitute in the
1012
                 old value from when we thought the elimination could be
1013
                 performed.  */
1014
              something_changed = 1;
1015
            }
1016
      }
1017
 
1018
      select_reload_regs ();
1019
      if (failure)
1020
        goto failed;
1021
 
1022
      if (insns_need_reload != 0 || did_spill)
1023
        something_changed |= finish_spills (global);
1024
 
1025
      if (! something_changed)
1026
        break;
1027
 
1028
      if (caller_save_needed)
1029
        delete_caller_save_insns ();
1030
 
1031
      obstack_free (&reload_obstack, reload_firstobj);
1032
    }
1033
 
1034
  /* If global-alloc was run, notify it of any register eliminations we have
1035
     done.  */
1036
  if (global)
1037
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1038
      if (ep->can_eliminate)
1039
        mark_elimination (ep->from, ep->to);
1040
 
1041
  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1042
     If that insn didn't set the register (i.e., it copied the register to
1043
     memory), just delete that insn instead of the equivalencing insn plus
1044
     anything now dead.  If we call delete_dead_insn on that insn, we may
1045
     delete the insn that actually sets the register if the register dies
1046
     there and that is incorrect.  */
1047
 
1048
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1049
    {
1050
      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1051
        {
1052
          rtx list;
1053
          for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1054
            {
1055
              rtx equiv_insn = XEXP (list, 0);
1056
 
1057
              /* If we already deleted the insn or if it may trap, we can't
1058
                 delete it.  The latter case shouldn't happen, but can
1059
                 if an insn has a variable address, gets a REG_EH_REGION
1060
                 note added to it, and then gets converted into a load
1061
                 from a constant address.  */
1062
              if (NOTE_P (equiv_insn)
1063
                  || can_throw_internal (equiv_insn))
1064
                ;
1065
              else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1066
                delete_dead_insn (equiv_insn);
1067
              else
1068
                SET_INSN_DELETED (equiv_insn);
1069
            }
1070
        }
1071
    }
1072
 
1073
  /* Use the reload registers where necessary
1074
     by generating move instructions to move the must-be-register
1075
     values into or out of the reload registers.  */
1076
 
1077
  if (insns_need_reload != 0 || something_needs_elimination
1078
      || something_needs_operands_changed)
1079
    {
1080
      HOST_WIDE_INT old_frame_size = get_frame_size ();
1081
 
1082
      reload_as_needed (global);
1083
 
1084
      gcc_assert (old_frame_size == get_frame_size ());
1085
 
1086
      gcc_assert (verify_initial_elim_offsets ());
1087
    }
1088
 
1089
  /* If we were able to eliminate the frame pointer, show that it is no
1090
     longer live at the start of any basic block.  If it is live by
1091
     virtue of being in a pseudo, that pseudo will be marked live
1092
     and hence the frame pointer will be known to be live via that
1093
     pseudo.  */
1094
 
1095
  if (! frame_pointer_needed)
1096
    FOR_EACH_BB (bb)
1097
      CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start,
1098
                           HARD_FRAME_POINTER_REGNUM);
1099
 
1100
  /* Come here (with failure set nonzero) if we can't get enough spill
1101
     regs.  */
1102
 failed:
1103
 
1104
  CLEAR_REG_SET (&spilled_pseudos);
1105
  reload_in_progress = 0;
1106
 
1107
  /* Now eliminate all pseudo regs by modifying them into
1108
     their equivalent memory references.
1109
     The REG-rtx's for the pseudos are modified in place,
1110
     so all insns that used to refer to them now refer to memory.
1111
 
1112
     For a reg that has a reg_equiv_address, all those insns
1113
     were changed by reloading so that no insns refer to it any longer;
1114
     but the DECL_RTL of a variable decl may refer to it,
1115
     and if so this causes the debugging info to mention the variable.  */
1116
 
1117
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1118
    {
1119
      rtx addr = 0;
1120
 
1121
      if (reg_equiv_mem[i])
1122
        addr = XEXP (reg_equiv_mem[i], 0);
1123
 
1124
      if (reg_equiv_address[i])
1125
        addr = reg_equiv_address[i];
1126
 
1127
      if (addr)
1128
        {
1129
          if (reg_renumber[i] < 0)
1130
            {
1131
              rtx reg = regno_reg_rtx[i];
1132
 
1133
              REG_USERVAR_P (reg) = 0;
1134
              PUT_CODE (reg, MEM);
1135
              XEXP (reg, 0) = addr;
1136
              if (reg_equiv_memory_loc[i])
1137
                MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1138
              else
1139
                {
1140
                  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1141
                  MEM_ATTRS (reg) = 0;
1142
                }
1143
              MEM_NOTRAP_P (reg) = 1;
1144
            }
1145
          else if (reg_equiv_mem[i])
1146
            XEXP (reg_equiv_mem[i], 0) = addr;
1147
        }
1148
    }
1149
 
1150
  /* We must set reload_completed now since the cleanup_subreg_operands call
1151
     below will re-recognize each insn and reload may have generated insns
1152
     which are only valid during and after reload.  */
1153
  reload_completed = 1;
1154
 
1155
  /* Make a pass over all the insns and delete all USEs which we inserted
1156
     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1157
     notes.  Delete all CLOBBER insns, except those that refer to the return
1158
     value and the special mem:BLK CLOBBERs added to prevent the scheduler
1159
     from misarranging variable-array code, and simplify (subreg (reg))
1160
     operands.  Also remove all REG_RETVAL and REG_LIBCALL notes since they
1161
     are no longer useful or accurate.  Strip and regenerate REG_INC notes
1162
     that may have been moved around.  */
1163
 
1164
  for (insn = first; insn; insn = NEXT_INSN (insn))
1165
    if (INSN_P (insn))
1166
      {
1167
        rtx *pnote;
1168
 
1169
        if (CALL_P (insn))
1170
          replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1171
                              VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1172
 
1173
        if ((GET_CODE (PATTERN (insn)) == USE
1174
             /* We mark with QImode USEs introduced by reload itself.  */
1175
             && (GET_MODE (insn) == QImode
1176
                 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1177
            || (GET_CODE (PATTERN (insn)) == CLOBBER
1178
                && (!MEM_P (XEXP (PATTERN (insn), 0))
1179
                    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1180
                    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1181
                        && XEXP (XEXP (PATTERN (insn), 0), 0)
1182
                                != stack_pointer_rtx))
1183
                && (!REG_P (XEXP (PATTERN (insn), 0))
1184
                    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1185
          {
1186
            delete_insn (insn);
1187
            continue;
1188
          }
1189
 
1190
        /* Some CLOBBERs may survive until here and still reference unassigned
1191
           pseudos with const equivalent, which may in turn cause ICE in later
1192
           passes if the reference remains in place.  */
1193
        if (GET_CODE (PATTERN (insn)) == CLOBBER)
1194
          replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1195
                              VOIDmode, PATTERN (insn));
1196
 
1197
        /* Discard obvious no-ops, even without -O.  This optimization
1198
           is fast and doesn't interfere with debugging.  */
1199
        if (NONJUMP_INSN_P (insn)
1200
            && GET_CODE (PATTERN (insn)) == SET
1201
            && REG_P (SET_SRC (PATTERN (insn)))
1202
            && REG_P (SET_DEST (PATTERN (insn)))
1203
            && (REGNO (SET_SRC (PATTERN (insn)))
1204
                == REGNO (SET_DEST (PATTERN (insn)))))
1205
          {
1206
            delete_insn (insn);
1207
            continue;
1208
          }
1209
 
1210
        pnote = &REG_NOTES (insn);
1211
        while (*pnote != 0)
1212
          {
1213
            if (REG_NOTE_KIND (*pnote) == REG_DEAD
1214
                || REG_NOTE_KIND (*pnote) == REG_UNUSED
1215
                || REG_NOTE_KIND (*pnote) == REG_INC
1216
                || REG_NOTE_KIND (*pnote) == REG_RETVAL
1217
                || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1218
              *pnote = XEXP (*pnote, 1);
1219
            else
1220
              pnote = &XEXP (*pnote, 1);
1221
          }
1222
 
1223
#ifdef AUTO_INC_DEC
1224
        add_auto_inc_notes (insn, PATTERN (insn));
1225
#endif
1226
 
1227
        /* Simplify (subreg (reg)) if it appears as an operand.  */
1228
        cleanup_subreg_operands (insn);
1229
 
1230
        /* Clean up invalid ASMs so that they don't confuse later passes.
1231
           See PR 21299.  */
1232
        if (asm_noperands (PATTERN (insn)) >= 0)
1233
          {
1234
            extract_insn (insn);
1235
            if (!constrain_operands (1))
1236
              {
1237
                error_for_asm (insn,
1238
                               "%<asm%> operand has impossible constraints");
1239
                delete_insn (insn);
1240
                continue;
1241
              }
1242
          }
1243
      }
1244
 
1245
  /* If we are doing stack checking, give a warning if this function's
1246
     frame size is larger than we expect.  */
1247
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1248
    {
1249
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1250
      static int verbose_warned = 0;
1251
 
1252
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1253
        if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1254
          size += UNITS_PER_WORD;
1255
 
1256
      if (size > STACK_CHECK_MAX_FRAME_SIZE)
1257
        {
1258
          warning (0, "frame size too large for reliable stack checking");
1259
          if (! verbose_warned)
1260
            {
1261
              warning (0, "try reducing the number of local variables");
1262
              verbose_warned = 1;
1263
            }
1264
        }
1265
    }
1266
 
1267
  /* Indicate that we no longer have known memory locations or constants.  */
1268
  if (reg_equiv_constant)
1269
    free (reg_equiv_constant);
1270
  if (reg_equiv_invariant)
1271
    free (reg_equiv_invariant);
1272
  reg_equiv_constant = 0;
1273
  reg_equiv_invariant = 0;
1274
  VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1275
  reg_equiv_memory_loc = 0;
1276
 
1277
  if (offsets_known_at)
1278
    free (offsets_known_at);
1279
  if (offsets_at)
1280
    free (offsets_at);
1281
 
1282
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1283
    if (reg_equiv_alt_mem_list[i])
1284
      free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1285
  free (reg_equiv_alt_mem_list);
1286
 
1287
  free (reg_equiv_mem);
1288
  reg_equiv_init = 0;
1289
  free (reg_equiv_address);
1290
  free (reg_max_ref_width);
1291
  free (reg_old_renumber);
1292
  free (pseudo_previous_regs);
1293
  free (pseudo_forbidden_regs);
1294
 
1295
  CLEAR_HARD_REG_SET (used_spill_regs);
1296
  for (i = 0; i < n_spills; i++)
1297
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1298
 
1299
  /* Free all the insn_chain structures at once.  */
1300
  obstack_free (&reload_obstack, reload_startobj);
1301
  unused_insn_chains = 0;
1302
  fixup_abnormal_edges ();
1303
 
1304
  /* Replacing pseudos with their memory equivalents might have
1305
     created shared rtx.  Subsequent passes would get confused
1306
     by this, so unshare everything here.  */
1307
  unshare_all_rtl_again (first);
1308
 
1309
#ifdef STACK_BOUNDARY
1310
  /* init_emit has set the alignment of the hard frame pointer
1311
     to STACK_BOUNDARY.  It is very likely no longer valid if
1312
     the hard frame pointer was used for register allocation.  */
1313
  if (!frame_pointer_needed)
1314
    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1315
#endif
1316
 
1317
  return failure;
1318
}
1319
 
1320
/* Yet another special case.  Unfortunately, reg-stack forces people to
1321
   write incorrect clobbers in asm statements.  These clobbers must not
1322
   cause the register to appear in bad_spill_regs, otherwise we'll call
1323
   fatal_insn later.  We clear the corresponding regnos in the live
1324
   register sets to avoid this.
1325
   The whole thing is rather sick, I'm afraid.  */
1326
 
1327
static void
1328
maybe_fix_stack_asms (void)
1329
{
1330
#ifdef STACK_REGS
1331
  const char *constraints[MAX_RECOG_OPERANDS];
1332
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1333
  struct insn_chain *chain;
1334
 
1335
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1336
    {
1337
      int i, noperands;
1338
      HARD_REG_SET clobbered, allowed;
1339
      rtx pat;
1340
 
1341
      if (! INSN_P (chain->insn)
1342
          || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1343
        continue;
1344
      pat = PATTERN (chain->insn);
1345
      if (GET_CODE (pat) != PARALLEL)
1346
        continue;
1347
 
1348
      CLEAR_HARD_REG_SET (clobbered);
1349
      CLEAR_HARD_REG_SET (allowed);
1350
 
1351
      /* First, make a mask of all stack regs that are clobbered.  */
1352
      for (i = 0; i < XVECLEN (pat, 0); i++)
1353
        {
1354
          rtx t = XVECEXP (pat, 0, i);
1355
          if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1356
            SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1357
        }
1358
 
1359
      /* Get the operand values and constraints out of the insn.  */
1360
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1361
                           constraints, operand_mode);
1362
 
1363
      /* For every operand, see what registers are allowed.  */
1364
      for (i = 0; i < noperands; i++)
1365
        {
1366
          const char *p = constraints[i];
1367
          /* For every alternative, we compute the class of registers allowed
1368
             for reloading in CLS, and merge its contents into the reg set
1369
             ALLOWED.  */
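          /* Added note (not in the original source): for a constraint string
             such as "r,X" the loop below accumulates GENERAL_REGS for the
             first alternative, merges it into ALLOWED when it reaches the
             ',', resets CLS, and finally merges the (empty) NO_REGS set for
             the 'X' alternative when it hits the terminating '\0'.  */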
1370
          int cls = (int) NO_REGS;
1371
 
1372
          for (;;)
1373
            {
1374
              char c = *p;
1375
 
1376
              if (c == '\0' || c == ',' || c == '#')
1377
                {
1378
                  /* End of one alternative - mark the regs in the current
1379
                     class, and reset the class.  */
1380
                  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1381
                  cls = NO_REGS;
1382
                  p++;
1383
                  if (c == '#')
1384
                    do {
1385
                      c = *p++;
1386
                    } while (c != '\0' && c != ',');
1387
                  if (c == '\0')
1388
                    break;
1389
                  continue;
1390
                }
1391
 
1392
              switch (c)
1393
                {
1394
                case '=': case '+': case '*': case '%': case '?': case '!':
1395
                case '0': case '1': case '2': case '3': case '4': case 'm':
1396
                case '<': case '>': case 'V': case 'o': case '&': case 'E':
1397
                case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1398
                case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1399
                case 'P':
1400
                  break;
1401
 
1402
                case 'p':
1403
                  cls = (int) reg_class_subunion[cls]
1404
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1405
                  break;
1406
 
1407
                case 'g':
1408
                case 'r':
1409
                  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1410
                  break;
1411
 
1412
                default:
1413
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1414
                    cls = (int) reg_class_subunion[cls]
1415
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1416
                  else
1417
                    cls = (int) reg_class_subunion[cls]
1418
                      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1419
                }
1420
              p += CONSTRAINT_LEN (c, p);
1421
            }
1422
        }
1423
      /* Those of the registers which are clobbered, but allowed by the
1424
         constraints, must be usable as reload registers.  So clear them
1425
         out of the life information.  */
1426
      AND_HARD_REG_SET (allowed, clobbered);
1427
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1428
        if (TEST_HARD_REG_BIT (allowed, i))
1429
          {
1430
            CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1431
            CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1432
          }
1433
    }
1434
 
1435
#endif
1436
}
1437
 
1438
/* Copy the global variables n_reloads and rld into the corresponding elts
1439
   of CHAIN.  */
1440
static void
1441
copy_reloads (struct insn_chain *chain)
1442
{
1443
  chain->n_reloads = n_reloads;
1444
  chain->rld = obstack_alloc (&reload_obstack,
1445
                              n_reloads * sizeof (struct reload));
1446
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1447
  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1448
}
1449
 
1450
/* Walk the chain of insns, and determine for each whether it needs reloads
1451
   and/or eliminations.  Build the corresponding insns_need_reload list, and
1452
   set something_needs_elimination as appropriate.  */
1453
static void
1454
calculate_needs_all_insns (int global)
1455
{
1456
  struct insn_chain **pprev_reload = &insns_need_reload;
1457
  struct insn_chain *chain, *next = 0;
1458
 
1459
  something_needs_elimination = 0;
1460
 
1461
  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1462
  for (chain = reload_insn_chain; chain != 0; chain = next)
1463
    {
1464
      rtx insn = chain->insn;
1465
 
1466
      next = chain->next;
1467
 
1468
      /* Clear out the shortcuts.  */
1469
      chain->n_reloads = 0;
1470
      chain->need_elim = 0;
1471
      chain->need_reload = 0;
1472
      chain->need_operand_change = 0;
1473
 
1474
      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1475
         include REG_LABEL), we need to see what effects this has on the
1476
         known offsets at labels.  */
1477
 
1478
      if (LABEL_P (insn) || JUMP_P (insn)
1479
          || (INSN_P (insn) && REG_NOTES (insn) != 0))
1480
        set_label_offsets (insn, insn, 0);
1481
 
1482
      if (INSN_P (insn))
1483
        {
1484
          rtx old_body = PATTERN (insn);
1485
          int old_code = INSN_CODE (insn);
1486
          rtx old_notes = REG_NOTES (insn);
1487
          int did_elimination = 0;
1488
          int operands_changed = 0;
1489
          rtx set = single_set (insn);
1490
 
1491
          /* Skip insns that only set an equivalence.  */
1492
          if (set && REG_P (SET_DEST (set))
1493
              && reg_renumber[REGNO (SET_DEST (set))] < 0
1494
              && (reg_equiv_constant[REGNO (SET_DEST (set))]
1495
                  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1496
                      && reg_equiv_init[REGNO (SET_DEST (set))])
1497
            continue;
1498
 
1499
          /* If needed, eliminate any eliminable registers.  */
1500
          if (num_eliminable || num_eliminable_invariants)
1501
            did_elimination = eliminate_regs_in_insn (insn, 0);
1502
 
1503
          /* Analyze the instruction.  */
1504
          operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1505
                                           global, spill_reg_order);
1506
 
1507
          /* If a no-op set needs more than one reload, this is likely
1508
             to be something that needs input address reloads.  We
1509
             can't get rid of this cleanly later, and it is of no use
1510
             anyway, so discard it now.
1511
             We only do this when expensive_optimizations is enabled,
1512
             since this complements reload inheritance / output
1513
             reload deletion, and it can make debugging harder.  */
1514
          if (flag_expensive_optimizations && n_reloads > 1)
1515
            {
1516
              rtx set = single_set (insn);
1517
              if (set
1518
                  && SET_SRC (set) == SET_DEST (set)
1519
                  && REG_P (SET_SRC (set))
1520
                  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1521
                {
1522
                  delete_insn (insn);
1523
                  /* Delete it from the reload chain.  */
1524
                  if (chain->prev)
1525
                    chain->prev->next = next;
1526
                  else
1527
                    reload_insn_chain = next;
1528
                  if (next)
1529
                    next->prev = chain->prev;
1530
                  chain->next = unused_insn_chains;
1531
                  unused_insn_chains = chain;
1532
                  continue;
1533
                }
1534
            }
1535
          if (num_eliminable)
1536
            update_eliminable_offsets ();
1537
 
1538
          /* Remember for later shortcuts which insns had any reloads or
1539
             register eliminations.  */
1540
          chain->need_elim = did_elimination;
1541
          chain->need_reload = n_reloads > 0;
1542
          chain->need_operand_change = operands_changed;
1543
 
1544
          /* Discard any register replacements done.  */
1545
          if (did_elimination)
1546
            {
1547
              obstack_free (&reload_obstack, reload_insn_firstobj);
1548
              PATTERN (insn) = old_body;
1549
              INSN_CODE (insn) = old_code;
1550
              REG_NOTES (insn) = old_notes;
1551
              something_needs_elimination = 1;
1552
            }
1553
 
1554
          something_needs_operands_changed |= operands_changed;
1555
 
1556
          if (n_reloads != 0)
1557
            {
1558
              copy_reloads (chain);
1559
              *pprev_reload = chain;
1560
              pprev_reload = &chain->next_need_reload;
1561
            }
1562
        }
1563
    }
1564
  *pprev_reload = 0;
1565
}
1566
 
1567
/* Comparison function for qsort to decide which of two reloads
1568
   should be handled first.  *P1 and *P2 are the reload numbers.  */
1569
 
1570
static int
1571
reload_reg_class_lower (const void *r1p, const void *r2p)
1572
{
1573
  int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1574
  int t;
1575
 
1576
  /* Consider required reloads before optional ones.  */
1577
  t = rld[r1].optional - rld[r2].optional;
1578
  if (t != 0)
1579
    return t;
1580
 
1581
  /* Count all solitary classes before non-solitary ones.  */
1582
  t = ((reg_class_size[(int) rld[r2].class] == 1)
1583
       - (reg_class_size[(int) rld[r1].class] == 1));
1584
  if (t != 0)
1585
    return t;
1586
 
1587
  /* Aside from solitaires, consider all multi-reg groups first.  */
1588
  t = rld[r2].nregs - rld[r1].nregs;
1589
  if (t != 0)
1590
    return t;
1591
 
1592
  /* Consider reloads in order of increasing reg-class number.  */
1593
  t = (int) rld[r1].class - (int) rld[r2].class;
1594
  if (t != 0)
1595
    return t;
1596
 
1597
  /* If reloads are equally urgent, sort by reload number,
1598
     so that the results of qsort leave nothing to chance.  */
1599
  return r1 - r2;
1600
}
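/* Illustrative example (added note, not in the original source): with three
   reloads where rld[0] is optional, rld[1] wants a class containing a single
   hard register and rld[2] wants GENERAL_REGS, sorting reload_order with the
   comparison above yields the order 1, 2, 0: required reloads come first,
   solitary classes are handled before larger ones, and optional reloads are
   considered last.  */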
1601
 
1602
/* The cost of spilling each hard reg.  */
1603
static int spill_cost[FIRST_PSEUDO_REGISTER];
1604
 
1605
/* When spilling multiple hard registers, we use SPILL_COST for the first
1606
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1607
   is counted only for the first hard reg of a multi-reg pseudo.  */
1608
static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1609
 
1610
/* Update the spill cost arrays, considering that pseudo REG is live.  */
1611
 
1612
static void
1613
count_pseudo (int reg)
1614
{
1615
  int freq = REG_FREQ (reg);
1616
  int r = reg_renumber[reg];
1617
  int nregs;
1618
 
1619
  if (REGNO_REG_SET_P (&pseudos_counted, reg)
1620
      || REGNO_REG_SET_P (&spilled_pseudos, reg))
1621
    return;
1622
 
1623
  SET_REGNO_REG_SET (&pseudos_counted, reg);
1624
 
1625
  gcc_assert (r >= 0);
1626
 
1627
  spill_add_cost[r] += freq;
1628
 
1629
  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1630
  while (nregs-- > 0)
1631
    spill_cost[r + nregs] += freq;
1632
}
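/* Worked example (added note, not in the original source): for a DImode
   pseudo with REG_FREQ 10 living in hard regs 4 and 5, the function above
   adds 10 to spill_add_cost[4] and to both spill_cost[4] and spill_cost[5].
   Spilling reg 5 alone then costs 10, and a two-register spill starting at
   reg 4 also costs 10 (spill_cost[4] + spill_add_cost[5] == 10 + 0), since
   either choice evicts the same pseudo.  */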
1633
 
1634
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1635
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1636
 
1637
static void
1638
order_regs_for_reload (struct insn_chain *chain)
1639
{
1640
  unsigned i;
1641
  HARD_REG_SET used_by_pseudos;
1642
  HARD_REG_SET used_by_pseudos2;
1643
  reg_set_iterator rsi;
1644
 
1645
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1646
 
1647
  memset (spill_cost, 0, sizeof spill_cost);
1648
  memset (spill_add_cost, 0, sizeof spill_add_cost);
1649
 
1650
  /* Count number of uses of each hard reg by pseudo regs allocated to it
1651
     and then order them by decreasing use.  First exclude hard registers
1652
     that are live in or across this insn.  */
1653
 
1654
  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1655
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1656
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1657
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1658
 
1659
  /* Now find out which pseudos are allocated to these registers, and update
1660
     the spill cost arrays accordingly.  */
1661
  CLEAR_REG_SET (&pseudos_counted);
1662
 
1663
  EXECUTE_IF_SET_IN_REG_SET
1664
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1665
    {
1666
      count_pseudo (i);
1667
    }
1668
  EXECUTE_IF_SET_IN_REG_SET
1669
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1670
    {
1671
      count_pseudo (i);
1672
    }
1673
  CLEAR_REG_SET (&pseudos_counted);
1674
}
1675
 
1676
/* Vector of reload-numbers showing the order in which the reloads should
1677
   be processed.  */
1678
static short reload_order[MAX_RELOADS];
1679
 
1680
/* This is used to keep track of the spill regs used in one insn.  */
1681
static HARD_REG_SET used_spill_regs_local;
1682
 
1683
/* We decided to spill hard register SPILLED, which has a size of
1684
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1685
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1686
   update SPILL_COST/SPILL_ADD_COST.  */
1687
 
1688
static void
1689
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1690
{
1691
  int r = reg_renumber[reg];
1692
  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1693
 
1694
  if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1695
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
1696
    return;
1697
 
1698
  SET_REGNO_REG_SET (&spilled_pseudos, reg);
1699
 
1700
  spill_add_cost[r] -= REG_FREQ (reg);
1701
  while (nregs-- > 0)
1702
    spill_cost[r + nregs] -= REG_FREQ (reg);
1703
}
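/* Added note (not in the original source): this is the inverse of
   count_pseudo for pseudos that overlap a register we have just decided to
   spill.  Once such a pseudo is marked in spilled_pseudos, its frequency is
   removed from the cost arrays, so taking further hard registers that it
   occupies looks free to later calls of find_reg.  */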
1704
 
1705
/* Find reload register to use for reload number ORDER.  */
1706
 
1707
static int
1708
find_reg (struct insn_chain *chain, int order)
1709
{
1710
  int rnum = reload_order[order];
1711
  struct reload *rl = rld + rnum;
1712
  int best_cost = INT_MAX;
1713
  int best_reg = -1;
1714
  unsigned int i, j;
1715
  int k;
1716
  HARD_REG_SET not_usable;
1717
  HARD_REG_SET used_by_other_reload;
1718
  reg_set_iterator rsi;
1719
 
1720
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1721
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1722
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1723
 
1724
  CLEAR_HARD_REG_SET (used_by_other_reload);
1725
  for (k = 0; k < order; k++)
1726
    {
1727
      int other = reload_order[k];
1728
 
1729
      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1730
        for (j = 0; j < rld[other].nregs; j++)
1731
          SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1732
    }
1733
 
1734
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1735
    {
1736
      unsigned int regno = i;
1737
 
1738
      if (! TEST_HARD_REG_BIT (not_usable, regno)
1739
          && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1740
          && HARD_REGNO_MODE_OK (regno, rl->mode))
1741
        {
1742
          int this_cost = spill_cost[regno];
1743
          int ok = 1;
1744
          unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1745
 
1746
          for (j = 1; j < this_nregs; j++)
1747
            {
1748
              this_cost += spill_add_cost[regno + j];
1749
              if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1750
                  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1751
                ok = 0;
1752
            }
1753
          if (! ok)
1754
            continue;
1755
          if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1756
            this_cost--;
1757
          if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1758
            this_cost--;
1759
          if (this_cost < best_cost
1760
              /* Among registers with equal cost, prefer caller-saved ones, or
1761
                 use REG_ALLOC_ORDER if it is defined.  */
1762
              || (this_cost == best_cost
1763
#ifdef REG_ALLOC_ORDER
1764
                  && (inv_reg_alloc_order[regno]
1765
                      < inv_reg_alloc_order[best_reg])
1766
#else
1767
                  && call_used_regs[regno]
1768
                  && ! call_used_regs[best_reg]
1769
#endif
1770
                  ))
1771
            {
1772
              best_reg = regno;
1773
              best_cost = this_cost;
1774
            }
1775
        }
1776
    }
1777
  if (best_reg == -1)
1778
    return 0;
1779
 
1780
  if (dump_file)
1781
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1782
 
1783
  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1784
  rl->regno = best_reg;
1785
 
1786
  EXECUTE_IF_SET_IN_REG_SET
1787
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1788
    {
1789
      count_spilled_pseudo (best_reg, rl->nregs, j);
1790
    }
1791
 
1792
  EXECUTE_IF_SET_IN_REG_SET
1793
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1794
    {
1795
      count_spilled_pseudo (best_reg, rl->nregs, j);
1796
    }
1797
 
1798
  for (i = 0; i < rl->nregs; i++)
1799
    {
1800
      gcc_assert (spill_cost[best_reg + i] == 0);
1801
      gcc_assert (spill_add_cost[best_reg + i] == 0);
1802
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1803
    }
1804
  return 1;
1805
}
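/* Added note (not in the original source): find_reg is invoked by
   find_reload_regs once per reload, in the order produced by
   reload_reg_class_lower; a zero return means no usable hard register was
   found and the caller reports the problem through spill_failure.  */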
1806
 
1807
/* Find more reload regs to satisfy the remaining need of an insn, which
1808
   is given by CHAIN.
1809
   Do it by ascending class number, since otherwise a reg
1810
   might be spilled for a big class and might fail to count
1811
   for a smaller class even though it belongs to that class.  */
1812
 
1813
static void
1814
find_reload_regs (struct insn_chain *chain)
1815
{
1816
  int i;
1817
 
1818
  /* In order to be certain of getting the registers we need,
1819
     we must sort the reloads into order of increasing register class.
1820
     Then our grabbing of reload registers will parallel the process
1821
     that provided the reload registers.  */
1822
  for (i = 0; i < chain->n_reloads; i++)
1823
    {
1824
      /* Show whether this reload already has a hard reg.  */
1825
      if (chain->rld[i].reg_rtx)
1826
        {
1827
          int regno = REGNO (chain->rld[i].reg_rtx);
1828
          chain->rld[i].regno = regno;
1829
          chain->rld[i].nregs
1830
            = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1831
        }
1832
      else
1833
        chain->rld[i].regno = -1;
1834
      reload_order[i] = i;
1835
    }
1836
 
1837
  n_reloads = chain->n_reloads;
1838
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1839
 
1840
  CLEAR_HARD_REG_SET (used_spill_regs_local);
1841
 
1842
  if (dump_file)
1843
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1844
 
1845
  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1846
 
1847
  /* Compute the order of preference for hard registers to spill.  */
1848
 
1849
  order_regs_for_reload (chain);
1850
 
1851
  for (i = 0; i < n_reloads; i++)
1852
    {
1853
      int r = reload_order[i];
1854
 
1855
      /* Ignore reloads that got marked inoperative.  */
1856
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1857
          && ! rld[r].optional
1858
          && rld[r].regno == -1)
1859
        if (! find_reg (chain, i))
1860
          {
1861
            if (dump_file)
1862
              fprintf (dump_file, "reload failure for reload %d\n", r);
1863
            spill_failure (chain->insn, rld[r].class);
1864
            failure = 1;
1865
            return;
1866
          }
1867
    }
1868
 
1869
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1870
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1871
 
1872
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1873
}
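/* Added comment (not in the original source): select_reload_regs, defined
   just below, simply walks the insns_need_reload list built by
   calculate_needs_all_insns and calls find_reload_regs for each entry.  */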
1874
 
1875
static void
1876
select_reload_regs (void)
1877
{
1878
  struct insn_chain *chain;
1879
 
1880
  /* Try to satisfy the needs for each insn.  */
1881
  for (chain = insns_need_reload; chain != 0;
1882
       chain = chain->next_need_reload)
1883
    find_reload_regs (chain);
1884
}
1885
 
1886
/* Delete all insns that were inserted by emit_caller_save_insns during
1887
   this iteration.  */
1888
static void
1889
delete_caller_save_insns (void)
1890
{
1891
  struct insn_chain *c = reload_insn_chain;
1892
 
1893
  while (c != 0)
1894
    {
1895
      while (c != 0 && c->is_caller_save_insn)
1896
        {
1897
          struct insn_chain *next = c->next;
1898
          rtx insn = c->insn;
1899
 
1900
          if (c == reload_insn_chain)
1901
            reload_insn_chain = next;
1902
          delete_insn (insn);
1903
 
1904
          if (next)
1905
            next->prev = c->prev;
1906
          if (c->prev)
1907
            c->prev->next = next;
1908
          c->next = unused_insn_chains;
1909
          unused_insn_chains = c;
1910
          c = next;
1911
        }
1912
      if (c != 0)
1913
        c = c->next;
1914
    }
1915
}
1916
 
1917
/* Handle the failure to find a register to spill.
1918
   INSN should be one of the insns which needed this particular spill reg.  */
1919
 
1920
static void
1921
spill_failure (rtx insn, enum reg_class class)
1922
{
1923
  if (asm_noperands (PATTERN (insn)) >= 0)
1924
    error_for_asm (insn, "can't find a register in class %qs while "
1925
                   "reloading %<asm%>",
1926
                   reg_class_names[class]);
1927
  else
1928
    {
1929
      error ("unable to find a register to spill in class %qs",
1930
             reg_class_names[class]);
1931
 
1932
      if (dump_file)
1933
        {
1934
          fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
1935
          debug_reload_to_stream (dump_file);
1936
        }
1937
      fatal_insn ("this is the insn:", insn);
1938
    }
1939
}
1940
 
1941
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1942
   data that is dead in INSN.  */
1943
 
1944
static void
1945
delete_dead_insn (rtx insn)
1946
{
1947
  rtx prev = prev_real_insn (insn);
1948
  rtx prev_dest;
1949
 
1950
  /* If the previous insn sets a register that dies in our insn, delete it
1951
     too.  */
1952
  if (prev && GET_CODE (PATTERN (prev)) == SET
1953
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
1954
      && reg_mentioned_p (prev_dest, PATTERN (insn))
1955
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1956
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
1957
    delete_dead_insn (prev);
1958
 
1959
  SET_INSN_DELETED (insn);
1960
}
1961
 
1962
/* Modify the home of pseudo-reg I.
1963
   The new home is present in reg_renumber[I].
1964
 
1965
   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1966
   or it may be -1, meaning there is none or it is not relevant.
1967
   This is used so that all pseudos spilled from a given hard reg
1968
   can share one stack slot.  */
1969
 
1970
static void
1971
alter_reg (int i, int from_reg)
1972
{
1973
  /* When outputting an inline function, this can happen
1974
     for a reg that isn't actually used.  */
1975
  if (regno_reg_rtx[i] == 0)
1976
    return;
1977
 
1978
  /* If the reg got changed to a MEM at rtl-generation time,
1979
     ignore it.  */
1980
  if (!REG_P (regno_reg_rtx[i]))
1981
    return;
1982
 
1983
  /* Modify the reg-rtx to contain the new hard reg
1984
     number or else to contain its pseudo reg number.  */
1985
  REGNO (regno_reg_rtx[i])
1986
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1987
 
1988
  /* If we have a pseudo that is needed but has no hard reg or equivalent,
1989
     allocate a stack slot for it.  */
1990
 
1991
  if (reg_renumber[i] < 0
1992
      && REG_N_REFS (i) > 0
1993
      && reg_equiv_constant[i] == 0
1994
      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
1995
      && reg_equiv_memory_loc[i] == 0)
1996
    {
1997
      rtx x;
1998
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
1999
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2000
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2001
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2002
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2003
      int adjust = 0;
2004
 
2005
      /* Each pseudo reg has an inherent size which comes from its own mode,
2006
         and a total size which provides room for paradoxical subregs
2007
         which refer to the pseudo reg in wider modes.
2008
 
2009
         We can use a slot already allocated if it provides both
2010
         enough inherent space and enough total space.
2011
         Otherwise, we allocate a new slot, making sure that it has no less
2012
         inherent space, and no less total space, than the previous slot.  */
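       /* Worked example (added note, not in the original source): assuming a
          4-byte SImode, an SImode pseudo that is also accessed through a
          paradoxical DImode subreg has inherent_size 4 but
          reg_max_ref_width 8, so total_size becomes 8 and min_align 64 bits;
          the slot chosen below must offer at least that much space and
          alignment.  */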
2013
      if (from_reg == -1)
2014
        {
2015
          /* No known place to spill from => no slot to reuse.  */
2016
          x = assign_stack_local (mode, total_size,
2017
                                  min_align > inherent_align
2018
                                  || total_size > inherent_size ? -1 : 0);
2019
          if (BYTES_BIG_ENDIAN)
2020
            /* Cancel the big-endian correction done in assign_stack_local.
2021
               Get the address of the beginning of the slot.
2022
               This is so we can do a big-endian correction unconditionally
2023
               below.  */
2024
            adjust = inherent_size - total_size;
2025
 
2026
          /* Nothing can alias this slot except this pseudo.  */
2027
          set_mem_alias_set (x, new_alias_set ());
2028
        }
2029
 
2030
      /* Reuse a stack slot if possible.  */
2031
      else if (spill_stack_slot[from_reg] != 0
2032
               && spill_stack_slot_width[from_reg] >= total_size
2033
               && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2034
                   >= inherent_size)
2035
               && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2036
        x = spill_stack_slot[from_reg];
2037
 
2038
      /* Allocate a bigger slot.  */
2039
      else
2040
        {
2041
          /* Compute maximum size needed, both for inherent size
2042
             and for total size.  */
2043
          rtx stack_slot;
2044
 
2045
          if (spill_stack_slot[from_reg])
2046
            {
2047
              if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2048
                  > inherent_size)
2049
                mode = GET_MODE (spill_stack_slot[from_reg]);
2050
              if (spill_stack_slot_width[from_reg] > total_size)
2051
                total_size = spill_stack_slot_width[from_reg];
2052
              if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2053
                min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2054
            }
2055
 
2056
          /* Make a slot with that size.  */
2057
          x = assign_stack_local (mode, total_size,
2058
                                  min_align > inherent_align
2059
                                  || total_size > inherent_size ? -1 : 0);
2060
          stack_slot = x;
2061
 
2062
          /* All pseudos mapped to this slot can alias each other.  */
2063
          if (spill_stack_slot[from_reg])
2064
            set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2065
          else
2066
            set_mem_alias_set (x, new_alias_set ());
2067
 
2068
          if (BYTES_BIG_ENDIAN)
2069
            {
2070
              /* Cancel the big-endian correction done in assign_stack_local.
2071
                 Get the address of the beginning of the slot.
2072
                 This is so we can do a big-endian correction unconditionally
2073
                 below.  */
2074
              adjust = GET_MODE_SIZE (mode) - total_size;
2075
              if (adjust)
2076
                stack_slot
2077
                  = adjust_address_nv (x, mode_for_size (total_size
2078
                                                         * BITS_PER_UNIT,
2079
                                                         MODE_INT, 1),
2080
                                       adjust);
2081
            }
2082
 
2083
          spill_stack_slot[from_reg] = stack_slot;
2084
          spill_stack_slot_width[from_reg] = total_size;
2085
        }
2086
 
2087
      /* On a big endian machine, the "address" of the slot
2088
         is the address of the low part that fits its inherent mode.  */
2089
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2090
        adjust += (total_size - inherent_size);
2091
 
2092
      /* If we have any adjustment to make, or if the stack slot is the
2093
         wrong mode, make a new stack slot.  */
2094
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2095
 
2096
      /* If we have a decl for the original register, set it for the
2097
         memory.  If this is a shared MEM, make a copy.  */
2098
      if (REG_EXPR (regno_reg_rtx[i])
2099
          && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2100
        {
2101
          rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2102
 
2103
          /* We can do this only for the DECL's home pseudo, not for
2104
             any copies of it, since otherwise when the stack slot
2105
             is reused, nonoverlapping_memrefs_p might think they
2106
             cannot overlap.  */
2107
          if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2108
            {
2109
              if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2110
                x = copy_rtx (x);
2111
 
2112
              set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2113
            }
2114
        }
2115
 
2116
      /* Save the stack slot for later.  */
2117
      reg_equiv_memory_loc[i] = x;
2118
    }
2119
}
2120
 
2121
/* Mark the slots in regs_ever_live for the hard regs
2122
   used by pseudo-reg number REGNO.  */
2123
 
2124
void
2125
mark_home_live (int regno)
2126
{
2127
  int i, lim;
2128
 
2129
  i = reg_renumber[regno];
2130
  if (i < 0)
2131
    return;
2132
  lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
2133
  while (i < lim)
2134
    regs_ever_live[i++] = 1;
2135
}
2136
 
2137
/* This function handles the tracking of elimination offsets around branches.
2138
 
2139
   X is a piece of RTL being scanned.
2140
 
2141
   INSN is the insn that it came from, if any.
2142
 
2143
   INITIAL_P is nonzero if we are to set the offset to be the initial
2144
   offset and zero if we are setting the offset of the label to be the
2145
   current offset.  */
2146
 
2147
static void
2148
set_label_offsets (rtx x, rtx insn, int initial_p)
2149
{
2150
  enum rtx_code code = GET_CODE (x);
2151
  rtx tem;
2152
  unsigned int i;
2153
  struct elim_table *p;
2154
 
2155
  switch (code)
2156
    {
2157
    case LABEL_REF:
2158
      if (LABEL_REF_NONLOCAL_P (x))
2159
        return;
2160
 
2161
      x = XEXP (x, 0);
2162
 
2163
      /* ... fall through ...  */
2164
 
2165
    case CODE_LABEL:
2166
      /* If we know nothing about this label, set the desired offsets.  Note
2167
         that this sets the offset at a label to be the offset before a label
2168
         if we don't know anything about the label.  This is not correct for
2169
         the label after a BARRIER, but is the best guess we can make.  If
2170
         we guessed wrong, we will suppress an elimination that might have
2171
         been possible had we been able to guess correctly.  */
2172
 
2173
      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2174
        {
2175
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2176
            offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2177
              = (initial_p ? reg_eliminate[i].initial_offset
2178
                 : reg_eliminate[i].offset);
2179
          offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2180
        }
2181
 
2182
      /* Otherwise, if this is the definition of a label and it is
2183
         preceded by a BARRIER, set our offsets to the known offset of
2184
         that label.  */
2185
 
2186
      else if (x == insn
2187
               && (tem = prev_nonnote_insn (insn)) != 0
2188
               && BARRIER_P (tem))
2189
        set_offsets_for_label (insn);
2190
      else
2191
        /* If neither of the above cases is true, compare each offset
2192
           with those previously recorded and suppress any eliminations
2193
           where the offsets disagree.  */
2194
 
2195
        for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2196
          if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2197
              != (initial_p ? reg_eliminate[i].initial_offset
2198
                  : reg_eliminate[i].offset))
2199
            reg_eliminate[i].can_eliminate = 0;
2200
 
2201
      return;
2202
 
2203
    case JUMP_INSN:
2204
      set_label_offsets (PATTERN (insn), insn, initial_p);
2205
 
2206
      /* ... fall through ...  */
2207
 
2208
    case INSN:
2209
    case CALL_INSN:
2210
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2211
         and hence must have all eliminations at their initial offsets.  */
2212
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2213
        if (REG_NOTE_KIND (tem) == REG_LABEL)
2214
          set_label_offsets (XEXP (tem, 0), insn, 1);
2215
      return;
2216
 
2217
    case PARALLEL:
2218
    case ADDR_VEC:
2219
    case ADDR_DIFF_VEC:
2220
      /* Each of the labels in the parallel or address vector must be
2221
         at their initial offsets.  We want the first field for PARALLEL
2222
         and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2223
 
2224
      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2225
        set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2226
                           insn, initial_p);
2227
      return;
2228
 
2229
    case SET:
2230
      /* We only care about setting PC.  If the source is not RETURN,
2231
         IF_THEN_ELSE, or a label, disable any eliminations not at
2232
         their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2233
         isn't one of those possibilities.  For branches to a label,
2234
         call ourselves recursively.
2235
 
2236
         Note that this can disable elimination unnecessarily when we have
2237
         a non-local goto since it will look like a non-constant jump to
2238
         someplace in the current function.  This isn't a significant
2239
         problem since such jumps will normally be when all elimination
2240
         pairs are back to their initial offsets.  */
2241
 
2242
      if (SET_DEST (x) != pc_rtx)
2243
        return;
2244
 
2245
      switch (GET_CODE (SET_SRC (x)))
2246
        {
2247
        case PC:
2248
        case RETURN:
2249
          return;
2250
 
2251
        case LABEL_REF:
2252
          set_label_offsets (SET_SRC (x), insn, initial_p);
2253
          return;
2254
 
2255
        case IF_THEN_ELSE:
2256
          tem = XEXP (SET_SRC (x), 1);
2257
          if (GET_CODE (tem) == LABEL_REF)
2258
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2259
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2260
            break;
2261
 
2262
          tem = XEXP (SET_SRC (x), 2);
2263
          if (GET_CODE (tem) == LABEL_REF)
2264
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2265
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2266
            break;
2267
          return;
2268
 
2269
        default:
2270
          break;
2271
        }
2272
 
2273
      /* If we reach here, all eliminations must be at their initial
2274
         offset because we are doing a jump to a variable address.  */
2275
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2276
        if (p->offset != p->initial_offset)
2277
          p->can_eliminate = 0;
2278
      break;
2279
 
2280
    default:
2281
      break;
2282
    }
2283
}
2284
 
2285
/* Scan X and replace any eliminable registers (such as fp) with a
2286
   replacement (such as sp), plus an offset.
2287
 
2288
   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2289
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2290
   MEM, we are allowed to replace a sum of a register and the constant zero
2291
   with the register, which we cannot do outside a MEM.  In addition, we need
2292
   to record the fact that a register is referenced outside a MEM.
2293
 
2294
   If INSN is an insn, it is the insn containing X.  If we replace a REG
2295
   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2296
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2297
   the REG is being modified.
2298
 
2299
   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2300
   That's used when we eliminate in expressions stored in notes.
2301
   This means, do not set ref_outside_mem even if the reference
2302
   is outside of MEMs.
2303
 
2304
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2305
   replacements done assuming all offsets are at their initial values.  If
2306
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2307
   encounter, return the actual location so that find_reloads will do
2308
   the proper thing.  */
2309
 
2310
static rtx
2311
eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2312
                  bool may_use_invariant)
2313
{
2314
  enum rtx_code code = GET_CODE (x);
2315
  struct elim_table *ep;
2316
  int regno;
2317
  rtx new;
2318
  int i, j;
2319
  const char *fmt;
2320
  int copied = 0;
2321
 
2322
  if (! current_function_decl)
2323
    return x;
2324
 
2325
  switch (code)
2326
    {
2327
    case CONST_INT:
2328
    case CONST_DOUBLE:
2329
    case CONST_VECTOR:
2330
    case CONST:
2331
    case SYMBOL_REF:
2332
    case CODE_LABEL:
2333
    case PC:
2334
    case CC0:
2335
    case ASM_INPUT:
2336
    case ADDR_VEC:
2337
    case ADDR_DIFF_VEC:
2338
    case RETURN:
2339
      return x;
2340
 
2341
    case REG:
2342
      regno = REGNO (x);
2343
 
2344
      /* First handle the case where we encounter a bare register that
2345
         is eliminable.  Replace it with a PLUS.  */
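      /* For example (added note, not in the original source): when the frame
         pointer is being eliminated in favor of the stack pointer, a bare
         (reg fp) becomes (plus (reg sp) (const_int N)), where N is the
         elimination's previous_offset.  */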
2346
      if (regno < FIRST_PSEUDO_REGISTER)
2347
        {
2348
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2349
               ep++)
2350
            if (ep->from_rtx == x && ep->can_eliminate)
2351
              return plus_constant (ep->to_rtx, ep->previous_offset);
2352
 
2353
        }
2354
      else if (reg_renumber && reg_renumber[regno] < 0
2355
               && reg_equiv_invariant && reg_equiv_invariant[regno])
2356
        {
2357
          if (may_use_invariant)
2358
            return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2359
                                     mem_mode, insn, true);
2360
          /* There exists at least one use of REGNO that cannot be
2361
             eliminated.  Prevent the defining insn from being deleted.  */
2362
          reg_equiv_init[regno] = NULL_RTX;
2363
          alter_reg (regno, -1);
2364
        }
2365
      return x;
2366
 
2367
    /* You might think handling MINUS in a manner similar to PLUS is a
2368
       good idea.  It is not.  It has been tried multiple times and every
2369
       time the change has had to be reverted.
2370
 
2371
       Other parts of reload know a PLUS is special (gen_reload for example)
2372
       and require special code to handle a reloaded PLUS operand.
2373
 
2374
       Also consider backends where the flags register is clobbered by a
2375
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2376
       lea instruction comes to mind).  If we try to reload a MINUS, we
2377
       may kill the flags register that was holding a useful value.
2378
 
2379
       So, please before trying to handle MINUS, consider reload as a
2380
       whole instead of this little section as well as the backend issues.  */
2381
    case PLUS:
2382
      /* If this is the sum of an eliminable register and a constant, rework
2383
         the sum.  */
2384
      if (REG_P (XEXP (x, 0))
2385
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2386
          && CONSTANT_P (XEXP (x, 1)))
2387
        {
2388
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2389
               ep++)
2390
            if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2391
              {
2392
                /* The only time we want to replace a PLUS with a REG (this
2393
                   occurs when the constant operand of the PLUS is the negative
2394
                   of the offset) is when we are inside a MEM.  We won't want
2395
                   to do so at other times because that would change the
2396
                   structure of the insn in a way that reload can't handle.
2397
                   We special-case the commonest situation in
2398
                   eliminate_regs_in_insn, so just replace a PLUS with a
2399
                   PLUS here, unless inside a MEM.  */
2400
                if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2401
                    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2402
                  return ep->to_rtx;
2403
                else
2404
                  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2405
                                       plus_constant (XEXP (x, 1),
2406
                                                      ep->previous_offset));
2407
              }
2408
 
2409
          /* If the register is not eliminable, we are done since the other
2410
             operand is a constant.  */
2411
          return x;
2412
        }
2413
 
2414
      /* If this is part of an address, we want to bring any constant to the
2415
         outermost PLUS.  We will do this by doing register replacement in
2416
         our operands and seeing if a constant shows up in one of them.
2417
 
2418
         Note that there is no risk of modifying the structure of the insn,
2419
         since we only get called for its operands, thus we are either
2420
         modifying the address inside a MEM, or something like an address
2421
         operand of a load-address insn.  */
2422
 
2423
      {
2424
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2425
        rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2426
 
2427
        if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2428
          {
2429
            /* If one side is a PLUS and the other side is a pseudo that
2430
               didn't get a hard register but has a reg_equiv_constant,
2431
               we must replace the constant here since it may no longer
2432
               be in the position of any operand.  */
2433
            if (GET_CODE (new0) == PLUS && REG_P (new1)
2434
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2435
                && reg_renumber[REGNO (new1)] < 0
2436
                && reg_equiv_constant != 0
2437
                && reg_equiv_constant[REGNO (new1)] != 0)
2438
              new1 = reg_equiv_constant[REGNO (new1)];
2439
            else if (GET_CODE (new1) == PLUS && REG_P (new0)
2440
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2441
                     && reg_renumber[REGNO (new0)] < 0
2442
                     && reg_equiv_constant[REGNO (new0)] != 0)
2443
              new0 = reg_equiv_constant[REGNO (new0)];
2444
 
2445
            new = form_sum (new0, new1);
2446
 
2447
            /* As above, if we are not inside a MEM we do not want to
2448
               turn a PLUS into something else.  We might try to do so here
2449
               for an addition of 0 if we aren't optimizing.  */
2450
            if (! mem_mode && GET_CODE (new) != PLUS)
2451
              return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2452
            else
2453
              return new;
2454
          }
2455
      }
2456
      return x;
2457
 
2458
    case MULT:
2459
      /* If this is the product of an eliminable register and a
2460
         constant, apply the distribute law and move the constant out
2461
         so that we have (plus (mult ..) ..).  This is needed in order
2462
         to keep load-address insns valid.   This case is pathological.
2463
         We ignore the possibility of overflow here.  */
2464
      if (REG_P (XEXP (x, 0))
2465
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2466
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
2467
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2468
             ep++)
2469
          if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2470
            {
2471
              if (! mem_mode
2472
                  /* Refs inside notes don't count for this purpose.  */
2473
                  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2474
                                      || GET_CODE (insn) == INSN_LIST)))
2475
                ep->ref_outside_mem = 1;
2476
 
2477
              return
2478
                plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2479
                               ep->previous_offset * INTVAL (XEXP (x, 1)));
2480
            }
2481
 
2482
      /* ... fall through ...  */
2483
 
2484
    case CALL:
2485
    case COMPARE:
2486
    /* See comments before PLUS about handling MINUS.  */
2487
    case MINUS:
2488
    case DIV:      case UDIV:
2489
    case MOD:      case UMOD:
2490
    case AND:      case IOR:      case XOR:
2491
    case ROTATERT: case ROTATE:
2492
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2493
    case NE:       case EQ:
2494
    case GE:       case GT:       case GEU:    case GTU:
2495
    case LE:       case LT:       case LEU:    case LTU:
2496
      {
2497
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2498
        rtx new1 = XEXP (x, 1)
2499
                   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2500
 
2501
        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2502
          return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2503
      }
2504
      return x;
2505
 
2506
    case EXPR_LIST:
2507
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2508
      if (XEXP (x, 0))
2509
        {
2510
          new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2511
          if (new != XEXP (x, 0))
2512
            {
2513
              /* If this is a REG_DEAD note, it is not valid anymore.
2514
                 Using the eliminated version could result in creating a
2515
                 REG_DEAD note for the stack or frame pointer.  */
2516
              if (GET_MODE (x) == REG_DEAD)
2517
                return (XEXP (x, 1)
2518
                        ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2519
                        : NULL_RTX);
2520
 
2521
              x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2522
            }
2523
        }
2524
 
2525
      /* ... fall through ...  */
2526
 
2527
    case INSN_LIST:
2528
      /* Now do eliminations in the rest of the chain.  If this was
2529
         an EXPR_LIST, this might result in allocating more memory than is
2530
         strictly needed, but it simplifies the code.  */
2531
      if (XEXP (x, 1))
2532
        {
2533
          new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2534
          if (new != XEXP (x, 1))
2535
            return
2536
              gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2537
        }
2538
      return x;
2539
 
2540
    case PRE_INC:
2541
    case POST_INC:
2542
    case PRE_DEC:
2543
    case POST_DEC:
2544
    case STRICT_LOW_PART:
2545
    case NEG:          case NOT:
2546
    case SIGN_EXTEND:  case ZERO_EXTEND:
2547
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2548
    case FLOAT:        case FIX:
2549
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2550
    case ABS:
2551
    case SQRT:
2552
    case FFS:
2553
    case CLZ:
2554
    case CTZ:
2555
    case POPCOUNT:
2556
    case PARITY:
2557
      new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2558
      if (new != XEXP (x, 0))
2559
        return gen_rtx_fmt_e (code, GET_MODE (x), new);
2560
      return x;
2561
 
2562
    case SUBREG:
2563
      /* Similar to above processing, but preserve SUBREG_BYTE.
2564
         Convert (subreg (mem)) to (mem) if not paradoxical.
2565
         Also, if we have a non-paradoxical (subreg (pseudo)) and the
2566
         pseudo didn't get a hard reg, we must replace this with the
2567
         eliminated version of the memory location because push_reload
2568
         may do the replacement in certain circumstances.  */
2569
      if (REG_P (SUBREG_REG (x))
2570
          && (GET_MODE_SIZE (GET_MODE (x))
2571
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2572
          && reg_equiv_memory_loc != 0
2573
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2574
        {
2575
          new = SUBREG_REG (x);
2576
        }
2577
      else
2578
        new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2579
 
2580
      if (new != SUBREG_REG (x))
2581
        {
2582
          int x_size = GET_MODE_SIZE (GET_MODE (x));
2583
          int new_size = GET_MODE_SIZE (GET_MODE (new));
2584
 
2585
          if (MEM_P (new)
2586
              && ((x_size < new_size
2587
#ifdef WORD_REGISTER_OPERATIONS
2588
                   /* On these machines, combine can create rtl of the form
2589
                      (set (subreg:m1 (reg:m2 R) 0) ...)
2590
                      where m1 < m2, and expects something interesting to
2591
                      happen to the entire word.  Moreover, it will use the
2592
                      (reg:m2 R) later, expecting all bits to be preserved.
2593
                      So if the number of words is the same, preserve the
2594
                      subreg so that push_reload can see it.  */
2595
                   && ! ((x_size - 1) / UNITS_PER_WORD
2596
                         == (new_size - 1) / UNITS_PER_WORD)
2597
#endif
2598
                   )
2599
                  || x_size == new_size)
2600
              )
2601
            return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2602
          else
2603
            return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2604
        }
2605
 
2606
      return x;
2607
 
2608
    case MEM:
2609
      /* Our only special processing is to pass the mode of the MEM to our
2610
         recursive call and copy the flags.  While we are here, handle this
2611
         case more efficiently.  */
2612
      return
2613
        replace_equiv_address_nv (x,
2614
                                  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2615
                                                    insn, true));
2616
 
2617
    case USE:
2618
      /* Handle insn_list USE that a call to a pure function may generate.  */
2619
      new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2620
      if (new != XEXP (x, 0))
2621
        return gen_rtx_USE (GET_MODE (x), new);
2622
      return x;
2623
 
2624
    case CLOBBER:
2625
    case ASM_OPERANDS:
2626
    case SET:
2627
      gcc_unreachable ();
2628
 
2629
    default:
2630
      break;
2631
    }
2632
 
2633
  /* Process each of our operands recursively.  If any have changed, make a
2634
     copy of the rtx.  */
2635
  fmt = GET_RTX_FORMAT (code);
2636
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2637
    {
2638
      if (*fmt == 'e')
2639
        {
2640
          new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2641
          if (new != XEXP (x, i) && ! copied)
2642
            {
2643
              x = shallow_copy_rtx (x);
2644
              copied = 1;
2645
            }
2646
          XEXP (x, i) = new;
2647
        }
2648
      else if (*fmt == 'E')
2649
        {
2650
          int copied_vec = 0;
2651
          for (j = 0; j < XVECLEN (x, i); j++)
2652
            {
2653
              new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2654
              if (new != XVECEXP (x, i, j) && ! copied_vec)
2655
                {
2656
                  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2657
                                             XVEC (x, i)->elem);
2658
                  if (! copied)
2659
                    {
2660
                      x = shallow_copy_rtx (x);
2661
                      copied = 1;
2662
                    }
2663
                  XVEC (x, i) = new_v;
2664
                  copied_vec = 1;
2665
                }
2666
              XVECEXP (x, i, j) = new;
2667
            }
2668
        }
2669
    }
2670
 
2671
  return x;
2672
}
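/* Added comment (not in the original source): eliminate_regs is the exported
   wrapper around eliminate_regs_1; it passes MAY_USE_INVARIANT as false for
   the outermost rtx.  */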
2673
 
2674
rtx
2675
eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2676
{
2677
  return eliminate_regs_1 (x, mem_mode, insn, false);
2678
}
2679
 
2680
/* Scan rtx X for modifications of elimination target registers.  Update
2681
   the table of eliminables to reflect the changed state.  MEM_MODE is
2682
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2683
 
2684
static void
2685
elimination_effects (rtx x, enum machine_mode mem_mode)
2686
{
2687
  enum rtx_code code = GET_CODE (x);
2688
  struct elim_table *ep;
2689
  int regno;
2690
  int i, j;
2691
  const char *fmt;
2692
 
2693
  switch (code)
2694
    {
2695
    case CONST_INT:
2696
    case CONST_DOUBLE:
2697
    case CONST_VECTOR:
2698
    case CONST:
2699
    case SYMBOL_REF:
2700
    case CODE_LABEL:
2701
    case PC:
2702
    case CC0:
2703
    case ASM_INPUT:
2704
    case ADDR_VEC:
2705
    case ADDR_DIFF_VEC:
2706
    case RETURN:
2707
      return;
2708
 
2709
    case REG:
2710
      regno = REGNO (x);
2711
 
2712
      /* First handle the case where we encounter a bare register that
2713
         is eliminable.  Replace it with a PLUS.  */
2714
      if (regno < FIRST_PSEUDO_REGISTER)
2715
        {
2716
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2717
               ep++)
2718
            if (ep->from_rtx == x && ep->can_eliminate)
2719
              {
2720
                if (! mem_mode)
2721
                  ep->ref_outside_mem = 1;
2722
                return;
2723
              }
2724
 
2725
        }
2726
      else if (reg_renumber[regno] < 0 && reg_equiv_constant
2727
               && reg_equiv_constant[regno]
2728
               && ! function_invariant_p (reg_equiv_constant[regno]))
2729
        elimination_effects (reg_equiv_constant[regno], mem_mode);
2730
      return;
2731
 
2732
    case PRE_INC:
2733
    case POST_INC:
2734
    case PRE_DEC:
2735
    case POST_DEC:
2736
    case POST_MODIFY:
2737
    case PRE_MODIFY:
2738
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2739
        if (ep->to_rtx == XEXP (x, 0))
2740
          {
2741
            int size = GET_MODE_SIZE (mem_mode);
2742
 
2743
            /* If more bytes than MEM_MODE are pushed, account for them.  */
2744
#ifdef PUSH_ROUNDING
2745
            if (ep->to_rtx == stack_pointer_rtx)
2746
              size = PUSH_ROUNDING (size);
2747
#endif
2748
            if (code == PRE_DEC || code == POST_DEC)
2749
              ep->offset += size;
2750
            else if (code == PRE_INC || code == POST_INC)
2751
              ep->offset -= size;
2752
            else if ((code == PRE_MODIFY || code == POST_MODIFY)
2753
                     && GET_CODE (XEXP (x, 1)) == PLUS
2754
                     && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2755
                     && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2756
              ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2757
          }
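      /* Illustrative example (added note, not in the original source): a
         (pre_dec (reg sp)) seen inside a 4-byte SImode MEM pushes 4 bytes
         (possibly rounded up by PUSH_ROUNDING), so the loop above increases
         the offset of every elimination whose replacement register is the
         stack pointer by that amount.  */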
2758
 
2759
      /* These two aren't unary operators.  */
2760
      if (code == POST_MODIFY || code == PRE_MODIFY)
2761
        break;
2762
 
2763
      /* Fall through to generic unary operation case.  */
2764
    case STRICT_LOW_PART:
2765
    case NEG:          case NOT:
2766
    case SIGN_EXTEND:  case ZERO_EXTEND:
2767
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2768
    case FLOAT:        case FIX:
2769
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2770
    case ABS:
2771
    case SQRT:
2772
    case FFS:
2773
    case CLZ:
2774
    case CTZ:
2775
    case POPCOUNT:
2776
    case PARITY:
2777
      elimination_effects (XEXP (x, 0), mem_mode);
2778
      return;
2779
 
2780
    case SUBREG:
2781
      if (REG_P (SUBREG_REG (x))
2782
          && (GET_MODE_SIZE (GET_MODE (x))
2783
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2784
          && reg_equiv_memory_loc != 0
2785
          && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2786
        return;
2787
 
2788
      elimination_effects (SUBREG_REG (x), mem_mode);
2789
      return;
2790
 
2791
    case USE:
2792
      /* If using a register that is the source of an elimination we still
2793
         think can be performed, note that it cannot be performed since we
2794
         don't know how this register is used.  */
2795
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2796
        if (ep->from_rtx == XEXP (x, 0))
2797
          ep->can_eliminate = 0;
2798
 
2799
      elimination_effects (XEXP (x, 0), mem_mode);
2800
      return;
2801
 
2802
    case CLOBBER:
2803
      /* If clobbering a register that is the replacement register for an
2804
         elimination we still think can be performed, note that it cannot
2805
         be performed.  Otherwise, we need not be concerned about it.  */
2806
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2807
        if (ep->to_rtx == XEXP (x, 0))
2808
          ep->can_eliminate = 0;
2809
 
2810
      elimination_effects (XEXP (x, 0), mem_mode);
2811
      return;
2812
 
2813
    case SET:
2814
      /* Check for setting a register that we know about.  */
2815
      if (REG_P (SET_DEST (x)))
2816
        {
2817
          /* See if this is setting the replacement register for an
2818
             elimination.
2819
 
2820
             If DEST is the hard frame pointer, we do nothing because we
2821
             assume that all assignments to the frame pointer are for
2822
             non-local gotos and are being done at a time when they are valid
2823
             and do not disturb anything else.  Some machines want to
2824
             eliminate a fake argument pointer (or even a fake frame pointer)
2825
             with either the real frame or the stack pointer.  Assignments to
2826
             the hard frame pointer must not prevent this elimination.  */
2827
 
2828
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2829
               ep++)
2830
            if (ep->to_rtx == SET_DEST (x)
2831
                && SET_DEST (x) != hard_frame_pointer_rtx)
2832
              {
2833
                /* If it is being incremented, adjust the offset.  Otherwise,
2834
                   this elimination can't be done.  */
2835
                rtx src = SET_SRC (x);
2836
 
2837
                if (GET_CODE (src) == PLUS
2838
                    && XEXP (src, 0) == SET_DEST (x)
2839
                    && GET_CODE (XEXP (src, 1)) == CONST_INT)
2840
                  ep->offset -= INTVAL (XEXP (src, 1));
2841
                else
2842
                  ep->can_eliminate = 0;
2843
              }
2844
        }
2845
 
2846
      elimination_effects (SET_DEST (x), 0);
2847
      elimination_effects (SET_SRC (x), 0);
2848
      return;
2849
 
2850
    case MEM:
2851
      /* Our only special processing is to pass the mode of the MEM to our
2852
         recursive call.  */
2853
      elimination_effects (XEXP (x, 0), GET_MODE (x));
2854
      return;
2855
 
2856
    default:
2857
      break;
2858
    }
2859
 
2860
  fmt = GET_RTX_FORMAT (code);
2861
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2862
    {
2863
      if (*fmt == 'e')
2864
        elimination_effects (XEXP (x, i), mem_mode);
2865
      else if (*fmt == 'E')
2866
        for (j = 0; j < XVECLEN (x, i); j++)
2867
          elimination_effects (XVECEXP (x, i, j), mem_mode);
2868
    }
2869
}
2870
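
/* A minimal standalone model of the offset bookkeeping above, assuming a
   downward-growing stack; the struct and helpers here are hypothetical
   illustrations, not part of reload.  A push through PRE_DEC/POST_DEC of
   an elimination target register moves it further from its starting
   point, so the recorded offset grows by the pushed size; a pop through
   PRE_INC/POST_INC shrinks it; an explicit (set target (plus target CST))
   subtracts CST, mirroring the SET case above.  */

struct toy_elim_pair
{
  long offset;                  /* current from -> to displacement */
};

static void
toy_note_push_pop (struct toy_elim_pair *ep, int is_decrement, long size)
{
  if (is_decrement)             /* PRE_DEC / POST_DEC: push */
    ep->offset += size;
  else                          /* PRE_INC / POST_INC: pop */
    ep->offset -= size;
}

static void
toy_note_plus_const (struct toy_elim_pair *ep, long cst)
{
  ep->offset -= cst;            /* mirrors "ep->offset -= INTVAL (...)" */
}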
 
2871
/* Descend through rtx X and verify that no references to eliminable registers
2872
   remain.  If any do remain, mark the involved register as not
2873
   eliminable.  */
2874
 
2875
static void
2876
check_eliminable_occurrences (rtx x)
2877
{
2878
  const char *fmt;
2879
  int i;
2880
  enum rtx_code code;
2881
 
2882
  if (x == 0)
2883
    return;
2884
 
2885
  code = GET_CODE (x);
2886
 
2887
  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2888
    {
2889
      struct elim_table *ep;
2890
 
2891
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2892
        if (ep->from_rtx == x)
2893
          ep->can_eliminate = 0;
2894
      return;
2895
    }
2896
 
2897
  fmt = GET_RTX_FORMAT (code);
2898
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2899
    {
2900
      if (*fmt == 'e')
2901
        check_eliminable_occurrences (XEXP (x, i));
2902
      else if (*fmt == 'E')
2903
        {
2904
          int j;
2905
          for (j = 0; j < XVECLEN (x, i); j++)
2906
            check_eliminable_occurrences (XVECEXP (x, i, j));
2907
        }
2908
    }
2909
}
2910
 
2911
/* Scan INSN and eliminate all eliminable registers in it.
2912
 
2913
   If REPLACE is nonzero, do the replacement destructively.  Also
2914
   delete the insn as dead if it is setting an eliminable register.
2915
 
2916
   If REPLACE is zero, do all our allocations in reload_obstack.
2917
 
2918
   If no eliminations were done and this insn doesn't require any elimination
2919
   processing (these are not identical conditions: it might be updating sp,
2920
   but not referencing fp; this needs to be seen during reload_as_needed so
2921
   that the offset between fp and sp can be taken into consideration), zero
2922
   is returned.  Otherwise, 1 is returned.  */
2923
 
2924
static int
2925
eliminate_regs_in_insn (rtx insn, int replace)
2926
{
2927
  int icode = recog_memoized (insn);
2928
  rtx old_body = PATTERN (insn);
2929
  int insn_is_asm = asm_noperands (old_body) >= 0;
2930
  rtx old_set = single_set (insn);
2931
  rtx new_body;
2932
  int val = 0;
2933
  int i;
2934
  rtx substed_operand[MAX_RECOG_OPERANDS];
2935
  rtx orig_operand[MAX_RECOG_OPERANDS];
2936
  struct elim_table *ep;
2937
  rtx plus_src, plus_cst_src;
2938
 
2939
  if (! insn_is_asm && icode < 0)
2940
    {
2941
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
2942
                  || GET_CODE (PATTERN (insn)) == CLOBBER
2943
                  || GET_CODE (PATTERN (insn)) == ADDR_VEC
2944
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2945
                  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2946
      return 0;
2947
    }
2948
 
2949
  if (old_set != 0 && REG_P (SET_DEST (old_set))
2950
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2951
    {
2952
      /* Check for setting an eliminable register.  */
2953
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2954
        if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2955
          {
2956
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2957
            /* If this is setting the frame pointer register to the
2958
               hardware frame pointer register and this is an elimination
2959
               that will be done (tested above), this insn is really
2960
               adjusting the frame pointer downward to compensate for
2961
               the adjustment done before a nonlocal goto.  */
2962
            if (ep->from == FRAME_POINTER_REGNUM
2963
                && ep->to == HARD_FRAME_POINTER_REGNUM)
2964
              {
2965
                rtx base = SET_SRC (old_set);
2966
                rtx base_insn = insn;
2967
                HOST_WIDE_INT offset = 0;
2968
 
2969
                while (base != ep->to_rtx)
2970
                  {
2971
                    rtx prev_insn, prev_set;
2972
 
2973
                    if (GET_CODE (base) == PLUS
2974
                        && GET_CODE (XEXP (base, 1)) == CONST_INT)
2975
                      {
2976
                        offset += INTVAL (XEXP (base, 1));
2977
                        base = XEXP (base, 0);
2978
                      }
2979
                    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2980
                             && (prev_set = single_set (prev_insn)) != 0
2981
                             && rtx_equal_p (SET_DEST (prev_set), base))
2982
                      {
2983
                        base = SET_SRC (prev_set);
2984
                        base_insn = prev_insn;
2985
                      }
2986
                    else
2987
                      break;
2988
                  }
2989
 
2990
                if (base == ep->to_rtx)
2991
                  {
2992
                    rtx src
2993
                      = plus_constant (ep->to_rtx, offset - ep->offset);
2994
 
2995
                    new_body = old_body;
2996
                    if (! replace)
2997
                      {
2998
                        new_body = copy_insn (old_body);
2999
                        if (REG_NOTES (insn))
3000
                          REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3001
                      }
3002
                    PATTERN (insn) = new_body;
3003
                    old_set = single_set (insn);
3004
 
3005
                    /* First see if this insn remains valid when we
3006
                       make the change.  If not, keep the INSN_CODE
3007
                       the same and let reload fix it up.  */
3008
                    validate_change (insn, &SET_SRC (old_set), src, 1);
3009
                    validate_change (insn, &SET_DEST (old_set),
3010
                                     ep->to_rtx, 1);
3011
                    if (! apply_change_group ())
3012
                      {
3013
                        SET_SRC (old_set) = src;
3014
                        SET_DEST (old_set) = ep->to_rtx;
3015
                      }
3016
 
3017
                    val = 1;
3018
                    goto done;
3019
                  }
3020
              }
3021
#endif
3022
 
3023
            /* In this case this insn isn't serving a useful purpose.  We
3024
               will delete it in reload_as_needed once we know that this
3025
               elimination is, in fact, being done.
3026
 
3027
               If REPLACE isn't set, we can't delete this insn, but needn't
3028
               process it since it won't be used unless something changes.  */
3029
            if (replace)
3030
              {
3031
                delete_dead_insn (insn);
3032
                return 1;
3033
              }
3034
            val = 1;
3035
            goto done;
3036
          }
3037
    }
3038
 
3039
  /* We allow one special case which happens to work on all machines we
3040
     currently support: a single set with the source or a REG_EQUAL
3041
     note being a PLUS of an eliminable register and a constant.  */
3042
  plus_src = plus_cst_src = 0;
3043
  if (old_set && REG_P (SET_DEST (old_set)))
3044
    {
3045
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3046
        plus_src = SET_SRC (old_set);
3047
      /* First see if the source is of the form (plus (...) CST).  */
3048
      if (plus_src
3049
          && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3050
        plus_cst_src = plus_src;
3051
      else if (REG_P (SET_SRC (old_set))
3052
               || plus_src)
3053
        {
3054
          /* Otherwise, see if we have a REG_EQUAL note of the form
3055
             (plus (...) CST).  */
3056
          rtx links;
3057
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3058
            {
3059
              if (REG_NOTE_KIND (links) == REG_EQUAL
3060
                  && GET_CODE (XEXP (links, 0)) == PLUS
3061
                  && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3062
                {
3063
                  plus_cst_src = XEXP (links, 0);
3064
                  break;
3065
                }
3066
            }
3067
        }
3068
 
3069
      /* Check that the first operand of the PLUS is a hard reg or
3070
         the lowpart subreg of one.  */
3071
      if (plus_cst_src)
3072
        {
3073
          rtx reg = XEXP (plus_cst_src, 0);
3074
          if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3075
            reg = SUBREG_REG (reg);
3076
 
3077
          if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3078
            plus_cst_src = 0;
3079
        }
3080
    }
3081
  if (plus_cst_src)
3082
    {
3083
      rtx reg = XEXP (plus_cst_src, 0);
3084
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3085
 
3086
      if (GET_CODE (reg) == SUBREG)
3087
        reg = SUBREG_REG (reg);
3088
 
3089
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3090
        if (ep->from_rtx == reg && ep->can_eliminate)
3091
          {
3092
            rtx to_rtx = ep->to_rtx;
3093
            offset += ep->offset;
3094
            offset = trunc_int_for_mode (offset, GET_MODE (reg));
3095
 
3096
            if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3097
              to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3098
                                    to_rtx);
3099
            /* If we have a nonzero offset, and the source is already
3100
               a simple REG, the following transformation would
3101
               increase the cost of the insn by replacing a simple REG
3102
               with (plus (reg sp) CST).  So try only when we already
3103
               had a PLUS before.  */
3104
            if (offset == 0 || plus_src)
3105
              {
3106
                rtx new_src = plus_constant (to_rtx, offset);
3107
 
3108
                new_body = old_body;
3109
                if (! replace)
3110
                  {
3111
                    new_body = copy_insn (old_body);
3112
                    if (REG_NOTES (insn))
3113
                      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3114
                  }
3115
                PATTERN (insn) = new_body;
3116
                old_set = single_set (insn);
3117
 
3118
                /* First see if this insn remains valid when we make the
3119
                   change.  If not, try to replace the whole pattern with
3120
                   a simple set (this may help if the original insn was a
3121
                   PARALLEL that was only recognized as single_set due to
3122
                   REG_UNUSED notes).  If this isn't valid either, keep
3123
                   the INSN_CODE the same and let reload fix it up.  */
3124
                if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3125
                  {
3126
                    rtx new_pat = gen_rtx_SET (VOIDmode,
3127
                                               SET_DEST (old_set), new_src);
3128
 
3129
                    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3130
                      SET_SRC (old_set) = new_src;
3131
                  }
3132
              }
3133
            else
3134
              break;
3135
 
3136
            val = 1;
3137
            /* This can't have an effect on elimination offsets, so skip right
3138
               to the end.  */
3139
            goto done;
3140
          }
3141
    }
3142
 
3143
  /* Determine the effects of this insn on elimination offsets.  */
3144
  elimination_effects (old_body, 0);
3145
 
3146
  /* Eliminate all eliminable registers occurring in operands that
3147
     can be handled by reload.  */
3148
  extract_insn (insn);
3149
  for (i = 0; i < recog_data.n_operands; i++)
3150
    {
3151
      orig_operand[i] = recog_data.operand[i];
3152
      substed_operand[i] = recog_data.operand[i];
3153
 
3154
      /* For an asm statement, every operand is eliminable.  */
3155
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3156
        {
3157
          bool is_set_src, in_plus;
3158
 
3159
          /* Check for setting a register that we know about.  */
3160
          if (recog_data.operand_type[i] != OP_IN
3161
              && REG_P (orig_operand[i]))
3162
            {
3163
              /* If we are assigning to a register that can be eliminated, it
3164
                 must be as part of a PARALLEL, since the code above handles
3165
                 single SETs.  We must indicate that we can no longer
3166
                 eliminate this reg.  */
3167
              for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3168
                   ep++)
3169
                if (ep->from_rtx == orig_operand[i])
3170
                  ep->can_eliminate = 0;
3171
            }
3172
 
3173
          /* Companion to the above plus substitution, we can allow
3174
             invariants as the source of a plain move.  */
3175
          is_set_src = false;
3176
          if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3177
            is_set_src = true;
3178
          in_plus = false;
3179
          if (plus_src
3180
              && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3181
                  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3182
            in_plus = true;
3183
 
3184
          substed_operand[i]
3185
            = eliminate_regs_1 (recog_data.operand[i], 0,
3186
                                replace ? insn : NULL_RTX,
3187
                                is_set_src || in_plus);
3188
          if (substed_operand[i] != orig_operand[i])
3189
            val = 1;
3190
          /* Terminate the search in check_eliminable_occurrences at
3191
             this point.  */
3192
          *recog_data.operand_loc[i] = 0;
3193
 
3194
        /* If an output operand changed from a REG to a MEM and we are
3195
           replacing destructively, write a CLOBBER of the original REG
           after INSN.  */
3196
          if (recog_data.operand_type[i] != OP_IN
3197
              && REG_P (orig_operand[i])
3198
              && MEM_P (substed_operand[i])
3199
              && replace)
3200
            emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3201
                             insn);
3202
        }
3203
    }
3204
 
3205
  for (i = 0; i < recog_data.n_dups; i++)
3206
    *recog_data.dup_loc[i]
3207
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3208
 
3209
  /* If any eliminable registers remain, they aren't eliminable anymore.  */
3210
  check_eliminable_occurrences (old_body);
3211
 
3212
  /* Substitute the operands; the new values are in the substed_operand
3213
     array.  */
3214
  for (i = 0; i < recog_data.n_operands; i++)
3215
    *recog_data.operand_loc[i] = substed_operand[i];
3216
  for (i = 0; i < recog_data.n_dups; i++)
3217
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3218
 
3219
  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3220
     re-recognize the insn.  We do this in case we had a simple addition
3221
     but now can do this as a load-address.  This saves an insn in this
3222
     common case.
3223
     If re-recognition fails, the old insn code number will still be used,
3224
     and some register operands may have changed into PLUS expressions.
3225
     These will be handled by find_reloads by loading them into a register
3226
     again.  */
3227
 
3228
  if (val)
3229
    {
3230
      /* If we aren't replacing things permanently and we changed something,
3231
         make another copy to ensure that all the RTL is new.  Otherwise
3232
         things can go wrong if find_reloads swaps commutative operands
3233
         and one is inside RTL that has been copied while the other is not.  */
3234
      new_body = old_body;
3235
      if (! replace)
3236
        {
3237
          new_body = copy_insn (old_body);
3238
          if (REG_NOTES (insn))
3239
            REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3240
        }
3241
      PATTERN (insn) = new_body;
3242
 
3243
      /* If we had a move insn but now we don't, rerecognize it.  This will
3244
         cause spurious re-recognition if the old move had a PARALLEL since
3245
         the new one still will, but we can't call single_set without
3246
         having put NEW_BODY into the insn and the re-recognition won't
3247
         hurt in this rare case.  */
3248
      /* ??? Why this huge if statement - why don't we just rerecognize the
3249
         thing always?  */
3250
      if (! insn_is_asm
3251
          && old_set != 0
3252
          && ((REG_P (SET_SRC (old_set))
3253
               && (GET_CODE (new_body) != SET
3254
                   || !REG_P (SET_SRC (new_body))))
3255
              /* If this was a load from or store to memory, compare
3256
                 the MEM in recog_data.operand to the one in the insn.
3257
                 If they are not equal, then rerecognize the insn.  */
3258
              || (old_set != 0
3259
                  && ((MEM_P (SET_SRC (old_set))
3260
                       && SET_SRC (old_set) != recog_data.operand[1])
3261
                      || (MEM_P (SET_DEST (old_set))
3262
                          && SET_DEST (old_set) != recog_data.operand[0])))
3263
              /* If this was an add insn before, rerecognize.  */
3264
              || GET_CODE (SET_SRC (old_set)) == PLUS))
3265
        {
3266
          int new_icode = recog (PATTERN (insn), insn, 0);
3267
          if (new_icode >= 0)
3268
            INSN_CODE (insn) = new_icode;
3269
        }
3270
    }
3271
 
3272
  /* Restore the old body.  If there were any changes to it, we made a copy
3273
     of it while the changes were still in place, so we'll correctly return
3274
     a modified insn below.  */
3275
  if (! replace)
3276
    {
3277
      /* Restore the old body.  */
3278
      for (i = 0; i < recog_data.n_operands; i++)
3279
        *recog_data.operand_loc[i] = orig_operand[i];
3280
      for (i = 0; i < recog_data.n_dups; i++)
3281
        *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3282
    }
3283
 
3284
  /* Update all elimination pairs to reflect the status after the current
3285
     insn.  The changes we make were determined by the earlier call to
3286
     elimination_effects.
3287
 
3288
     We also detect cases where register elimination cannot be done,
3289
     namely, if a register would be both changed and referenced outside a MEM
3290
     in the resulting insn since such an insn is often undefined and, even if
3291
     not, we cannot know what meaning will be given to it.  Note that it is
3292
     valid to have a register used in an address in an insn that changes it
3293
     (presumably with a pre- or post-increment or decrement).
3294
 
3295
     If anything changes, return nonzero.  */
3296
 
3297
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3298
    {
3299
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3300
        ep->can_eliminate = 0;
3301
 
3302
      ep->ref_outside_mem = 0;
3303
 
3304
      if (ep->previous_offset != ep->offset)
3305
        val = 1;
3306
    }
3307
 
3308
 done:
3309
  /* If we changed something, perform elimination in REG_NOTES.  This is
3310
     needed even when REPLACE is zero because a REG_DEAD note might refer
3311
     to a register that we eliminate and could cause a different number
3312
     of spill registers to be needed in the final reload pass than in
3313
     the pre-passes.  */
3314
  if (val && REG_NOTES (insn) != 0)
3315
    REG_NOTES (insn)
3316
      = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3317
 
3318
  return val;
3319
}
3320
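
/* A worked example of the single-set PLUS case handled above; the helper
   below is a hypothetical standalone sketch, not reload code.  Suppose
   the elimination in effect is fp -> sp with ep->offset == 16.  Then

       (set (reg r1) (plus (reg fp) (const_int 8)))

   is rewritten by substituting sp for fp and folding ep->offset into the
   displacement, giving

       (set (reg r1) (plus (reg sp) (const_int 24)))

   which is what plus_constant (to_rtx, offset) builds once OFFSET has
   been updated by "offset += ep->offset".  */

static long
toy_eliminated_displacement (long insn_cst, long elim_offset)
{
  /* Mirrors "offset += ep->offset"; the truncation to the register's
     mode done by trunc_int_for_mode is omitted in this sketch.  */
  return insn_cst + elim_offset;
}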
 
3321
/* Loop through all elimination pairs.
3322
   Record each pair's current offset as its previous offset.
3323
 
3324
   Recalculate the number of elimination pairs that are not at their
3325
   initial offset.  */
3326
 
3327
static void
3328
update_eliminable_offsets (void)
3329
{
3330
  struct elim_table *ep;
3331
 
3332
  num_not_at_initial_offset = 0;
3333
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3334
    {
3335
      ep->previous_offset = ep->offset;
3336
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3337
        num_not_at_initial_offset++;
3338
    }
3339
}
3340
 
3341
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3342
   replacement we currently believe is valid, mark it as not eliminable if X
3343
   modifies DEST in any way other than by adding a constant integer to it.
3344
 
3345
   If DEST is the hard frame pointer, we do nothing because we assume that
3346
   all assignments to the hard frame pointer are nonlocal gotos and are being
3347
   done at a time when they are valid and do not disturb anything else.
3348
   Some machines want to eliminate a fake argument pointer with either the
3349
   frame or stack pointer.  Assignments to the hard frame pointer must not
3350
   prevent this elimination.
3351
 
3352
   Called via note_stores from reload before starting its passes to scan
3353
   the insns of the function.  */
3354
 
3355
static void
3356
mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
3357
{
3358
  unsigned int i;
3359
 
3360
  /* A SUBREG of a hard register here is just changing its mode.  We should
3361
     not see a SUBREG of an eliminable hard register, but check just in
3362
     case.  */
3363
  if (GET_CODE (dest) == SUBREG)
3364
    dest = SUBREG_REG (dest);
3365
 
3366
  if (dest == hard_frame_pointer_rtx)
3367
    return;
3368
 
3369
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3370
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3371
        && (GET_CODE (x) != SET
3372
            || GET_CODE (SET_SRC (x)) != PLUS
3373
            || XEXP (SET_SRC (x), 0) != dest
3374
            || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3375
      {
3376
        reg_eliminate[i].can_eliminate_previous
3377
          = reg_eliminate[i].can_eliminate = 0;
3378
        num_eliminable--;
3379
      }
3380
}
3381
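
/* The test above, restated as a standalone predicate over plain flags
   (hypothetical names, for illustration only): a store into an
   elimination target register leaves the elimination usable only when
   the store has the exact shape (set target (plus target (const_int N)));
   any other modification makes the target's value untrackable, so the
   elimination must be abandoned.  */

static int
toy_store_keeps_elimination (int is_set, int src_is_plus,
                             int plus_arg0_is_dest, int plus_arg1_is_const_int)
{
  return is_set && src_is_plus && plus_arg0_is_dest && plus_arg1_is_const_int;
}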
 
3382
/* Verify that the initial elimination offsets did not change since the
3383
   last call to set_initial_elim_offsets.  This is used to catch cases
3384
   where something illegal happened during reload_as_needed that could
3385
   cause incorrect code to be generated if we did not check for it.  */
3386
 
3387
static bool
3388
verify_initial_elim_offsets (void)
3389
{
3390
  HOST_WIDE_INT t;
3391
 
3392
  if (!num_eliminable)
3393
    return true;
3394
 
3395
#ifdef ELIMINABLE_REGS
3396
  {
3397
   struct elim_table *ep;
3398
 
3399
   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3400
     {
3401
       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3402
       if (t != ep->initial_offset)
3403
         return false;
3404
     }
3405
  }
3406
#else
3407
  INITIAL_FRAME_POINTER_OFFSET (t);
3408
  if (t != reg_eliminate[0].initial_offset)
3409
    return false;
3410
#endif
3411
 
3412
  return true;
3413
}
3414
 
3415
/* Reset all offsets on eliminable registers to their initial values.  */
3416
 
3417
static void
3418
set_initial_elim_offsets (void)
3419
{
3420
  struct elim_table *ep = reg_eliminate;
3421
 
3422
#ifdef ELIMINABLE_REGS
3423
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3424
    {
3425
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3426
      ep->previous_offset = ep->offset = ep->initial_offset;
3427
    }
3428
#else
3429
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3430
  ep->previous_offset = ep->offset = ep->initial_offset;
3431
#endif
3432
 
3433
  num_not_at_initial_offset = 0;
3434
}
3435
 
3436
/* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3437
 
3438
static void
3439
set_initial_eh_label_offset (rtx label)
3440
{
3441
  set_label_offsets (label, NULL_RTX, 1);
3442
}
3443
 
3444
/* Initialize the known label offsets.
3445
   Set a known offset for each forced label to be at the initial offset
3446
   of each elimination.  We do this because we assume that all
3447
   computed jumps occur from a location where each elimination is
3448
   at its initial offset.
3449
   For all other labels, show that we don't know the offsets.  */
3450
 
3451
static void
3452
set_initial_label_offsets (void)
3453
{
3454
  rtx x;
3455
  memset (offsets_known_at, 0, num_labels);
3456
 
3457
  for (x = forced_labels; x; x = XEXP (x, 1))
3458
    if (XEXP (x, 0))
3459
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3460
 
3461
  for_each_eh_label (set_initial_eh_label_offset);
3462
}
3463
 
3464
/* Set all elimination offsets to the known values for the code label given
3465
   by INSN.  */
3466
 
3467
static void
3468
set_offsets_for_label (rtx insn)
3469
{
3470
  unsigned int i;
3471
  int label_nr = CODE_LABEL_NUMBER (insn);
3472
  struct elim_table *ep;
3473
 
3474
  num_not_at_initial_offset = 0;
3475
  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3476
    {
3477
      ep->offset = ep->previous_offset
3478
                 = offsets_at[label_nr - first_label_num][i];
3479
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3480
        num_not_at_initial_offset++;
3481
    }
3482
}
3483
 
3484
/* See if anything that happened changes which eliminations are valid.
3485
   For example, on the SPARC, whether or not the frame pointer can
3486
   be eliminated can depend on what registers have been used.  We need
3487
   not check some conditions again (such as flag_omit_frame_pointer)
3488
   since they can't have changed.  */
3489
 
3490
static void
3491
update_eliminables (HARD_REG_SET *pset)
3492
{
3493
  int previous_frame_pointer_needed = frame_pointer_needed;
3494
  struct elim_table *ep;
3495
 
3496
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3497
    if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3498
#ifdef ELIMINABLE_REGS
3499
        || ! CAN_ELIMINATE (ep->from, ep->to)
3500
#endif
3501
        )
3502
      ep->can_eliminate = 0;
3503
 
3504
  /* Look for the case where we have discovered that we can't replace
3505
     register A with register B and that means that we will now be
3506
     trying to replace register A with register C.  This means we can
3507
     no longer replace register C with register B and we need to disable
3508
     such an elimination, if it exists.  This occurs often with A == ap,
3509
     B == sp, and C == fp.  */
3510
 
3511
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3512
    {
3513
      struct elim_table *op;
3514
      int new_to = -1;
3515
 
3516
      if (! ep->can_eliminate && ep->can_eliminate_previous)
3517
        {
3518
          /* Find the current elimination for ep->from, if there is a
3519
             new one.  */
3520
          for (op = reg_eliminate;
3521
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3522
            if (op->from == ep->from && op->can_eliminate)
3523
              {
3524
                new_to = op->to;
3525
                break;
3526
              }
3527
 
3528
          /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3529
             disable it.  */
3530
          for (op = reg_eliminate;
3531
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3532
            if (op->from == new_to && op->to == ep->to)
3533
              op->can_eliminate = 0;
3534
        }
3535
    }
3536
 
3537
  /* See if any registers that we thought we could eliminate the previous
3538
     time are no longer eliminable.  If so, something has changed and we
3539
     must spill the register.  Also, recompute the number of eliminable
3540
     registers and see if the frame pointer is needed; it is if there is
3541
     no elimination of the frame pointer that we can perform.  */
3542
 
3543
  frame_pointer_needed = 1;
3544
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3545
    {
3546
      if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3547
          && ep->to != HARD_FRAME_POINTER_REGNUM)
3548
        frame_pointer_needed = 0;
3549
 
3550
      if (! ep->can_eliminate && ep->can_eliminate_previous)
3551
        {
3552
          ep->can_eliminate_previous = 0;
3553
          SET_HARD_REG_BIT (*pset, ep->from);
3554
          num_eliminable--;
3555
        }
3556
    }
3557
 
3558
  /* If we didn't need a frame pointer last time, but we do now, spill
3559
     the hard frame pointer.  */
3560
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
3561
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3562
}
3563
 
3564
/* Initialize the table of registers to eliminate.  */
3565
 
3566
static void
3567
init_elim_table (void)
3568
{
3569
  struct elim_table *ep;
3570
#ifdef ELIMINABLE_REGS
3571
  const struct elim_table_1 *ep1;
3572
#endif
3573
 
3574
  if (!reg_eliminate)
3575
    reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3576
 
3577
  /* Does this function require a frame pointer?  */
3578
 
3579
  frame_pointer_needed = (! flag_omit_frame_pointer
3580
                          /* ?? If EXIT_IGNORE_STACK is set, we will not save
3581
                             and restore sp for alloca.  So we can't eliminate
3582
                             the frame pointer in that case.  At some point,
3583
                             we should improve this by emitting the
3584
                             sp-adjusting insns for this case.  */
3585
                          || (current_function_calls_alloca
3586
                              && EXIT_IGNORE_STACK)
3587
                          || current_function_accesses_prior_frames
3588
                          || FRAME_POINTER_REQUIRED);
3589
 
3590
  num_eliminable = 0;
3591
 
3592
#ifdef ELIMINABLE_REGS
3593
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3594
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3595
    {
3596
      ep->from = ep1->from;
3597
      ep->to = ep1->to;
3598
      ep->can_eliminate = ep->can_eliminate_previous
3599
        = (CAN_ELIMINATE (ep->from, ep->to)
3600
           && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3601
    }
3602
#else
3603
  reg_eliminate[0].from = reg_eliminate_1[0].from;
3604
  reg_eliminate[0].to = reg_eliminate_1[0].to;
3605
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3606
    = ! frame_pointer_needed;
3607
#endif
3608
 
3609
  /* Count the number of eliminable registers and build the FROM and TO
3610
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
3611
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3612
     We depend on this.  */
3613
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3614
    {
3615
      num_eliminable += ep->can_eliminate;
3616
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3617
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3618
    }
3619
}
3620
 
3621
/* Kick all pseudos out of hard register REGNO.
3622
 
3623
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3624
   because we found we can't eliminate some register.  In that case, no pseudos
3625
   are allowed to be in the register, even if they are only in a block that
3626
   doesn't require spill registers, unlike the case when we are spilling this
3627
   hard reg to produce another spill register.
3628
 
3629
   Any pseudos that must be kicked out are recorded in spilled_pseudos.  */
3630
 
3631
static void
3632
spill_hard_reg (unsigned int regno, int cant_eliminate)
3633
{
3634
  int i;
3635
 
3636
  if (cant_eliminate)
3637
    {
3638
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3639
      regs_ever_live[regno] = 1;
3640
    }
3641
 
3642
  /* Spill every pseudo reg that was allocated to this reg
3643
     or to something that overlaps this reg.  */
3644
 
3645
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3646
    if (reg_renumber[i] >= 0
3647
        && (unsigned int) reg_renumber[i] <= regno
3648
        && ((unsigned int) reg_renumber[i]
3649
            + hard_regno_nregs[(unsigned int) reg_renumber[i]]
3650
                              [PSEUDO_REGNO_MODE (i)]
3651
            > regno))
3652
      SET_REGNO_REG_SET (&spilled_pseudos, i);
3653
}
3654
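
/* The loop above uses a half-open interval test: a pseudo allocated to
   hard register R that occupies NREGS consecutive hard registers in its
   mode overlaps REGNO exactly when R <= REGNO < R + NREGS.  Restated as
   a standalone sketch (hypothetical helper, illustration only):  */

static int
toy_pseudo_overlaps_hard_reg (unsigned int first_hard_reg,
                              unsigned int nregs, unsigned int regno)
{
  return first_hard_reg <= regno && regno < first_hard_reg + nregs;
}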
 
3655
/* After find_reload_regs has been run for all insns that need reloads,
3656
   and/or spill_hard_regs was called, this function is used to actually
3657
   spill pseudo registers and try to reallocate them.  It also sets up the
3658
   spill_regs array for use by choose_reload_regs.  */
3659
 
3660
static int
3661
finish_spills (int global)
3662
{
3663
  struct insn_chain *chain;
3664
  int something_changed = 0;
3665
  unsigned i;
3666
  reg_set_iterator rsi;
3667
 
3668
  /* Build the spill_regs array for the function.  */
3669
  /* If there are some registers still to eliminate and one of the spill regs
3670
     wasn't ever used before, additional stack space may have to be
3671
     allocated to store this register.  Thus, we may have changed the offset
3672
     between the stack and frame pointers, so mark that something has changed.
3673
 
3674
     One might think that we need only set SOMETHING_CHANGED to 1 if this is a call-used
3675
     register.  However, the set of registers that must be saved by the
3676
     prologue is not identical to the call-used set.  For example, the
3677
     register used by the call insn for the return PC is a call-used register,
3678
     but must be saved by the prologue.  */
3679
 
3680
  n_spills = 0;
3681
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3682
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
3683
      {
3684
        spill_reg_order[i] = n_spills;
3685
        spill_regs[n_spills++] = i;
3686
        if (num_eliminable && ! regs_ever_live[i])
3687
          something_changed = 1;
3688
        regs_ever_live[i] = 1;
3689
      }
3690
    else
3691
      spill_reg_order[i] = -1;
3692
 
3693
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3694
    {
3695
      /* Record the current hard register the pseudo is allocated to in
3696
         pseudo_previous_regs so we avoid reallocating it to the same
3697
         hard reg in a later pass.  */
3698
      gcc_assert (reg_renumber[i] >= 0);
3699
 
3700
      SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3701
      /* Mark it as no longer having a hard register home.  */
3702
      reg_renumber[i] = -1;
3703
      /* We will need to scan everything again.  */
3704
      something_changed = 1;
3705
    }
3706
 
3707
  /* Retry global register allocation if possible.  */
3708
  if (global)
3709
    {
3710
      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3711
      /* For every insn that needs reloads, set the registers used as spill
3712
         regs in pseudo_forbidden_regs for every pseudo live across the
3713
         insn.  */
3714
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3715
        {
3716
          EXECUTE_IF_SET_IN_REG_SET
3717
            (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3718
            {
3719
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3720
                                chain->used_spill_regs);
3721
            }
3722
          EXECUTE_IF_SET_IN_REG_SET
3723
            (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3724
            {
3725
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3726
                                chain->used_spill_regs);
3727
            }
3728
        }
3729
 
3730
      /* Retry allocating the spilled pseudos.  For each reg, merge the
3731
         various reg sets that indicate which hard regs can't be used,
3732
         and call retry_global_alloc.
3733
         We change spill_pseudos here to only contain pseudos that did not
3734
         get a new hard register.  */
3735
      for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3736
        if (reg_old_renumber[i] != reg_renumber[i])
3737
          {
3738
            HARD_REG_SET forbidden;
3739
            COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3740
            IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3741
            IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3742
            retry_global_alloc (i, forbidden);
3743
            if (reg_renumber[i] >= 0)
3744
              CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3745
          }
3746
    }
3747
 
3748
  /* Fix up the register information in the insn chain.
3749
     This involves deleting those of the spilled pseudos which did not get
3750
     a new hard register home from the live_throughout and dead_or_set sets.  */
3751
  for (chain = reload_insn_chain; chain; chain = chain->next)
3752
    {
3753
      HARD_REG_SET used_by_pseudos;
3754
      HARD_REG_SET used_by_pseudos2;
3755
 
3756
      AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3757
      AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3758
 
3759
      /* Mark any unallocated hard regs as available for spills.  That
3760
         makes inheritance work somewhat better.  */
3761
      if (chain->need_reload)
3762
        {
3763
          REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3764
          REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3765
          IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3766
 
3767
          /* Save the old value for the sanity test below.  */
3768
          COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3769
 
3770
          compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3771
          compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3772
          COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3773
          AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3774
 
3775
          /* Make sure we only enlarge the set.  */
3776
          GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3777
          gcc_unreachable ();
3778
        ok:;
3779
        }
3780
    }
3781
 
3782
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
3783
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3784
    {
3785
      int regno = reg_renumber[i];
3786
      if (reg_old_renumber[i] == regno)
3787
        continue;
3788
 
3789
      alter_reg (i, reg_old_renumber[i]);
3790
      reg_old_renumber[i] = regno;
3791
      if (dump_file)
3792
        {
3793
          if (regno == -1)
3794
            fprintf (dump_file, " Register %d now on stack.\n\n", i);
3795
          else
3796
            fprintf (dump_file, " Register %d now in %d.\n\n",
3797
                     i, reg_renumber[i]);
3798
        }
3799
    }
3800
 
3801
  return something_changed;
3802
}
3803
 
3804
/* Find all paradoxical subregs within X and update reg_max_ref_width.  */
3805
 
3806
static void
3807
scan_paradoxical_subregs (rtx x)
3808
{
3809
  int i;
3810
  const char *fmt;
3811
  enum rtx_code code = GET_CODE (x);
3812
 
3813
  switch (code)
3814
    {
3815
    case REG:
3816
    case CONST_INT:
3817
    case CONST:
3818
    case SYMBOL_REF:
3819
    case LABEL_REF:
3820
    case CONST_DOUBLE:
3821
    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
3822
    case CC0:
3823
    case PC:
3824
    case USE:
3825
    case CLOBBER:
3826
      return;
3827
 
3828
    case SUBREG:
3829
      if (REG_P (SUBREG_REG (x))
3830
          && (GET_MODE_SIZE (GET_MODE (x))
3831
              > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
3832
        reg_max_ref_width[REGNO (SUBREG_REG (x))]
3833
          = GET_MODE_SIZE (GET_MODE (x));
3834
      return;
3835
 
3836
    default:
3837
      break;
3838
    }
3839
 
3840
  fmt = GET_RTX_FORMAT (code);
3841
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3842
    {
3843
      if (fmt[i] == 'e')
3844
        scan_paradoxical_subregs (XEXP (x, i));
3845
      else if (fmt[i] == 'E')
3846
        {
3847
          int j;
3848
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3849
            scan_paradoxical_subregs (XVECEXP (x, i, j));
3850
        }
3851
    }
3852
}
3853
 
3854
/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
3855
   examine all of the reload insns between PREV and NEXT exclusive, and
3856
   annotate all that may trap.  */
3857
 
3858
static void
3859
fixup_eh_region_note (rtx insn, rtx prev, rtx next)
3860
{
3861
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3862
  unsigned int trap_count;
3863
  rtx i;
3864
 
3865
  if (note == NULL)
3866
    return;
3867
 
3868
  if (may_trap_p (PATTERN (insn)))
3869
    trap_count = 1;
3870
  else
3871
    {
3872
      remove_note (insn, note);
3873
      trap_count = 0;
3874
    }
3875
 
3876
  for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
3877
    if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
3878
      {
3879
        trap_count++;
3880
        REG_NOTES (i)
3881
          = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
3882
      }
3883
}
3884
 
3885
/* Reload pseudo-registers into hard regs around each insn as needed.
3886
   Additional register load insns are output before the insn that needs it
3887
   and perhaps store insns after insns that modify the reloaded pseudo reg.
3888
 
3889
   reg_last_reload_reg and reg_reloaded_contents keep track of
3890
   which registers are already available in reload registers.
3891
   We update these for the reloads that we perform,
3892
   as the insns are scanned.  */
3893
 
3894
static void
3895
reload_as_needed (int live_known)
3896
{
3897
  struct insn_chain *chain;
3898
#if defined (AUTO_INC_DEC)
3899
  int i;
3900
#endif
3901
  rtx x;
3902
 
3903
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
3904
  memset (spill_reg_store, 0, sizeof spill_reg_store);
3905
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
3906
  INIT_REG_SET (&reg_has_output_reload);
3907
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
3908
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
3909
 
3910
  set_initial_elim_offsets ();
3911
 
3912
  for (chain = reload_insn_chain; chain; chain = chain->next)
3913
    {
3914
      rtx prev = 0;
3915
      rtx insn = chain->insn;
3916
      rtx old_next = NEXT_INSN (insn);
3917
 
3918
      /* If we pass a label, copy the offsets from the label information
3919
         into the current offsets of each elimination.  */
3920
      if (LABEL_P (insn))
3921
        set_offsets_for_label (insn);
3922
 
3923
      else if (INSN_P (insn))
3924
        {
3925
          regset_head regs_to_forget;
3926
          INIT_REG_SET (&regs_to_forget);
3927
          note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
3928
 
3929
          /* If this is a USE or CLOBBER of a MEM, ensure that any
3930
             references to eliminable registers have been removed.  */
3931
 
3932
          if ((GET_CODE (PATTERN (insn)) == USE
3933
               || GET_CODE (PATTERN (insn)) == CLOBBER)
3934
              && MEM_P (XEXP (PATTERN (insn), 0)))
3935
            XEXP (XEXP (PATTERN (insn), 0), 0)
3936
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3937
                                GET_MODE (XEXP (PATTERN (insn), 0)),
3938
                                NULL_RTX);
3939
 
3940
          /* If we need to do register elimination processing, do so.
3941
             This might delete the insn, in which case we are done.  */
3942
          if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3943
            {
3944
              eliminate_regs_in_insn (insn, 1);
3945
              if (NOTE_P (insn))
3946
                {
3947
                  update_eliminable_offsets ();
3948
                  CLEAR_REG_SET (&regs_to_forget);
3949
                  continue;
3950
                }
3951
            }
3952
 
3953
          /* If need_elim is nonzero but need_reload is zero, one might think
3954
             that we could simply set n_reloads to 0.  However, find_reloads
3955
             could have done some manipulation of the insn (such as swapping
3956
             commutative operands), and these manipulations are lost during
3957
             the first pass for every insn that needs register elimination.
3958
             So the actions of find_reloads must be redone here.  */
3959
 
3960
          if (! chain->need_elim && ! chain->need_reload
3961
              && ! chain->need_operand_change)
3962
            n_reloads = 0;
3963
          /* First find the pseudo regs that must be reloaded for this insn.
3964
             This info is returned in the tables reload_... (see reload.h).
3965
             Also modify the body of INSN by substituting RELOAD
3966
             rtx's for those pseudo regs.  */
3967
          else
3968
            {
3969
              CLEAR_REG_SET (&reg_has_output_reload);
3970
              CLEAR_HARD_REG_SET (reg_is_output_reload);
3971
 
3972
              find_reloads (insn, 1, spill_indirect_levels, live_known,
3973
                            spill_reg_order);
3974
            }
3975
 
3976
          if (n_reloads > 0)
3977
            {
3978
              rtx next = NEXT_INSN (insn);
3979
              rtx p;
3980
 
3981
              prev = PREV_INSN (insn);
3982
 
3983
              /* Now compute which reload regs to reload them into.  Perhaps
3984
                 reusing reload regs from previous insns, or else output
3985
                 load insns to reload them.  Maybe output store insns too.
3986
                 Record the choices of reload reg in reload_reg_rtx.  */
3987
              choose_reload_regs (chain);
3988
 
3989
              /* Merge any reloads that we didn't combine for fear of
3990
                 increasing the number of spill registers needed but now
3991
                 discover can be safely merged.  */
3992
              if (SMALL_REGISTER_CLASSES)
3993
                merge_assigned_reloads (insn);
3994
 
3995
              /* Generate the insns to reload operands into or out of
3996
                 their reload regs.  */
3997
              emit_reload_insns (chain);
3998
 
3999
              /* Substitute the chosen reload regs from reload_reg_rtx
4000
                 into the insn's body (or perhaps into the bodies of other
4001
                 load and store insns that we just made for reloading
4002
                 and that we moved the structure into).  */
4003
              subst_reloads (insn);
4004
 
4005
              /* Adjust the exception region notes for loads and stores.  */
4006
              if (flag_non_call_exceptions && !CALL_P (insn))
4007
                fixup_eh_region_note (insn, prev, next);
4008
 
4009
              /* If this was an ASM, make sure that all the reload insns
4010
                 we have generated are valid.  If not, give an error
4011
                 and delete them.  */
4012
              if (asm_noperands (PATTERN (insn)) >= 0)
4013
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4014
                  if (p != insn && INSN_P (p)
4015
                      && GET_CODE (PATTERN (p)) != USE
4016
                      && (recog_memoized (p) < 0
4017
                          || (extract_insn (p), ! constrain_operands (1))))
4018
                    {
4019
                      error_for_asm (insn,
4020
                                     "%<asm%> operand requires "
4021
                                     "impossible reload");
4022
                      delete_insn (p);
4023
                    }
4024
            }
4025
 
4026
          if (num_eliminable && chain->need_elim)
4027
            update_eliminable_offsets ();
4028
 
4029
          /* Any previously reloaded spilled pseudo reg, stored in this insn,
4030
             is no longer validly lying around to save a future reload.
4031
             Note that this does not detect pseudos that were reloaded
4032
             for this insn in order to be stored in
4033
             (obeying register constraints).  That is correct; such reload
4034
             registers ARE still valid.  */
4035
          forget_marked_reloads (&regs_to_forget);
4036
          CLEAR_REG_SET (&regs_to_forget);
4037
 
4038
          /* There may have been CLOBBER insns placed after INSN.  So scan
4039
             between INSN and NEXT and use them to forget old reloads.  */
4040
          for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4041
            if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4042
              note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4043
 
4044
#ifdef AUTO_INC_DEC
4045
          /* Likewise for regs altered by auto-increment in this insn.
4046
             REG_INC notes have been changed by reloading:
4047
             find_reloads_address_1 records substitutions for them,
4048
             which have been performed by subst_reloads above.  */
4049
          for (i = n_reloads - 1; i >= 0; i--)
4050
            {
4051
              rtx in_reg = rld[i].in_reg;
4052
              if (in_reg)
4053
                {
4054
                  enum rtx_code code = GET_CODE (in_reg);
4055
                  /* PRE_INC / PRE_DEC will have the reload register ending up
4056
                     with the same value as the stack slot, but that doesn't
4057
                     hold true for POST_INC / POST_DEC.  Either we have to
4058
                     convert the memory access to a true POST_INC / POST_DEC,
4059
                     or we can't use the reload register for inheritance.  */
4060
                  if ((code == POST_INC || code == POST_DEC)
4061
                      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4062
                                            REGNO (rld[i].reg_rtx))
4063
                      /* Make sure it is the inc/dec pseudo, and not
4064
                         some other (e.g. output operand) pseudo.  */
4065
                      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4066
                          == REGNO (XEXP (in_reg, 0))))
4067
 
4068
                    {
4069
                      rtx reload_reg = rld[i].reg_rtx;
4070
                      enum machine_mode mode = GET_MODE (reload_reg);
4071
                      int n = 0;
4072
                      rtx p;
4073
 
4074
                      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4075
                        {
4076
                          /* We really want to ignore REG_INC notes here, so
4077
                             use PATTERN (p) as argument to reg_set_p.  */
4078
                          if (reg_set_p (reload_reg, PATTERN (p)))
4079
                            break;
4080
                          n = count_occurrences (PATTERN (p), reload_reg, 0);
4081
                          if (! n)
4082
                            continue;
4083
                          if (n == 1)
4084
                            {
4085
                              n = validate_replace_rtx (reload_reg,
4086
                                                        gen_rtx_fmt_e (code,
4087
                                                                       mode,
4088
                                                                       reload_reg),
4089
                                                        p);
4090
 
4091
                              /* We must also verify that the constraints
4092
                                 are met after the replacement.  */
4093
                              extract_insn (p);
4094
                              if (n)
4095
                                n = constrain_operands (1);
4096
                              else
4097
                                break;
4098
 
4099
                              /* If the constraints were not met, then
4100
                                 undo the replacement.  */
4101
                              if (!n)
4102
                                {
4103
                                  validate_replace_rtx (gen_rtx_fmt_e (code,
4104
                                                                       mode,
4105
                                                                       reload_reg),
4106
                                                        reload_reg, p);
4107
                                  break;
4108
                                }
4109
 
4110
                            }
4111
                          break;
4112
                        }
4113
                      if (n == 1)
4114
                        {
4115
                          REG_NOTES (p)
4116
                            = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4117
                                                 REG_NOTES (p));
4118
                          /* Mark this as having an output reload so that the
4119
                             REG_INC processing code below won't invalidate
4120
                             the reload for inheritance.  */
4121
                          SET_HARD_REG_BIT (reg_is_output_reload,
4122
                                            REGNO (reload_reg));
4123
                          SET_REGNO_REG_SET (&reg_has_output_reload,
4124
                                             REGNO (XEXP (in_reg, 0)));
4125
                        }
4126
                      else
4127
                        forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4128
                                              NULL);
4129
                    }
4130
                  else if ((code == PRE_INC || code == PRE_DEC)
4131
                           && TEST_HARD_REG_BIT (reg_reloaded_valid,
4132
                                                 REGNO (rld[i].reg_rtx))
4133
                           /* Make sure it is the inc/dec pseudo, and not
4134
                              some other (e.g. output operand) pseudo.  */
4135
                           && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4136
                               == REGNO (XEXP (in_reg, 0))))
4137
                    {
4138
                      SET_HARD_REG_BIT (reg_is_output_reload,
4139
                                        REGNO (rld[i].reg_rtx));
4140
                      SET_REGNO_REG_SET (&reg_has_output_reload,
4141
                                         REGNO (XEXP (in_reg, 0)));
4142
                    }
4143
                }
4144
            }
4145
          /* If a pseudo that got a hard register is auto-incremented,
4146
             we must purge records of copying it into pseudos without
4147
             hard registers.  */
4148
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4149
            if (REG_NOTE_KIND (x) == REG_INC)
4150
              {
4151
                /* See if this pseudo reg was reloaded in this insn.
4152
                   If so, its last-reload info is still valid
4153
                   because it is based on this insn's reload.  */
4154
                for (i = 0; i < n_reloads; i++)
4155
                  if (rld[i].out == XEXP (x, 0))
4156
                    break;
4157
 
4158
                if (i == n_reloads)
4159
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4160
              }
4161
#endif
4162
        }
4163
      /* A reload reg's contents are unknown after a label.  */
4164
      if (LABEL_P (insn))
4165
        CLEAR_HARD_REG_SET (reg_reloaded_valid);
4166
 
4167
      /* Don't assume a reload reg is still good after a call insn
4168
         if it is a call-used reg, or if it contains a value that will
4169
         be partially clobbered by the call.  */
4170
      else if (CALL_P (insn))
4171
        {
4172
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4173
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4174
        }
4175
    }
4176
 
4177
  /* Clean up.  */
4178
  free (reg_last_reload_reg);
4179
  CLEAR_REG_SET (&reg_has_output_reload);
4180
}
4181
 
4182
/* Discard all record of any value reloaded from X,
4183
   or reloaded in X from someplace else;
4184
   unless X is an output reload reg of the current insn.
4185
 
4186
   X may be a hard reg (the reload reg)
4187
   or it may be a pseudo reg that was reloaded from.
4188
 
4189
   When DATA is non-NULL just mark the registers in regset
4190
   to be forgotten later.  */
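/* This routine has the shape of a note_stores callback; a caller in
   this file would typically invoke it roughly as

     note_stores (PATTERN (insn), forget_old_reloads_1, NULL);

   and pass a regset as DATA instead when the forgetting is to be
   deferred to forget_marked_reloads below.  */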
4191
 
4192
static void
4193
forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
4194
                      void *data)
4195
{
4196
  unsigned int regno;
4197
  unsigned int nr;
4198
  regset regs = (regset) data;
4199
 
4200
  /* note_stores does give us subregs of hard regs,
4201
     subreg_regno_offset requires a hard reg.  */
4202
  while (GET_CODE (x) == SUBREG)
4203
    {
4204
      /* We ignore the subreg offset when calculating the regno,
4205
         because we are using the entire underlying hard register
4206
         below.  */
4207
      x = SUBREG_REG (x);
4208
    }
4209
 
4210
  if (!REG_P (x))
4211
    return;
4212
 
4213
  regno = REGNO (x);
4214
 
4215
  if (regno >= FIRST_PSEUDO_REGISTER)
4216
    nr = 1;
4217
  else
4218
    {
4219
      unsigned int i;
4220
 
4221
      nr = hard_regno_nregs[regno][GET_MODE (x)];
4222
      /* Storing into a spilled-reg invalidates its contents.
4223
         This can happen if a block-local pseudo is allocated to that reg
4224
         and it wasn't spilled because this block's total need is 0.
4225
         Then some insn might have an optional reload and use this reg.  */
4226
      if (!regs)
4227
        for (i = 0; i < nr; i++)
4228
          /* But don't do this if the reg actually serves as an output
4229
             reload reg in the current instruction.  */
4230
          if (n_reloads == 0
4231
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4232
            {
4233
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4234
              CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4235
              spill_reg_store[regno + i] = 0;
4236
            }
4237
    }
4238
 
4239
  if (regs)
4240
    while (nr-- > 0)
4241
      SET_REGNO_REG_SET (regs, regno + nr);
4242
  else
4243
    {
4244
      /* Since value of X has changed,
4245
         forget any value previously copied from it.  */
4246
 
4247
      while (nr-- > 0)
4248
        /* But don't forget a copy if this is the output reload
4249
           that establishes the copy's validity.  */
4250
        if (n_reloads == 0
4251
            || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4252
          reg_last_reload_reg[regno + nr] = 0;
4253
     }
4254
}
4255
 
4256
/* Forget the reloads marked in the regset by the previous function.  */
4257
static void
4258
forget_marked_reloads (regset regs)
4259
{
4260
  unsigned int reg;
4261
  reg_set_iterator rsi;
4262
  EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4263
    {
4264
      if (reg < FIRST_PSEUDO_REGISTER
4265
          /* But don't do this if the reg actually serves as an output
4266
             reload reg in the current instruction.  */
4267
          && (n_reloads == 0
4268
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4269
          {
4270
            CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4271
            CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
4272
            spill_reg_store[reg] = 0;
4273
          }
4274
      if (n_reloads == 0
4275
          || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4276
        reg_last_reload_reg[reg] = 0;
4277
    }
4278
}
4279
 
4280
/* The following HARD_REG_SETs indicate when each hard register is
4281
   used for a reload of various parts of the current insn.  */
4282
 
4283
/* If reg is unavailable for all reloads.  */
4284
static HARD_REG_SET reload_reg_unavailable;
4285
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4286
static HARD_REG_SET reload_reg_used;
4287
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4288
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4289
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4290
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4291
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4292
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4293
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4294
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4295
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4296
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4297
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4298
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4299
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4300
static HARD_REG_SET reload_reg_used_in_op_addr;
4301
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4302
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4303
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
4304
static HARD_REG_SET reload_reg_used_in_insn;
4305
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4306
static HARD_REG_SET reload_reg_used_in_other_addr;
4307
 
4308
/* If reg is in use as a reload reg for any sort of reload.  */
4309
static HARD_REG_SET reload_reg_used_at_all;
4310
 
4311
/* If reg is in use as an inherited reload.  We just mark the first register
4312
   in the group.  */
4313
static HARD_REG_SET reload_reg_used_for_inherit;
4314
 
4315
/* Records which hard regs are used in any way, either as explicit use or
4316
   by being allocated to a pseudo during any point of the current insn.  */
4317
static HARD_REG_SET reg_used_in_insn;
4318
 
4319
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4320
   TYPE. MODE is used to indicate how many consecutive regs are
4321
   actually used.  */
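/* For instance, on a target where hard_regno_nregs[2][DImode] is 2,
   marking regno 2 for a RELOAD_FOR_INPUT of operand 0 sets bits 2 and 3
   both in reload_reg_used_in_input[0] and in reload_reg_used_at_all.  */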
4322
 
4323
static void
4324
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4325
                        enum machine_mode mode)
4326
{
4327
  unsigned int nregs = hard_regno_nregs[regno][mode];
4328
  unsigned int i;
4329
 
4330
  for (i = regno; i < nregs + regno; i++)
4331
    {
4332
      switch (type)
4333
        {
4334
        case RELOAD_OTHER:
4335
          SET_HARD_REG_BIT (reload_reg_used, i);
4336
          break;
4337
 
4338
        case RELOAD_FOR_INPUT_ADDRESS:
4339
          SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4340
          break;
4341
 
4342
        case RELOAD_FOR_INPADDR_ADDRESS:
4343
          SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4344
          break;
4345
 
4346
        case RELOAD_FOR_OUTPUT_ADDRESS:
4347
          SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4348
          break;
4349
 
4350
        case RELOAD_FOR_OUTADDR_ADDRESS:
4351
          SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4352
          break;
4353
 
4354
        case RELOAD_FOR_OPERAND_ADDRESS:
4355
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4356
          break;
4357
 
4358
        case RELOAD_FOR_OPADDR_ADDR:
4359
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4360
          break;
4361
 
4362
        case RELOAD_FOR_OTHER_ADDRESS:
4363
          SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4364
          break;
4365
 
4366
        case RELOAD_FOR_INPUT:
4367
          SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4368
          break;
4369
 
4370
        case RELOAD_FOR_OUTPUT:
4371
          SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4372
          break;
4373
 
4374
        case RELOAD_FOR_INSN:
4375
          SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4376
          break;
4377
        }
4378
 
4379
      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4380
    }
4381
}
4382
 
4383
/* Similarly, but show REGNO is no longer in use for a reload.  */
4384
 
4385
static void
4386
clear_reload_reg_in_use (unsigned int regno, int opnum,
4387
                         enum reload_type type, enum machine_mode mode)
4388
{
4389
  unsigned int nregs = hard_regno_nregs[regno][mode];
4390
  unsigned int start_regno, end_regno, r;
4391
  int i;
4392
  /* A complication is that for some reload types, inheritance might
4393
     allow multiple reloads of the same types to share a reload register.
4394
     We set check_opnum if we have to check only reloads with the same
4395
     operand number, and check_any if we have to check all reloads.  */
4396
  int check_opnum = 0;
4397
  int check_any = 0;
4398
  HARD_REG_SET *used_in_set;
4399
 
4400
  switch (type)
4401
    {
4402
    case RELOAD_OTHER:
4403
      used_in_set = &reload_reg_used;
4404
      break;
4405
 
4406
    case RELOAD_FOR_INPUT_ADDRESS:
4407
      used_in_set = &reload_reg_used_in_input_addr[opnum];
4408
      break;
4409
 
4410
    case RELOAD_FOR_INPADDR_ADDRESS:
4411
      check_opnum = 1;
4412
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4413
      break;
4414
 
4415
    case RELOAD_FOR_OUTPUT_ADDRESS:
4416
      used_in_set = &reload_reg_used_in_output_addr[opnum];
4417
      break;
4418
 
4419
    case RELOAD_FOR_OUTADDR_ADDRESS:
4420
      check_opnum = 1;
4421
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4422
      break;
4423
 
4424
    case RELOAD_FOR_OPERAND_ADDRESS:
4425
      used_in_set = &reload_reg_used_in_op_addr;
4426
      break;
4427
 
4428
    case RELOAD_FOR_OPADDR_ADDR:
4429
      check_any = 1;
4430
      used_in_set = &reload_reg_used_in_op_addr_reload;
4431
      break;
4432
 
4433
    case RELOAD_FOR_OTHER_ADDRESS:
4434
      used_in_set = &reload_reg_used_in_other_addr;
4435
      check_any = 1;
4436
      break;
4437
 
4438
    case RELOAD_FOR_INPUT:
4439
      used_in_set = &reload_reg_used_in_input[opnum];
4440
      break;
4441
 
4442
    case RELOAD_FOR_OUTPUT:
4443
      used_in_set = &reload_reg_used_in_output[opnum];
4444
      break;
4445
 
4446
    case RELOAD_FOR_INSN:
4447
      used_in_set = &reload_reg_used_in_insn;
4448
      break;
4449
    default:
4450
      gcc_unreachable ();
4451
    }
4452
  /* We resolve conflicts with remaining reloads of the same type by
4453
     excluding the intervals of reload registers used by them from the
4454
     interval of freed reload registers.  Since we only keep track of
4455
     one set of interval bounds, we might have to exclude somewhat
4456
     more than what would be necessary if we used a HARD_REG_SET here.
4457
     But this should only happen very infrequently, so there should
4458
     be no reason to worry about it.  */
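  /* As a concrete illustration: when freeing regs 4..7 (start_regno 4,
     end_regno 8), a conflicting reload of the same type that still
     occupies regs 4..5 moves start_regno up to 6, so that only 6..7 are
     freed; one occupying only reg 6 instead moves end_regno down to 6,
     freeing just 4..5.  */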
4459
 
4460
  start_regno = regno;
4461
  end_regno = regno + nregs;
4462
  if (check_opnum || check_any)
4463
    {
4464
      for (i = n_reloads - 1; i >= 0; i--)
4465
        {
4466
          if (rld[i].when_needed == type
4467
              && (check_any || rld[i].opnum == opnum)
4468
              && rld[i].reg_rtx)
4469
            {
4470
              unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4471
              unsigned int conflict_end
4472
                = (conflict_start
4473
                   + hard_regno_nregs[conflict_start][rld[i].mode]);
4474
 
4475
              /* If there is an overlap with the first to-be-freed register,
4476
                 adjust the interval start.  */
4477
              if (conflict_start <= start_regno && conflict_end > start_regno)
4478
                start_regno = conflict_end;
4479
              /* Otherwise, if there is a conflict with one of the other
4480
                 to-be-freed registers, adjust the interval end.  */
4481
              if (conflict_start > start_regno && conflict_start < end_regno)
4482
                end_regno = conflict_start;
4483
            }
4484
        }
4485
    }
4486
 
4487
  for (r = start_regno; r < end_regno; r++)
4488
    CLEAR_HARD_REG_BIT (*used_in_set, r);
4489
}
4490
 
4491
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4492
   specified by OPNUM and TYPE.  */
4493
 
4494
static int
4495
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4496
{
4497
  int i;
4498
 
4499
  /* In use for a RELOAD_OTHER means it's not available for anything.  */
4500
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4501
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4502
    return 0;
4503
 
4504
  switch (type)
4505
    {
4506
    case RELOAD_OTHER:
4507
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
4508
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4509
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4510
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4511
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4512
        return 0;
4513
 
4514
      for (i = 0; i < reload_n_operands; i++)
4515
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4516
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4517
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4518
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4519
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4520
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4521
          return 0;
4522
 
4523
      return 1;
4524
 
4525
    case RELOAD_FOR_INPUT:
4526
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4527
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4528
        return 0;
4529
 
4530
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4531
        return 0;
4532
 
4533
      /* If it is used for some other input, can't use it.  */
4534
      for (i = 0; i < reload_n_operands; i++)
4535
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4536
          return 0;
4537
 
4538
      /* If it is used in a later operand's address, can't use it.  */
4539
      for (i = opnum + 1; i < reload_n_operands; i++)
4540
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4541
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4542
          return 0;
4543
 
4544
      return 1;
4545
 
4546
    case RELOAD_FOR_INPUT_ADDRESS:
4547
      /* Can't use a register if it is used for an input address for this
4548
         operand or used as an input in an earlier one.  */
4549
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4550
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4551
        return 0;
4552
 
4553
      for (i = 0; i < opnum; i++)
4554
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4555
          return 0;
4556
 
4557
      return 1;
4558
 
4559
    case RELOAD_FOR_INPADDR_ADDRESS:
4560
      /* Can't use a register if it is used for an input address
4561
         for this operand or used as an input in an earlier
4562
         one.  */
4563
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4564
        return 0;
4565
 
4566
      for (i = 0; i < opnum; i++)
4567
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4568
          return 0;
4569
 
4570
      return 1;
4571
 
4572
    case RELOAD_FOR_OUTPUT_ADDRESS:
4573
      /* Can't use a register if it is used for an output address for this
4574
         operand or used as an output in this or a later operand.  Note
4575
         that multiple output operands are emitted in reverse order, so
4576
         the conflicting ones are those with lower indices.  */
4577
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4578
        return 0;
4579
 
4580
      for (i = 0; i <= opnum; i++)
4581
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4582
          return 0;
4583
 
4584
      return 1;
4585
 
4586
    case RELOAD_FOR_OUTADDR_ADDRESS:
4587
      /* Can't use a register if it is used for an output address
4588
         for this operand or used as an output in this or a
4589
         later operand.  Note that multiple output operands are
4590
         emitted in reverse order, so the conflicting ones are
4591
         those with lower indices.  */
4592
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4593
        return 0;
4594
 
4595
      for (i = 0; i <= opnum; i++)
4596
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4597
          return 0;
4598
 
4599
      return 1;
4600
 
4601
    case RELOAD_FOR_OPERAND_ADDRESS:
4602
      for (i = 0; i < reload_n_operands; i++)
4603
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4604
          return 0;
4605
 
4606
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4607
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4608
 
4609
    case RELOAD_FOR_OPADDR_ADDR:
4610
      for (i = 0; i < reload_n_operands; i++)
4611
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4612
          return 0;
4613
 
4614
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4615
 
4616
    case RELOAD_FOR_OUTPUT:
4617
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4618
         outputs, or an operand address for this or an earlier output.
4619
         Note that multiple output operands are emitted in reverse order,
4620
         so the conflicting ones are those with higher indices.  */
4621
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4622
        return 0;
4623
 
4624
      for (i = 0; i < reload_n_operands; i++)
4625
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4626
          return 0;
4627
 
4628
      for (i = opnum; i < reload_n_operands; i++)
4629
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4630
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4631
          return 0;
4632
 
4633
      return 1;
4634
 
4635
    case RELOAD_FOR_INSN:
4636
      for (i = 0; i < reload_n_operands; i++)
4637
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4638
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4639
          return 0;
4640
 
4641
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4642
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4643
 
4644
    case RELOAD_FOR_OTHER_ADDRESS:
4645
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4646
 
4647
    default:
4648
      gcc_unreachable ();
4649
    }
4650
}
4651
 
4652
/* Return 1 if the value in reload reg REGNO, as used by a reload
4653
   needed for the part of the insn specified by OPNUM and TYPE,
4654
   is still available in REGNO at the end of the insn.
4655
 
4656
   We can assume that the reload reg was already tested for availability
4657
   at the time it is needed, and we should not check this again,
4658
   in case the reg has already been marked in use.  */
4659
 
4660
static int
4661
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4662
{
4663
  int i;
4664
 
4665
  switch (type)
4666
    {
4667
    case RELOAD_OTHER:
4668
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4669
         its value must reach the end.  */
4670
      return 1;
4671
 
4672
      /* If this use is for part of the insn,
4673
         its value reaches if no subsequent part uses the same register.
4674
         Just like the above function, don't try to do this with lots
4675
         of fallthroughs.  */
4676
 
4677
    case RELOAD_FOR_OTHER_ADDRESS:
4678
      /* Here we check for everything else, since these don't conflict
4679
         with anything else and everything comes later.  */
4680
 
4681
      for (i = 0; i < reload_n_operands; i++)
4682
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4683
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4684
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4685
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4686
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4687
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4688
          return 0;
4689
 
4690
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4691
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4692
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4693
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4694
 
4695
    case RELOAD_FOR_INPUT_ADDRESS:
4696
    case RELOAD_FOR_INPADDR_ADDRESS:
4697
      /* Similar, except that we check only for this and subsequent inputs
4698
         and the address of only subsequent inputs and we do not need
4699
         to check for RELOAD_OTHER objects since they are known not to
4700
         conflict.  */
4701
 
4702
      for (i = opnum; i < reload_n_operands; i++)
4703
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4704
          return 0;
4705
 
4706
      for (i = opnum + 1; i < reload_n_operands; i++)
4707
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4708
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4709
          return 0;
4710
 
4711
      for (i = 0; i < reload_n_operands; i++)
4712
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4713
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4714
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4715
          return 0;
4716
 
4717
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4718
        return 0;
4719
 
4720
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4721
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4722
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4723
 
4724
    case RELOAD_FOR_INPUT:
4725
      /* Similar to input address, except we start at the next operand for
4726
         both input and input address and we do not check for
4727
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4728
         would conflict.  */
4729
 
4730
      for (i = opnum + 1; i < reload_n_operands; i++)
4731
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4732
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4733
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4734
          return 0;
4735
 
4736
      /* ... fall through ...  */
4737
 
4738
    case RELOAD_FOR_OPERAND_ADDRESS:
4739
      /* Check outputs and their addresses.  */
4740
 
4741
      for (i = 0; i < reload_n_operands; i++)
4742
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4743
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4744
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4745
          return 0;
4746
 
4747
      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4748
 
4749
    case RELOAD_FOR_OPADDR_ADDR:
4750
      for (i = 0; i < reload_n_operands; i++)
4751
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4752
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4753
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4754
          return 0;
4755
 
4756
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4757
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4758
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4759
 
4760
    case RELOAD_FOR_INSN:
4761
      /* These conflict with other outputs with RELOAD_OTHER.  So
4762
         we need only check for output addresses.  */
4763
 
4764
      opnum = reload_n_operands;
4765
 
4766
      /* ... fall through ...  */
4767
 
4768
    case RELOAD_FOR_OUTPUT:
4769
    case RELOAD_FOR_OUTPUT_ADDRESS:
4770
    case RELOAD_FOR_OUTADDR_ADDRESS:
4771
      /* We already know these can't conflict with a later output.  So the
4772
         only thing to check are later output addresses.
4773
         Note that multiple output operands are emitted in reverse order,
4774
         so the conflicting ones are those with lower indices.  */
4775
      for (i = 0; i < opnum; i++)
4776
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4777
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4778
          return 0;
4779
 
4780
      return 1;
4781
 
4782
    default:
4783
      gcc_unreachable ();
4784
    }
4785
}
4786
 
4787
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4788
   Return 0 otherwise.
4789
 
4790
   This function uses the same algorithm as reload_reg_free_p above.  */
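/* For example, a RELOAD_FOR_INPUT reload for operand 1 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2 (the address of a later
   input) but not with one for operand 0, while RELOAD_OTHER conflicts
   with every other reload.  */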
4791
 
4792
static int
4793
reloads_conflict (int r1, int r2)
4794
{
4795
  enum reload_type r1_type = rld[r1].when_needed;
4796
  enum reload_type r2_type = rld[r2].when_needed;
4797
  int r1_opnum = rld[r1].opnum;
4798
  int r2_opnum = rld[r2].opnum;
4799
 
4800
  /* RELOAD_OTHER conflicts with everything.  */
4801
  if (r2_type == RELOAD_OTHER)
4802
    return 1;
4803
 
4804
  /* Otherwise, check conflicts differently for each type.  */
4805
 
4806
  switch (r1_type)
4807
    {
4808
    case RELOAD_FOR_INPUT:
4809
      return (r2_type == RELOAD_FOR_INSN
4810
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4811
              || r2_type == RELOAD_FOR_OPADDR_ADDR
4812
              || r2_type == RELOAD_FOR_INPUT
4813
              || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4814
                   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4815
                  && r2_opnum > r1_opnum));
4816
 
4817
    case RELOAD_FOR_INPUT_ADDRESS:
4818
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4819
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4820
 
4821
    case RELOAD_FOR_INPADDR_ADDRESS:
4822
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4823
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4824
 
4825
    case RELOAD_FOR_OUTPUT_ADDRESS:
4826
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4827
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4828
 
4829
    case RELOAD_FOR_OUTADDR_ADDRESS:
4830
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4831
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4832
 
4833
    case RELOAD_FOR_OPERAND_ADDRESS:
4834
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4835
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4836
 
4837
    case RELOAD_FOR_OPADDR_ADDR:
4838
      return (r2_type == RELOAD_FOR_INPUT
4839
              || r2_type == RELOAD_FOR_OPADDR_ADDR);
4840
 
4841
    case RELOAD_FOR_OUTPUT:
4842
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4843
              || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4844
                   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4845
                  && r2_opnum >= r1_opnum));
4846
 
4847
    case RELOAD_FOR_INSN:
4848
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4849
              || r2_type == RELOAD_FOR_INSN
4850
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4851
 
4852
    case RELOAD_FOR_OTHER_ADDRESS:
4853
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4854
 
4855
    case RELOAD_OTHER:
4856
      return 1;
4857
 
4858
    default:
4859
      gcc_unreachable ();
4860
    }
4861
}
4862
 
4863
/* Indexed by reload number, 1 if incoming value
4864
   inherited from previous insns.  */
4865
static char reload_inherited[MAX_RELOADS];
4866
 
4867
/* For an inherited reload, this is the insn the reload was inherited from,
4868
   if we know it.  Otherwise, this is 0.  */
4869
static rtx reload_inheritance_insn[MAX_RELOADS];
4870
 
4871
/* If nonzero, this is a place to get the value of the reload,
4872
   rather than using reload_in.  */
4873
static rtx reload_override_in[MAX_RELOADS];
4874
 
4875
/* For each reload, the hard register number of the register used,
4876
   or -1 if we did not need a register for this reload.  */
4877
static int reload_spill_index[MAX_RELOADS];
4878
 
4879
/* Subroutine of free_for_value_p, used to check a single register.
4880
   START_REGNO is the starting regno of the full reload register
4881
   (possibly comprising multiple hard registers) that we are considering.  */
4882
 
4883
static int
4884
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
4885
                             enum reload_type type, rtx value, rtx out,
4886
                             int reloadnum, int ignore_address_reloads)
4887
{
4888
  int time1;
4889
  /* Set if we see an input reload that must not share its reload register
4890
     with any new earlyclobber, but might otherwise share the reload
4891
     register with an output or input-output reload.  */
4892
  int check_earlyclobber = 0;
4893
  int i;
4894
  int copy = 0;
4895
 
4896
  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4897
    return 0;
4898
 
4899
  if (out == const0_rtx)
4900
    {
4901
      copy = 1;
4902
      out = NULL_RTX;
4903
    }
4904
 
4905
  /* We use some pseudo 'time' value to check if the lifetimes of the
4906
     new register use would overlap with the one of a previous reload
4907
     that is not read-only or uses a different value.
4908
     The 'time' used doesn't have to be linear in any shape or form, just
4909
     monotonic.
4910
     Some reload types use different 'buckets' for each operand.
4911
     So there are MAX_RECOG_OPERANDS different time values for each
4912
     such reload type.
4913
     We compute TIME1 as the time when the register for the prospective
4914
     new reload ceases to be live, and TIME2 for each existing
4915
     reload as the time when that the reload register of that reload
4916
     becomes live.
4917
     Where there is little to be gained by exact lifetime calculations,
4918
     we just make conservative assumptions, i.e. a longer lifetime;
4919
     this is done in the 'default:' cases.  */
4920
  switch (type)
4921
    {
4922
    case RELOAD_FOR_OTHER_ADDRESS:
4923
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
4924
      time1 = copy ? 0 : 1;
4925
      break;
4926
    case RELOAD_OTHER:
4927
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4928
      break;
4929
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4930
         RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
4931
         respectively, to the time values for these, we get distinct time
4932
         values.  To get distinct time values for each operand, we have to
4933
         multiply opnum by at least three.  We round that up to four because
4934
         multiply by four is often cheaper.  */
4935
    case RELOAD_FOR_INPADDR_ADDRESS:
4936
      time1 = opnum * 4 + 2;
4937
      break;
4938
    case RELOAD_FOR_INPUT_ADDRESS:
4939
      time1 = opnum * 4 + 3;
4940
      break;
4941
    case RELOAD_FOR_INPUT:
4942
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4943
         executes (inclusive).  */
4944
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4945
      break;
4946
    case RELOAD_FOR_OPADDR_ADDR:
4947
      /* opnum * 4 + 4
4948
         <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4949
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
4950
      break;
4951
    case RELOAD_FOR_OPERAND_ADDRESS:
4952
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4953
         is executed.  */
4954
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4955
      break;
4956
    case RELOAD_FOR_OUTADDR_ADDRESS:
4957
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4958
      break;
4959
    case RELOAD_FOR_OUTPUT_ADDRESS:
4960
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4961
      break;
4962
    default:
4963
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
4964
    }
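  /* To make the bucket scheme concrete: for operand 1, a
     RELOAD_FOR_INPADDR_ADDRESS reload gets time1 = 1 * 4 + 2 = 6, a
     RELOAD_FOR_INPUT_ADDRESS reload gets 7, and a copy-only
     RELOAD_FOR_INPUT reload gets 8, whereas a non-copy RELOAD_FOR_INPUT
     reload stays live until the insn executes and gets
     MAX_RECOG_OPERANDS * 4 + 3.  */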
4965
 
4966
  for (i = 0; i < n_reloads; i++)
4967
    {
4968
      rtx reg = rld[i].reg_rtx;
4969
      if (reg && REG_P (reg)
4970
          && ((unsigned) regno - true_regnum (reg)
4971
              <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
4972
          && i != reloadnum)
4973
        {
4974
          rtx other_input = rld[i].in;
4975
 
4976
          /* If the other reload loads the same input value, that
4977
             will not cause a conflict only if it's loading it into
4978
             the same register.  */
4979
          if (true_regnum (reg) != start_regno)
4980
            other_input = NULL_RTX;
4981
          if (! other_input || ! rtx_equal_p (other_input, value)
4982
              || rld[i].out || out)
4983
            {
4984
              int time2;
4985
              switch (rld[i].when_needed)
4986
                {
4987
                case RELOAD_FOR_OTHER_ADDRESS:
4988
                  time2 = 0;
4989
                  break;
4990
                case RELOAD_FOR_INPADDR_ADDRESS:
4991
                  /* find_reloads makes sure that a
4992
                     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4993
                     by at most one - the first -
4994
                     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
4995
                     address reload is inherited, the address address reload
4996
                     goes away, so we can ignore this conflict.  */
4997
                  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4998
                      && ignore_address_reloads
4999
                      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5000
                         Then the address address is still needed to store
5001
                         back the new address.  */
5002
                      && ! rld[reloadnum].out)
5003
                    continue;
5004
                  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5005
                     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5006
                     reloads go away.  */
5007
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5008
                      && ignore_address_reloads
5009
                      /* Unless we are reloading an auto_inc expression.  */
5010
                      && ! rld[reloadnum].out)
5011
                    continue;
5012
                  time2 = rld[i].opnum * 4 + 2;
5013
                  break;
5014
                case RELOAD_FOR_INPUT_ADDRESS:
5015
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5016
                      && ignore_address_reloads
5017
                      && ! rld[reloadnum].out)
5018
                    continue;
5019
                  time2 = rld[i].opnum * 4 + 3;
5020
                  break;
5021
                case RELOAD_FOR_INPUT:
5022
                  time2 = rld[i].opnum * 4 + 4;
5023
                  check_earlyclobber = 1;
5024
                  break;
5025
                  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5026
                     == MAX_RECOG_OPERANDS * 4  */
5027
                case RELOAD_FOR_OPADDR_ADDR:
5028
                  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5029
                      && ignore_address_reloads
5030
                      && ! rld[reloadnum].out)
5031
                    continue;
5032
                  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5033
                  break;
5034
                case RELOAD_FOR_OPERAND_ADDRESS:
5035
                  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5036
                  check_earlyclobber = 1;
5037
                  break;
5038
                case RELOAD_FOR_INSN:
5039
                  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5040
                  break;
5041
                case RELOAD_FOR_OUTPUT:
5042
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5043
                     instruction is executed.  */
5044
                  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5045
                  break;
5046
                  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5047
                     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5048
                     value.  */
5049
                case RELOAD_FOR_OUTADDR_ADDRESS:
5050
                  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5051
                      && ignore_address_reloads
5052
                      && ! rld[reloadnum].out)
5053
                    continue;
5054
                  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5055
                  break;
5056
                case RELOAD_FOR_OUTPUT_ADDRESS:
5057
                  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5058
                  break;
5059
                case RELOAD_OTHER:
5060
                  /* If there is no conflict in the input part, handle this
5061
                     like an output reload.  */
5062
                  if (! rld[i].in || rtx_equal_p (other_input, value))
5063
                    {
5064
                      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5065
                      /* Earlyclobbered outputs must conflict with inputs.  */
5066
                      if (earlyclobber_operand_p (rld[i].out))
5067
                        time2 = MAX_RECOG_OPERANDS * 4 + 3;
5068
 
5069
                      break;
5070
                    }
5071
                  time2 = 1;
5072
                  /* RELOAD_OTHER might be live beyond instruction execution,
5073
                     but this is not obvious when we set time2 = 1.  So check
5074
                     here if there might be a problem with the new reload
5075
                     clobbering the register used by the RELOAD_OTHER.  */
5076
                  if (out)
5077
                    return 0;
5078
                  break;
5079
                default:
5080
                  return 0;
5081
                }
5082
              if ((time1 >= time2
5083
                   && (! rld[i].in || rld[i].out
5084
                       || ! rtx_equal_p (other_input, value)))
5085
                  || (out && rld[reloadnum].out_reg
5086
                      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5087
                return 0;
5088
            }
5089
        }
5090
    }
5091
 
5092
  /* Earlyclobbered outputs must conflict with inputs.  */
5093
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
5094
    return 0;
5095
 
5096
  return 1;
5097
}
5098
 
5099
/* Return 1 if the value in reload reg REGNO, as used by a reload
5100
   needed for the part of the insn specified by OPNUM and TYPE,
5101
   may be used to load VALUE into it.
5102
 
5103
   MODE is the mode in which the register is used; this is needed to
5104
   determine how many hard regs to test.
5105
 
5106
   Other read-only reloads with the same value do not conflict
5107
   unless OUT is nonzero and these other reloads have to live while
5108
   output reloads live.
5109
   If OUT is CONST0_RTX, this is a special case: it means that the
5110
   test should not be for using register REGNO as reload register, but
5111
   for copying from register REGNO into the reload register.
5112
 
5113
   RELOADNUM is the number of the reload we want to load this value for;
5114
   a reload does not conflict with itself.
5115
 
5116
   When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5117
   reloads that load an address for the very reload we are considering.
5118
 
5119
   The caller has to make sure that there is no conflict with the return
5120
   register.  */
5121
 
5122
static int
5123
free_for_value_p (int regno, enum machine_mode mode, int opnum,
5124
                  enum reload_type type, rtx value, rtx out, int reloadnum,
5125
                  int ignore_address_reloads)
5126
{
5127
  int nregs = hard_regno_nregs[regno][mode];
5128
  while (nregs-- > 0)
5129
    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5130
                                       value, out, reloadnum,
5131
                                       ignore_address_reloads))
5132
      return 0;
5133
  return 1;
5134
}
5135
 
5136
/* Return nonzero if the rtx X is invariant over the current function.  */
5137
/* ??? Actually, the places where we use this expect exactly what is
5138
   tested here, and not everything that is function invariant.  In
5139
   particular, the frame pointer and arg pointer are special cased;
5140
   pic_offset_table_rtx is not, and we must not spill these things to
5141
   memory.  */
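/* Under this definition, constants, the frame and arg pointers, and a
   PLUS of one of those pointers with a constant all count as invariant,
   while pic_offset_table_rtx deliberately does not, per the caveat
   above.  */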
5142
 
5143
int
5144
function_invariant_p (rtx x)
5145
{
5146
  if (CONSTANT_P (x))
5147
    return 1;
5148
  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5149
    return 1;
5150
  if (GET_CODE (x) == PLUS
5151
      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5152
      && CONSTANT_P (XEXP (x, 1)))
5153
    return 1;
5154
  return 0;
5155
}
5156
 
5157
/* Determine whether the reload reg X overlaps any rtx'es used for
5158
   overriding inheritance.  Return nonzero if so.  */
5159
 
5160
static int
5161
conflicts_with_override (rtx x)
5162
{
5163
  int i;
5164
  for (i = 0; i < n_reloads; i++)
5165
    if (reload_override_in[i]
5166
        && reg_overlap_mentioned_p (x, reload_override_in[i]))
5167
      return 1;
5168
  return 0;
5169
}
5170
 
5171
/* Give an error message saying we failed to find a reload for INSN,
5172
   and clear out reload R.  */
5173
static void
5174
failed_reload (rtx insn, int r)
5175
{
5176
  if (asm_noperands (PATTERN (insn)) < 0)
5177
    /* It's the compiler's fault.  */
5178
    fatal_insn ("could not find a spill register", insn);
5179
 
5180
  /* It's the user's fault; the operand's mode and constraint
5181
     don't match.  Disable this reload so we don't crash in final.  */
5182
  error_for_asm (insn,
5183
                 "%<asm%> operand constraint incompatible with operand size");
5184
  rld[r].in = 0;
5185
  rld[r].out = 0;
5186
  rld[r].reg_rtx = 0;
5187
  rld[r].optional = 1;
5188
  rld[r].secondary_p = 1;
5189
}
5190
 
5191
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5192
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5193
   successful.  */
5194
static int
5195
set_reload_reg (int i, int r)
5196
{
5197
  int regno;
5198
  rtx reg = spill_reg_rtx[i];
5199
 
5200
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5201
    spill_reg_rtx[i] = reg
5202
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5203
 
5204
  regno = true_regnum (reg);
5205
 
5206
  /* Detect when the reload reg can't hold the reload mode.
5207
     This used to be one `if', but Sequent compiler can't handle that.  */
5208
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5209
    {
5210
      enum machine_mode test_mode = VOIDmode;
5211
      if (rld[r].in)
5212
        test_mode = GET_MODE (rld[r].in);
5213
      /* If rld[r].in has VOIDmode, it means we will load it
5214
         in whatever mode the reload reg has: to wit, rld[r].mode.
5215
         We have already tested that for validity.  */
5216
      /* Aside from that, we need to test that the expressions
5217
         to reload from or into have modes which are valid for this
5218
         reload register.  Otherwise the reload insns would be invalid.  */
5219
      if (! (rld[r].in != 0 && test_mode != VOIDmode
5220
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5221
        if (! (rld[r].out != 0
5222
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5223
          {
5224
            /* The reg is OK.  */
5225
            last_spill_reg = i;
5226
 
5227
            /* Mark as in use for this insn the reload regs we use
5228
               for this.  */
5229
            mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5230
                                    rld[r].when_needed, rld[r].mode);
5231
 
5232
            rld[r].reg_rtx = reg;
5233
            reload_spill_index[r] = spill_regs[i];
5234
            return 1;
5235
          }
5236
    }
5237
  return 0;
5238
}
5239
 
5240
/* Find a spill register to use as a reload register for reload R.
5241
   LAST_RELOAD is nonzero if this is the last reload for the insn being
5242
   processed.
5243
 
5244
   Set rld[R].reg_rtx to the register allocated.
5245
 
5246
   We return 1 if successful, or 0 if we couldn't find a spill reg and
5247
   we didn't change anything.  */
5248
 
5249
static int
5250
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5251
                     int last_reload)
5252
{
5253
  int i, pass, count;
5254
 
5255
  /* If we put this reload ahead, thinking it is a group,
5256
     then insist on finding a group.  Otherwise we can grab a
5257
     reg that some other reload needs.
5258
     (That can happen when we have a 68000 DATA_OR_FP_REG
5259
     which is a group of data regs or one fp reg.)
5260
     We need not be so restrictive if there are no more reloads
5261
     for this insn.
5262
 
5263
     ??? Really it would be nicer to have smarter handling
5264
     for that kind of reg class, where a problem like this is normal.
5265
     Perhaps those classes should be avoided for reloading
5266
     by use of more alternatives.  */
5267
 
5268
  int force_group = rld[r].nregs > 1 && ! last_reload;
5269
 
5270
  /* If we want a single register and haven't yet found one,
5271
     take any reg in the right class and not in use.
5272
     If we want a consecutive group, here is where we look for it.
5273
 
5274
     We use two passes so we can first look for reload regs to
5275
     reuse, which are already in use for other reloads in this insn,
5276
     and only then use additional registers.
5277
     I think that maximizing reuse is needed to make sure we don't
5278
     run out of reload regs.  Suppose we have three reloads, and
5279
     reloads A and B can share regs.  These need two regs.
5280
     Suppose A and B are given different regs.
5281
     That leaves none for C.  */
5282
  for (pass = 0; pass < 2; pass++)
5283
    {
5284
      /* I is the index in spill_regs.
5285
         We advance it round-robin between insns to use all spill regs
5286
         equally, so that inherited reloads have a chance
5287
         of leapfrogging each other.  */
5288
 
5289
      i = last_spill_reg;
5290
 
5291
      for (count = 0; count < n_spills; count++)
5292
        {
5293
          int class = (int) rld[r].class;
5294
          int regnum;
5295
 
5296
          i++;
5297
          if (i >= n_spills)
5298
            i -= n_spills;
5299
          regnum = spill_regs[i];
5300
 
5301
          if ((reload_reg_free_p (regnum, rld[r].opnum,
5302
                                  rld[r].when_needed)
5303
               || (rld[r].in
5304
                   /* We check reload_reg_used to make sure we
5305
                      don't clobber the return register.  */
5306
                   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5307
                   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5308
                                        rld[r].when_needed, rld[r].in,
5309
                                        rld[r].out, r, 1)))
5310
              && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5311
              && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5312
              /* Look first for regs to share, then for unshared.  But
5313
                 don't share regs used for inherited reloads; they are
5314
                 the ones we want to preserve.  */
5315
              && (pass
5316
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5317
                                         regnum)
5318
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5319
                                              regnum))))
5320
            {
5321
              int nr = hard_regno_nregs[regnum][rld[r].mode];
5322
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5323
                 (on 68000) got us two FP regs.  If NR is 1,
5324
                 we would reject both of them.  */
5325
              if (force_group)
5326
                nr = rld[r].nregs;
5327
              /* If we need only one reg, we have already won.  */
5328
              if (nr == 1)
5329
                {
5330
                  /* But reject a single reg if we demand a group.  */
5331
                  if (force_group)
5332
                    continue;
5333
                  break;
5334
                }
5335
              /* Otherwise check that as many consecutive regs as we need
5336
                 are available here.  */
5337
              while (nr > 1)
5338
                {
5339
                  int regno = regnum + nr - 1;
5340
                  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5341
                        && spill_reg_order[regno] >= 0
5342
                        && reload_reg_free_p (regno, rld[r].opnum,
5343
                                              rld[r].when_needed)))
5344
                    break;
5345
                  nr--;
5346
                }
5347
              if (nr == 1)
5348
                break;
5349
            }
5350
        }
5351
 
5352
      /* If we found something on pass 1, omit pass 2.  */
5353
      if (count < n_spills)
5354
        break;
5355
    }
5356
 
5357
  /* We should have found a spill register by now.  */
5358
  if (count >= n_spills)
5359
    return 0;
5360
 
5361
  /* I is the index in SPILL_REG_RTX of the reload register we are to
5362
     allocate.  Get an rtx for it and find its register number.  */
5363
 
5364
  return set_reload_reg (i, r);
5365
}
5366
 
5367
/* Initialize all the tables needed to allocate reload registers.
5368
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5369
   is the array we use to restore the reg_rtx field for every reload.  */
5370
 
5371
static void
5372
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5373
{
5374
  int i;
5375
 
5376
  for (i = 0; i < n_reloads; i++)
5377
    rld[i].reg_rtx = save_reload_reg_rtx[i];
5378
 
5379
  memset (reload_inherited, 0, MAX_RELOADS);
5380
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5381
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5382
 
5383
  CLEAR_HARD_REG_SET (reload_reg_used);
5384
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5385
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5386
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5387
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5388
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5389
 
5390
  CLEAR_HARD_REG_SET (reg_used_in_insn);
5391
  {
5392
    HARD_REG_SET tmp;
5393
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5394
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5395
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5396
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5397
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5398
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5399
  }
5400
 
5401
  for (i = 0; i < reload_n_operands; i++)
5402
    {
5403
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5404
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5405
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5406
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5407
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5408
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5409
    }
5410
 
5411
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5412
 
5413
  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5414
 
5415
  for (i = 0; i < n_reloads; i++)
5416
    /* If we have already decided to use a certain register,
5417
       don't use it in another way.  */
5418
    if (rld[i].reg_rtx)
5419
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5420
                              rld[i].when_needed, rld[i].mode);
5421
}
5422
 
5423
/* Assign hard reg targets for the pseudo-registers we must reload
5424
   into hard regs for this insn.
5425
   Also output the instructions to copy them in and out of the hard regs.
5426
 
5427
   For machines with register classes, we are responsible for
5428
   finding a reload reg in the proper class.  */
5429
 
5430
static void
5431
choose_reload_regs (struct insn_chain *chain)
5432
{
5433
  rtx insn = chain->insn;
5434
  int i, j;
5435
  unsigned int max_group_size = 1;
5436
  enum reg_class group_class = NO_REGS;
5437
  int pass, win, inheritance;
5438
 
5439
  rtx save_reload_reg_rtx[MAX_RELOADS];
5440
 
5441
  /* In order to be certain of getting the registers we need,
5442
     we must sort the reloads into order of increasing register class.
5443
     Then our grabbing of reload registers will parallel the process
5444
     that provided the reload registers.
5445
 
5446
     Also note whether any of the reloads wants a consecutive group of regs.
5447
     If so, record the maximum size of the group desired and what
5448
     register class contains all the groups needed by this insn.  */
5449
 
5450
  for (j = 0; j < n_reloads; j++)
5451
    {
5452
      reload_order[j] = j;
5453
      if (rld[j].reg_rtx != NULL_RTX)
5454
        {
5455
          gcc_assert (REG_P (rld[j].reg_rtx)
5456
                      && HARD_REGISTER_P (rld[j].reg_rtx));
5457
          reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5458
        }
5459
      else
5460
        reload_spill_index[j] = -1;
5461
 
5462
      if (rld[j].nregs > 1)
5463
        {
5464
          max_group_size = MAX (rld[j].nregs, max_group_size);
5465
          group_class
5466
            = reg_class_superunion[(int) rld[j].class][(int) group_class];
5467
        }
5468
 
5469
      save_reload_reg_rtx[j] = rld[j].reg_rtx;
5470
    }
5471
 
5472
  if (n_reloads > 1)
5473
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5474
 
5475
  /* If -O, try first with inheritance, then turning it off.
5476
     If not -O, don't do inheritance.
5477
     Using inheritance when not optimizing leads to paradoxes
5478
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5479
     because one side of the comparison might be inherited.  */
5480
  win = 0;
5481
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5482
    {
5483
      choose_reload_regs_init (chain, save_reload_reg_rtx);
5484
 
5485
      /* Process the reloads in order of preference just found.
5486
         Beyond this point, subregs can be found in reload_reg_rtx.
5487
 
5488
         This used to look for an existing reloaded home for all of the
5489
         reloads, and only then perform any new reloads.  But that could lose
5490
         if the reloads were done out of reg-class order because a later
5491
         reload with a looser constraint might have an old home in a register
5492
         needed by an earlier reload with a tighter constraint.
5493
 
5494
         To solve this, we make two passes over the reloads, in the order
5495
         described above.  In the first pass we try to inherit a reload
5496
         from a previous insn.  If there is a later reload that needs a
5497
         class that is a proper subset of the class being processed, we must
5498
         also allocate a spill register during the first pass.
5499
 
5500
         Then make a second pass over the reloads to allocate any reloads
5501
         that haven't been given registers yet.  */
5502
 
5503
      for (j = 0; j < n_reloads; j++)
5504
        {
5505
          int r = reload_order[j];
5506
          rtx search_equiv = NULL_RTX;
5507
 
5508
          /* Ignore reloads that got marked inoperative.  */
5509
          if (rld[r].out == 0 && rld[r].in == 0
5510
              && ! rld[r].secondary_p)
5511
            continue;
5512
 
5513
          /* If find_reloads chose to use reload_in or reload_out as a reload
5514
             register, we don't need to choose one.  Otherwise, try even if it
5515
             found one since we might save an insn if we find the value lying
5516
             around.
5517
             Try also when reload_in is a pseudo without a hard reg.  */
5518
          if (rld[r].in != 0 && rld[r].reg_rtx != 0
5519
              && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5520
                  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5521
                      && !MEM_P (rld[r].in)
5522
                      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5523
            continue;
5524
 
5525
#if 0 /* No longer needed for correct operation.
5526
         It might give better code, or might not; worth an experiment?  */
5527
          /* If this is an optional reload, we can't inherit from earlier insns
5528
             until we are sure that any non-optional reloads have been allocated.
5529
             The following code takes advantage of the fact that optional reloads
5530
             are at the end of reload_order.  */
5531
          if (rld[r].optional != 0)
5532
            for (i = 0; i < j; i++)
5533
              if ((rld[reload_order[i]].out != 0
5534
                   || rld[reload_order[i]].in != 0
5535
                   || rld[reload_order[i]].secondary_p)
5536
                  && ! rld[reload_order[i]].optional
5537
                  && rld[reload_order[i]].reg_rtx == 0)
5538
                allocate_reload_reg (chain, reload_order[i], 0);
5539
#endif
5540
 
5541
          /* First see if this pseudo is already available as reloaded
5542
             for a previous insn.  We cannot try to inherit for reloads
5543
             that are smaller than the maximum number of registers needed
5544
             for groups unless the register we would allocate cannot be used
5545
             for the groups.
5546
 
5547
             We could check here to see if this is a secondary reload for
5548
             an object that is already in a register of the desired class.
5549
             This would avoid the need for the secondary reload register.
5550
             But this is complex because we can't easily determine what
5551
             objects might want to be loaded via this reload.  So let a
5552
             register be allocated here.  In `emit_reload_insns' we suppress
5553
             one of the loads in the case described above.  */
5554
 
5555
          if (inheritance)
5556
            {
5557
              int byte = 0;
5558
              int regno = -1;
5559
              enum machine_mode mode = VOIDmode;
5560
 
5561
              if (rld[r].in == 0)
5562
                ;
5563
              else if (REG_P (rld[r].in))
5564
                {
5565
                  regno = REGNO (rld[r].in);
5566
                  mode = GET_MODE (rld[r].in);
5567
                }
5568
              else if (REG_P (rld[r].in_reg))
5569
                {
5570
                  regno = REGNO (rld[r].in_reg);
5571
                  mode = GET_MODE (rld[r].in_reg);
5572
                }
5573
              else if (GET_CODE (rld[r].in_reg) == SUBREG
5574
                       && REG_P (SUBREG_REG (rld[r].in_reg)))
5575
                {
5576
                  byte = SUBREG_BYTE (rld[r].in_reg);
5577
                  regno = REGNO (SUBREG_REG (rld[r].in_reg));
5578
                  if (regno < FIRST_PSEUDO_REGISTER)
5579
                    regno = subreg_regno (rld[r].in_reg);
5580
                  mode = GET_MODE (rld[r].in_reg);
5581
                }
5582
#ifdef AUTO_INC_DEC
5583
              else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
5584
                       && REG_P (XEXP (rld[r].in_reg, 0)))
5585
                {
5586
                  regno = REGNO (XEXP (rld[r].in_reg, 0));
5587
                  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5588
                  rld[r].out = rld[r].in;
5589
                }
5590
#endif
5591
#if 0
5592
              /* This won't work, since REGNO can be a pseudo reg number.
5593
                 Also, it takes much more hair to keep track of all the things
5594
                 that can invalidate an inherited reload of part of a pseudoreg.  */
5595
              else if (GET_CODE (rld[r].in) == SUBREG
5596
                       && REG_P (SUBREG_REG (rld[r].in)))
5597
                regno = subreg_regno (rld[r].in);
5598
#endif
5599
 
5600
              if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5601
                {
5602
                  enum reg_class class = rld[r].class, last_class;
5603
                  rtx last_reg = reg_last_reload_reg[regno];
5604
                  enum machine_mode need_mode;
5605
 
5606
                  i = REGNO (last_reg);
5607
                  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5608
                  last_class = REGNO_REG_CLASS (i);
5609
 
5610
                  if (byte == 0)
5611
                    need_mode = mode;
5612
                  else
5613
                    need_mode
5614
                      = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5615
                                                + byte * BITS_PER_UNIT,
5616
                                                GET_MODE_CLASS (mode));
5617
 
5618
                  if ((GET_MODE_SIZE (GET_MODE (last_reg))
5619
                       >= GET_MODE_SIZE (need_mode))
5620
#ifdef CANNOT_CHANGE_MODE_CLASS
5621
                      /* Verify that the register in "i" can be obtained
5622
                         from LAST_REG.  */
5623
                      && !REG_CANNOT_CHANGE_MODE_P (REGNO (last_reg),
5624
                                                    GET_MODE (last_reg),
5625
                                                    mode)
5626
#endif
5627
                      && reg_reloaded_contents[i] == regno
5628
                      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5629
                      && HARD_REGNO_MODE_OK (i, rld[r].mode)
5630
                      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5631
                          /* Even if we can't use this register as a reload
5632
                             register, we might use it for reload_override_in,
5633
                             if copying it to the desired class is cheap
5634
                             enough.  */
5635
                          || ((REGISTER_MOVE_COST (mode, last_class, class)
5636
                               < MEMORY_MOVE_COST (mode, class, 1))
5637
                              && (secondary_reload_class (1, class, mode,
5638
                                                          last_reg)
5639
                                  == NO_REGS)
5640
#ifdef SECONDARY_MEMORY_NEEDED
5641
                              && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5642
                                                            mode)
5643
#endif
5644
                              ))
5645
 
5646
                      && (rld[r].nregs == max_group_size
5647
                          || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5648
                                                  i))
5649
                      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5650
                                           rld[r].when_needed, rld[r].in,
5651
                                           const0_rtx, r, 1))
5652
                    {
5653
                      /* If a group is needed, verify that all the subsequent
5654
                         registers still have their values intact.  */
5655
                      int nr = hard_regno_nregs[i][rld[r].mode];
5656
                      int k;
5657
 
5658
                      for (k = 1; k < nr; k++)
5659
                        if (reg_reloaded_contents[i + k] != regno
5660
                            || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5661
                          break;
5662
 
5663
                      if (k == nr)
5664
                        {
5665
                          int i1;
5666
                          int bad_for_class;
5667
 
5668
                          last_reg = (GET_MODE (last_reg) == mode
5669
                                      ? last_reg : gen_rtx_REG (mode, i));
5670
 
5671
                          bad_for_class = 0;
5672
                          for (k = 0; k < nr; k++)
5673
                            bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5674
                                                                  i+k);
5675
 
5676
                          /* We found a register that contains the
5677
                             value we need.  If this register is the
5678
                             same as an `earlyclobber' operand of the
5679
                             current insn, just mark it as a place to
5680
                             reload from since we can't use it as the
5681
                             reload register itself.  */
5682
 
5683
                          for (i1 = 0; i1 < n_earlyclobbers; i1++)
5684
                            if (reg_overlap_mentioned_for_reload_p
5685
                                (reg_last_reload_reg[regno],
5686
                                 reload_earlyclobbers[i1]))
5687
                              break;
5688
 
5689
                          if (i1 != n_earlyclobbers
5690
                              || ! (free_for_value_p (i, rld[r].mode,
5691
                                                      rld[r].opnum,
5692
                                                      rld[r].when_needed, rld[r].in,
5693
                                                      rld[r].out, r, 1))
5694
                              /* Don't use it if we'd clobber a pseudo reg.  */
5695
                              || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5696
                                  && rld[r].out
5697
                                  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5698
                              /* Don't clobber the frame pointer.  */
5699
                              || (i == HARD_FRAME_POINTER_REGNUM
5700
                                  && frame_pointer_needed
5701
                                  && rld[r].out)
5702
                              /* Don't really use the inherited spill reg
5703
                                 if we need it wider than we've got it.  */
5704
                              || (GET_MODE_SIZE (rld[r].mode)
5705
                                  > GET_MODE_SIZE (mode))
5706
                              || bad_for_class
5707
 
5708
                              /* If find_reloads chose reload_out as reload
5709
                                 register, stay with it - that leaves the
5710
                                 inherited register for subsequent reloads.  */
5711
                              || (rld[r].out && rld[r].reg_rtx
5712
                                  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5713
                            {
5714
                              if (! rld[r].optional)
5715
                                {
5716
                                  reload_override_in[r] = last_reg;
5717
                                  reload_inheritance_insn[r]
5718
                                    = reg_reloaded_insn[i];
5719
                                }
5720
                            }
5721
                          else
5722
                            {
5723
                              int k;
5724
                              /* We can use this as a reload reg.  */
5725
                              /* Mark the register as in use for this part of
5726
                                 the insn.  */
5727
                              mark_reload_reg_in_use (i,
5728
                                                      rld[r].opnum,
5729
                                                      rld[r].when_needed,
5730
                                                      rld[r].mode);
5731
                              rld[r].reg_rtx = last_reg;
5732
                              reload_inherited[r] = 1;
5733
                              reload_inheritance_insn[r]
5734
                                = reg_reloaded_insn[i];
5735
                              reload_spill_index[r] = i;
5736
                              for (k = 0; k < nr; k++)
5737
                                SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5738
                                                  i + k);
5739
                            }
5740
                        }
5741
                    }
5742
                }
5743
            }
5744
 
5745
          /* Here's another way to see if the value is already lying around.  */
5746
          if (inheritance
5747
              && rld[r].in != 0
5748
              && ! reload_inherited[r]
5749
              && rld[r].out == 0
5750
              && (CONSTANT_P (rld[r].in)
5751
                  || GET_CODE (rld[r].in) == PLUS
5752
                  || REG_P (rld[r].in)
5753
                  || MEM_P (rld[r].in))
5754
              && (rld[r].nregs == max_group_size
5755
                  || ! reg_classes_intersect_p (rld[r].class, group_class)))
5756
            search_equiv = rld[r].in;
5757
          /* If this is an output reload from a simple move insn, look
5758
             if an equivalence for the input is available.  */
5759
          else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5760
            {
5761
              rtx set = single_set (insn);
5762
 
5763
              if (set
5764
                  && rtx_equal_p (rld[r].out, SET_DEST (set))
5765
                  && CONSTANT_P (SET_SRC (set)))
5766
                search_equiv = SET_SRC (set);
5767
            }
5768
 
5769
          if (search_equiv)
5770
            {
5771
              rtx equiv
5772
                = find_equiv_reg (search_equiv, insn, rld[r].class,
5773
                                  -1, NULL, 0, rld[r].mode);
5774
              int regno = 0;
5775
 
5776
              if (equiv != 0)
5777
                {
5778
                  if (REG_P (equiv))
5779
                    regno = REGNO (equiv);
5780
                  else
5781
                    {
5782
                      /* This must be a SUBREG of a hard register.
5783
                         Make a new REG since this might be used in an
5784
                         address and not all machines support SUBREGs
5785
                         there.  */
5786
                      gcc_assert (GET_CODE (equiv) == SUBREG);
5787
                      regno = subreg_regno (equiv);
5788
                      equiv = gen_rtx_REG (rld[r].mode, regno);
5789
                      /* If we choose EQUIV as the reload register, but the
5790
                         loop below decides to cancel the inheritance, we'll
5791
                         end up reloading EQUIV in rld[r].mode, not the mode
5792
                         it had originally.  That isn't safe when EQUIV isn't
5793
                         available as a spill register since its value might
5794
                         still be live at this point.  */
5795
                      for (i = regno; i < regno + (int) rld[r].nregs; i++)
5796
                        if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
5797
                          equiv = 0;
5798
                    }
5799
                }
5800
 
5801
              /* If we found a spill reg, reject it unless it is free
5802
                 and of the desired class.  */
5803
              if (equiv != 0)
5804
                {
5805
                  int regs_used = 0;
5806
                  int bad_for_class = 0;
5807
                  int max_regno = regno + rld[r].nregs;
5808
 
5809
                  for (i = regno; i < max_regno; i++)
5810
                    {
5811
                      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
5812
                                                      i);
5813
                      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5814
                                                           i);
5815
                    }
5816
 
5817
                  if ((regs_used
5818
                       && ! free_for_value_p (regno, rld[r].mode,
5819
                                              rld[r].opnum, rld[r].when_needed,
5820
                                              rld[r].in, rld[r].out, r, 1))
5821
                      || bad_for_class)
5822
                    equiv = 0;
5823
                }
5824
 
5825
              if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5826
                equiv = 0;
5827
 
5828
              /* We found a register that contains the value we need.
5829
                 If this register is the same as an `earlyclobber' operand
5830
                 of the current insn, just mark it as a place to reload from
5831
                 since we can't use it as the reload register itself.  */
5832
 
5833
              if (equiv != 0)
5834
                for (i = 0; i < n_earlyclobbers; i++)
5835
                  if (reg_overlap_mentioned_for_reload_p (equiv,
5836
                                                          reload_earlyclobbers[i]))
5837
                    {
5838
                      if (! rld[r].optional)
5839
                        reload_override_in[r] = equiv;
5840
                      equiv = 0;
5841
                      break;
5842
                    }
5843
 
5844
              /* If the equiv register we have found is explicitly clobbered
5845
                 in the current insn, it depends on the reload type if we
5846
                 can use it, use it for reload_override_in, or not at all.
5847
                 In particular, we then can't use EQUIV for a
5848
                 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
5849
 
5850
              if (equiv != 0)
5851
                {
5852
                  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
5853
                    switch (rld[r].when_needed)
5854
                      {
5855
                      case RELOAD_FOR_OTHER_ADDRESS:
5856
                      case RELOAD_FOR_INPADDR_ADDRESS:
5857
                      case RELOAD_FOR_INPUT_ADDRESS:
5858
                      case RELOAD_FOR_OPADDR_ADDR:
5859
                        break;
5860
                      case RELOAD_OTHER:
5861
                      case RELOAD_FOR_INPUT:
5862
                      case RELOAD_FOR_OPERAND_ADDRESS:
5863
                        if (! rld[r].optional)
5864
                          reload_override_in[r] = equiv;
5865
                        /* Fall through.  */
5866
                      default:
5867
                        equiv = 0;
5868
                        break;
5869
                      }
5870
                  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5871
                    switch (rld[r].when_needed)
5872
                      {
5873
                      case RELOAD_FOR_OTHER_ADDRESS:
5874
                      case RELOAD_FOR_INPADDR_ADDRESS:
5875
                      case RELOAD_FOR_INPUT_ADDRESS:
5876
                      case RELOAD_FOR_OPADDR_ADDR:
5877
                      case RELOAD_FOR_OPERAND_ADDRESS:
5878
                      case RELOAD_FOR_INPUT:
5879
                        break;
5880
                      case RELOAD_OTHER:
5881
                        if (! rld[r].optional)
5882
                          reload_override_in[r] = equiv;
5883
                        /* Fall through.  */
5884
                      default:
5885
                        equiv = 0;
5886
                        break;
5887
                      }
5888
                }
5889
 
5890
              /* If we found an equivalent reg, say no code need be generated
5891
                 to load it, and use it as our reload reg.  */
5892
              if (equiv != 0
5893
                  && (regno != HARD_FRAME_POINTER_REGNUM
5894
                      || !frame_pointer_needed))
5895
                {
5896
                  int nr = hard_regno_nregs[regno][rld[r].mode];
5897
                  int k;
5898
                  rld[r].reg_rtx = equiv;
5899
                  reload_inherited[r] = 1;
5900
 
5901
                  /* If reg_reloaded_valid is not set for this register,
5902
                     there might be a stale spill_reg_store lying around.
5903
                     We must clear it, since otherwise emit_reload_insns
5904
                     might delete the store.  */
5905
                  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5906
                    spill_reg_store[regno] = NULL_RTX;
5907
                  /* If any of the hard registers in EQUIV are spill
5908
                     registers, mark them as in use for this insn.  */
5909
                  for (k = 0; k < nr; k++)
5910
                    {
5911
                      i = spill_reg_order[regno + k];
5912
                      if (i >= 0)
5913
                        {
5914
                          mark_reload_reg_in_use (regno, rld[r].opnum,
5915
                                                  rld[r].when_needed,
5916
                                                  rld[r].mode);
5917
                          SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5918
                                            regno + k);
5919
                        }
5920
                    }
5921
                }
5922
            }
5923
 
5924
          /* If we found a register to use already, or if this is an optional
5925
             reload, we are done.  */
5926
          if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5927
            continue;
5928
 
5929
#if 0
5930
          /* No longer needed for correct operation.  Might or might
5931
             not give better code on the average.  Want to experiment?  */
5932
 
5933
          /* See if there is a later reload that has a class different from our
5934
             class that intersects our class or that requires fewer registers
5935
             than our reload.  If so, we must allocate a register to this
5936
             reload now, since that reload might inherit a previous reload
5937
             and take the only available register in our class.  Don't do this
5938
             for optional reloads since they will force all previous reloads
5939
             to be allocated.  Also don't do this for reloads that have been
5940
             turned off.  */
5941
 
5942
          for (i = j + 1; i < n_reloads; i++)
5943
            {
5944
              int s = reload_order[i];
5945
 
5946
              if ((rld[s].in == 0 && rld[s].out == 0
5947
                   && ! rld[s].secondary_p)
5948
                  || rld[s].optional)
5949
                continue;
5950
 
5951
              if ((rld[s].class != rld[r].class
5952
                   && reg_classes_intersect_p (rld[r].class,
5953
                                               rld[s].class))
5954
                  || rld[s].nregs < rld[r].nregs)
5955
                break;
5956
            }
5957
 
5958
          if (i == n_reloads)
5959
            continue;
5960
 
5961
          allocate_reload_reg (chain, r, j == n_reloads - 1);
5962
#endif
5963
        }
5964
 
5965
      /* Now allocate reload registers for anything non-optional that
5966
         didn't get one yet.  */
5967
      for (j = 0; j < n_reloads; j++)
5968
        {
5969
          int r = reload_order[j];
5970
 
5971
          /* Ignore reloads that got marked inoperative.  */
5972
          if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5973
            continue;
5974
 
5975
          /* Skip reloads that already have a register allocated or are
5976
             optional.  */
5977
          if (rld[r].reg_rtx != 0 || rld[r].optional)
5978
            continue;
5979
 
5980
          if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5981
            break;
5982
        }
5983
 
5984
      /* If that loop got all the way, we have won.  */
5985
      if (j == n_reloads)
5986
        {
5987
          win = 1;
5988
          break;
5989
        }
5990
 
5991
      /* Loop around and try without any inheritance.  */
5992
    }
5993
 
5994
  if (! win)
5995
    {
5996
      /* First undo everything done by the failed attempt
5997
         to allocate with inheritance.  */
5998
      choose_reload_regs_init (chain, save_reload_reg_rtx);
5999
 
6000
      /* Some sanity tests to verify that the reloads found in the first
6001
         pass are identical to the ones we have now.  */
6002
      gcc_assert (chain->n_reloads == n_reloads);
6003
 
6004
      for (i = 0; i < n_reloads; i++)
6005
        {
6006
          if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6007
            continue;
6008
          gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6009
          for (j = 0; j < n_spills; j++)
6010
            if (spill_regs[j] == chain->rld[i].regno)
6011
              if (! set_reload_reg (j, i))
6012
                failed_reload (chain->insn, i);
6013
        }
6014
    }
6015
 
6016
  /* If we thought we could inherit a reload, because it seemed that
6017
     nothing else wanted the same reload register earlier in the insn,
6018
     verify that assumption, now that all reloads have been assigned.
6019
     Likewise for reloads where reload_override_in has been set.  */
6020
 
6021
  /* If doing expensive optimizations, do one preliminary pass that doesn't
6022
     cancel any inheritance, but removes reloads that have been needed only
6023
     for reloads that we know can be inherited.  */
6024
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6025
    {
6026
      for (j = 0; j < n_reloads; j++)
6027
        {
6028
          int r = reload_order[j];
6029
          rtx check_reg;
6030
          if (reload_inherited[r] && rld[r].reg_rtx)
6031
            check_reg = rld[r].reg_rtx;
6032
          else if (reload_override_in[r]
6033
                   && (REG_P (reload_override_in[r])
6034
                       || GET_CODE (reload_override_in[r]) == SUBREG))
6035
            check_reg = reload_override_in[r];
6036
          else
6037
            continue;
6038
          if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6039
                                  rld[r].opnum, rld[r].when_needed, rld[r].in,
6040
                                  (reload_inherited[r]
6041
                                   ? rld[r].out : const0_rtx),
6042
                                  r, 1))
6043
            {
6044
              if (pass)
6045
                continue;
6046
              reload_inherited[r] = 0;
6047
              reload_override_in[r] = 0;
6048
            }
6049
          /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6050
             reload_override_in, then we do not need its related
6051
             RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6052
             likewise for other reload types.
6053
             We handle this by removing a reload when its only replacement
6054
             is mentioned in reload_in of the reload we are going to inherit.
6055
             Auto_inc expressions are a special case; even if the input is
6056
             inherited, we still need the address for the output.  We can
6057
             recognize them because they have RELOAD_OUT set to RELOAD_IN.
6058
             If we succeed in removing some reload and we are doing a preliminary
6059
             pass just to remove such reloads, make another pass, since the
6060
             removal of one reload might allow us to inherit another one.  */
6061
          else if (rld[r].in
6062
                   && rld[r].out != rld[r].in
6063
                   && remove_address_replacements (rld[r].in) && pass)
6064
            pass = 2;
6065
        }
6066
    }
6067
 
6068
  /* Now that reload_override_in is known valid,
6069
     actually override reload_in.  */
6070
  for (j = 0; j < n_reloads; j++)
6071
    if (reload_override_in[j])
6072
      rld[j].in = reload_override_in[j];
6073
 
6074
  /* If this reload won't be done because it has been canceled or is
6075
     optional and not inherited, clear reload_reg_rtx so other
6076
     routines (such as subst_reloads) don't get confused.  */
6077
  for (j = 0; j < n_reloads; j++)
6078
    if (rld[j].reg_rtx != 0
6079
        && ((rld[j].optional && ! reload_inherited[j])
6080
            || (rld[j].in == 0 && rld[j].out == 0
6081
                && ! rld[j].secondary_p)))
6082
      {
6083
        int regno = true_regnum (rld[j].reg_rtx);
6084
 
6085
        if (spill_reg_order[regno] >= 0)
6086
          clear_reload_reg_in_use (regno, rld[j].opnum,
6087
                                   rld[j].when_needed, rld[j].mode);
6088
        rld[j].reg_rtx = 0;
6089
        reload_spill_index[j] = -1;
6090
      }
6091
 
6092
  /* Record which pseudos and which spill regs have output reloads.  */
6093
  for (j = 0; j < n_reloads; j++)
6094
    {
6095
      int r = reload_order[j];
6096
 
6097
      i = reload_spill_index[r];
6098
 
6099
      /* I is nonneg if this reload uses a register.
6100
         If rld[r].reg_rtx is 0, this is an optional reload
6101
         that we opted to ignore.  */
6102
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6103
          && rld[r].reg_rtx != 0)
6104
        {
6105
          int nregno = REGNO (rld[r].out_reg);
6106
          int nr = 1;
6107
 
6108
          if (nregno < FIRST_PSEUDO_REGISTER)
6109
            nr = hard_regno_nregs[nregno][rld[r].mode];
6110
 
6111
          while (--nr >= 0)
6112
            SET_REGNO_REG_SET (&reg_has_output_reload,
6113
                               nregno + nr);
6114
 
6115
          if (i >= 0)
6116
            {
6117
              nr = hard_regno_nregs[i][rld[r].mode];
6118
              while (--nr >= 0)
6119
                SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6120
            }
6121
 
6122
          gcc_assert (rld[r].when_needed == RELOAD_OTHER
6123
                      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6124
                      || rld[r].when_needed == RELOAD_FOR_INSN);
6125
        }
6126
    }
6127
}
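
/* A minimal standalone sketch of the strategy implemented above, under
   simplified assumptions (toy_reload, a caller-supplied FIND_REG callback
   standing in for allocate_reload_reg and inheritance): order the reloads so
   the most constrained register classes are served first, try to satisfy
   them while allowing values to be inherited from earlier insns, and make
   one more attempt with inheritance disabled if any non-optional reload was
   left without a register.  */
#if 0 /* Illustrative sketch only, not part of the reload pass proper.  */
struct toy_reload
{
  int class_size;   /* Number of hard regs usable by the reload's class.  */
  int optional;     /* Nonzero if the reload may simply be dropped.  */
  int reg;          /* Chosen hard reg, or -1.  */
};

static int
toy_choose_reload_regs (struct toy_reload *rl, int n,
                        int (*find_reg) (struct toy_reload *, int inherit))
{
  int order[16], i, j, inherit;

  if (n > 16)
    return 0;                   /* Sketch assumes at most 16 reloads.  */

  /* Sort indices so smaller (more constrained) classes come first,
     mirroring the qsort on reload_order above.  */
  for (i = 0; i < n; i++)
    order[i] = i;
  for (i = 1; i < n; i++)
    for (j = i;
         j > 0 && rl[order[j]].class_size < rl[order[j - 1]].class_size;
         j--)
      {
        int tmp = order[j];
        order[j] = order[j - 1];
        order[j - 1] = tmp;
      }

  for (inherit = 1; inherit >= 0; inherit--)
    {
      int ok = 1;

      for (i = 0; i < n; i++)
        {
          struct toy_reload *r = &rl[order[i]];

          r->reg = find_reg (r, inherit);
          if (r->reg < 0 && !r->optional)
            ok = 0;             /* Retry the whole insn without inheritance.  */
        }
      if (ok)
        return 1;
    }
  return 0;                     /* Caller must spill more hard regs.  */
}
#endif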
6128
 
6129
/* Deallocate the reload register for reload R.  This is called from
6130
   remove_address_replacements.  */
6131
 
6132
void
6133
deallocate_reload_reg (int r)
6134
{
6135
  int regno;
6136
 
6137
  if (! rld[r].reg_rtx)
6138
    return;
6139
  regno = true_regnum (rld[r].reg_rtx);
6140
  rld[r].reg_rtx = 0;
6141
  if (spill_reg_order[regno] >= 0)
6142
    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6143
                             rld[r].mode);
6144
  reload_spill_index[r] = -1;
6145
}
6146
 
6147
/* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6148
   reloads of the same item for fear that we might not have enough reload
6149
   registers. However, normally they will get the same reload register
6150
   and hence actually need not be loaded twice.
6151
 
6152
   Here we check for the most common case of this phenomenon: when we have
6153
   a number of reloads for the same object, each of which were allocated
6154
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6155
   reload, and is not modified in the insn itself.  If we find such,
6156
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
6157
   This will not increase the number of spill registers needed and will
6158
   prevent redundant code.  */
6159
 
6160
static void
6161
merge_assigned_reloads (rtx insn)
6162
{
6163
  int i, j;
6164
 
6165
  /* Scan all the reloads looking for ones that only load values and
6166
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6167
     assigned and not modified by INSN.  */
6168
 
6169
  for (i = 0; i < n_reloads; i++)
6170
    {
6171
      int conflicting_input = 0;
6172
      int max_input_address_opnum = -1;
6173
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6174
 
6175
      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6176
          || rld[i].out != 0 || rld[i].reg_rtx == 0
6177
          || reg_set_p (rld[i].reg_rtx, insn))
6178
        continue;
6179
 
6180
      /* Look at all other reloads.  Ensure that the only use of this
6181
         reload_reg_rtx is in a reload that just loads the same value
6182
         as we do.  Note that any secondary reloads must be of the identical
6183
         class since the values, modes, and result registers are the
6184
         same, so we need not do anything with any secondary reloads.  */
6185
 
6186
      for (j = 0; j < n_reloads; j++)
6187
        {
6188
          if (i == j || rld[j].reg_rtx == 0
6189
              || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6190
                                            rld[i].reg_rtx))
6191
            continue;
6192
 
6193
          if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6194
              && rld[j].opnum > max_input_address_opnum)
6195
            max_input_address_opnum = rld[j].opnum;
6196
 
6197
          /* If the reload regs aren't exactly the same (e.g., different modes)
6198
             or if the values are different, we can't merge this reload.
6199
             But if it is an input reload, we might still merge
6200
             RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */
6201
 
6202
          if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6203
              || rld[j].out != 0 || rld[j].in == 0
6204
              || ! rtx_equal_p (rld[i].in, rld[j].in))
6205
            {
6206
              if (rld[j].when_needed != RELOAD_FOR_INPUT
6207
                  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6208
                       || rld[i].opnum > rld[j].opnum)
6209
                      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6210
                break;
6211
              conflicting_input = 1;
6212
              if (min_conflicting_input_opnum > rld[j].opnum)
6213
                min_conflicting_input_opnum = rld[j].opnum;
6214
            }
6215
        }
6216
 
6217
      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
6218
         we, in fact, found any matching reloads.  */
6219
 
6220
      if (j == n_reloads
6221
          && max_input_address_opnum <= min_conflicting_input_opnum)
6222
        {
6223
          gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6224
 
6225
          for (j = 0; j < n_reloads; j++)
6226
            if (i != j && rld[j].reg_rtx != 0
6227
                && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6228
                && (! conflicting_input
6229
                    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6230
                    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6231
              {
6232
                rld[i].when_needed = RELOAD_OTHER;
6233
                rld[j].in = 0;
6234
                reload_spill_index[j] = -1;
6235
                transfer_replacements (i, j);
6236
              }
6237
 
6238
          /* If this is now RELOAD_OTHER, look for any reloads that load
6239
             parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6240
             if they were for inputs, RELOAD_OTHER for outputs.  Note that
6241
             this test is equivalent to looking for reloads for this operand
6242
             number.  */
6243
          /* We must take special care with RELOAD_FOR_OUTPUT_ADDRESS; it may
6244
             share registers with a RELOAD_FOR_INPUT, so we cannot change it
6245
             to RELOAD_FOR_OTHER_ADDRESS.  We should never need to, since we
6246
             do not modify RELOAD_FOR_OUTPUT.  */
6247
 
6248
          if (rld[i].when_needed == RELOAD_OTHER)
6249
            for (j = 0; j < n_reloads; j++)
6250
              if (rld[j].in != 0
6251
                  && rld[j].when_needed != RELOAD_OTHER
6252
                  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6253
                  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6254
                  && (! conflicting_input
6255
                      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6256
                      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6257
                  && reg_overlap_mentioned_for_reload_p (rld[j].in,
6258
                                                         rld[i].in))
6259
                {
6260
                  int k;
6261
 
6262
                  rld[j].when_needed
6263
                    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6264
                        || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6265
                       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6266
 
6267
                  /* Check to see if we accidentally converted two
6268
                     reloads that use the same reload register with
6269
                     different inputs to the same type.  If so, the
6270
                     resulting code won't work.  */
6271
                  if (rld[j].reg_rtx)
6272
                    for (k = 0; k < j; k++)
6273
                      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6274
                                  || rld[k].when_needed != rld[j].when_needed
6275
                                  || !rtx_equal_p (rld[k].reg_rtx,
6276
                                                   rld[j].reg_rtx)
6277
                                  || rtx_equal_p (rld[k].in,
6278
                                                  rld[j].in));
6279
                }
6280
        }
6281
    }
6282
}
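
/* A minimal standalone sketch of the merge described above, assuming a
   simplified representation (toy_in_reload, value_id) in place of the rtx
   comparisons on rld[].in and rld[].reg_rtx: when several input-only reloads
   load the same value into the same hard reg, a single load suffices, so all
   but one of them are cancelled.  */
#if 0 /* Illustrative sketch only, not part of the reload pass proper.  */
struct toy_in_reload
{
  int value_id;   /* Identifies the value being loaded.  */
  int reg;        /* Hard reg chosen for the reload, or -1.  */
  int cancelled;  /* Set when the reload has been merged away.  */
};

static void
toy_merge_input_reloads (struct toy_in_reload *rl, int n)
{
  int i, j;

  for (i = 0; i < n; i++)
    {
      if (rl[i].cancelled || rl[i].reg < 0)
        continue;
      for (j = i + 1; j < n; j++)
        if (!rl[j].cancelled
            && rl[j].reg == rl[i].reg
            && rl[j].value_id == rl[i].value_id)
          rl[j].cancelled = 1;  /* The load kept for I serves J as well.  */
    }
}
#endif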
6283
 
6284
/* These arrays are filled by emit_reload_insns and its subroutines.  */
6285
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6286
static rtx other_input_address_reload_insns = 0;
6287
static rtx other_input_reload_insns = 0;
6288
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6289
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6290
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6291
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6292
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6293
static rtx operand_reload_insns = 0;
6294
static rtx other_operand_reload_insns = 0;
6295
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6296
 
6297
/* Values to be put in spill_reg_store are put here first.  */
6298
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6299
static HARD_REG_SET reg_reloaded_died;
6300
 
6301
/* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6302
   of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if ALT_RELOAD_REG
6303
   is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
6304
   adjusted register, and return true.  Otherwise, return false.  */
6305
static bool
6306
reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6307
                            enum reg_class new_class,
6308
                            enum machine_mode new_mode)
6309
 
6310
{
6311
  rtx reg;
6312
 
6313
  for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6314
    {
6315
      unsigned regno = REGNO (reg);
6316
 
6317
      if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6318
        continue;
6319
      if (GET_MODE (reg) != new_mode)
6320
        {
6321
          if (!HARD_REGNO_MODE_OK (regno, new_mode))
6322
            continue;
6323
          if (hard_regno_nregs[regno][new_mode]
6324
              > hard_regno_nregs[regno][GET_MODE (reg)])
6325
            continue;
6326
          reg = reload_adjust_reg_for_mode (reg, new_mode);
6327
        }
6328
      *reload_reg = reg;
6329
      return true;
6330
    }
6331
  return false;
6332
}
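
/* A minimal standalone sketch of the candidate test above, with arrays
   (in_class, fits_mode) standing in for TEST_HARD_REG_BIT,
   HARD_REGNO_MODE_OK and the hard_regno_nregs check: accept the primary
   candidate if it belongs to the wanted class and can hold the wanted mode,
   otherwise fall back to the alternative candidate.  */
#if 0 /* Illustrative sketch only, not part of the reload pass proper.  */
static int
toy_adjust_reg_for_temp (int *regno, int alt_regno,
                         const int *in_class, const int *fits_mode)
{
  int cand[2], k;

  cand[0] = *regno;
  cand[1] = alt_regno;
  for (k = 0; k < 2; k++)
    if (cand[k] >= 0 && in_class[cand[k]] && fits_mode[cand[k]])
      {
        *regno = cand[k];       /* Use this register, possibly the fallback.  */
        return 1;
      }
  return 0;                     /* Neither candidate is usable.  */
}
#endif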
6333
 
6334
/* Check if *RELOAD_REG is suitable as a scratch register for the reload
6335
   pattern with insn_code ICODE, or alternatively, if ALT_RELOAD_REG is
6336
   nonzero, if that is suitable.  On success, change *RELOAD_REG to the
6337
   adjusted register, and return true.  Otherwise, return false.  */
6338
static bool
6339
reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6340
                             enum insn_code icode)
6341
 
6342
{
6343
  enum reg_class new_class = scratch_reload_class (icode);
6344
  enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6345
 
6346
  return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6347
                                     new_class, new_mode);
6348
}
6349
 
6350
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6351
   has the number J.  OLD contains the value to be used as input.  */
6352
 
6353
static void
6354
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6355
                         rtx old, int j)
6356
{
6357
  rtx insn = chain->insn;
6358
  rtx reloadreg = rl->reg_rtx;
6359
  rtx oldequiv_reg = 0;
6360
  rtx oldequiv = 0;
6361
  int special = 0;
6362
  enum machine_mode mode;
6363
  rtx *where;
6364
 
6365
  /* Determine the mode to reload in.
6366
     This is very tricky because we have three to choose from.
6367
     There is the mode the insn operand wants (rl->inmode).
6368
     There is the mode of the reload register RELOADREG.
6369
     There is the intrinsic mode of the operand, which we could find
6370
     by stripping some SUBREGs.
6371
     It turns out that RELOADREG's mode is irrelevant:
6372
     we can change that arbitrarily.
6373
 
6374
     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6375
     then the reload reg may not support QImode moves, so use SImode.
6376
     If foo is in memory due to spilling a pseudo reg, this is safe,
6377
     because the QImode value is in the least significant part of a
6378
     slot big enough for a SImode.  If foo is some other sort of
6379
     memory reference, then it is impossible to reload this case,
6380
     so previous passes had better make sure this never happens.
6381
 
6382
     Then consider a one-word union which has SImode and one of its
6383
     members is a float, being fetched as (SUBREG:SF union:SI).
6384
     We must fetch that as SFmode because we could be loading into
6385
     a float-only register.  In this case OLD's mode is correct.
6386
 
6387
     Consider an immediate integer: it has VOIDmode.  Here we need
6388
     to get a mode from something else.
6389
 
6390
     In some cases, there is a fourth mode, the operand's
6391
     containing mode.  If the insn specifies a containing mode for
6392
     this operand, it overrides all others.
6393
 
6394
     I am not sure whether the algorithm here is always right,
6395
     but it does the right things in those cases.  */
6396
 
6397
  mode = GET_MODE (old);
6398
  if (mode == VOIDmode)
6399
    mode = rl->inmode;
6400
 
6401
  /* delete_output_reload is only invoked properly if old contains
6402
     the original pseudo register.  Since this is replaced with a
6403
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6404
     find the pseudo in RELOAD_IN_REG.  */
6405
  if (reload_override_in[j]
6406
      && REG_P (rl->in_reg))
6407
    {
6408
      oldequiv = old;
6409
      old = rl->in_reg;
6410
    }
6411
  if (oldequiv == 0)
6412
    oldequiv = old;
6413
  else if (REG_P (oldequiv))
6414
    oldequiv_reg = oldequiv;
6415
  else if (GET_CODE (oldequiv) == SUBREG)
6416
    oldequiv_reg = SUBREG_REG (oldequiv);
6417
 
6418
  /* If we are reloading from a register that was recently stored in
6419
     with an output-reload, see if we can prove there was
6420
     actually no need to store the old value in it.  */
6421
 
6422
  if (optimize && REG_P (oldequiv)
6423
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6424
      && spill_reg_store[REGNO (oldequiv)]
6425
      && REG_P (old)
6426
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6427
          || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6428
                          rl->out_reg)))
6429
    delete_output_reload (insn, j, REGNO (oldequiv));
6430
 
6431
  /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6432
     then load RELOADREG from OLDEQUIV.  Note that we cannot use
6433
     gen_lowpart_common since it can do the wrong thing when
6434
     RELOADREG has a multi-word mode.  Note that RELOADREG
6435
     must always be a REG here.  */
6436
 
6437
  if (GET_MODE (reloadreg) != mode)
6438
    reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6439
  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6440
    oldequiv = SUBREG_REG (oldequiv);
6441
  if (GET_MODE (oldequiv) != VOIDmode
6442
      && mode != GET_MODE (oldequiv))
6443
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6444
 
6445
  /* Switch to the right place to emit the reload insns.  */
6446
  switch (rl->when_needed)
6447
    {
6448
    case RELOAD_OTHER:
6449
      where = &other_input_reload_insns;
6450
      break;
6451
    case RELOAD_FOR_INPUT:
6452
      where = &input_reload_insns[rl->opnum];
6453
      break;
6454
    case RELOAD_FOR_INPUT_ADDRESS:
6455
      where = &input_address_reload_insns[rl->opnum];
6456
      break;
6457
    case RELOAD_FOR_INPADDR_ADDRESS:
6458
      where = &inpaddr_address_reload_insns[rl->opnum];
6459
      break;
6460
    case RELOAD_FOR_OUTPUT_ADDRESS:
6461
      where = &output_address_reload_insns[rl->opnum];
6462
      break;
6463
    case RELOAD_FOR_OUTADDR_ADDRESS:
6464
      where = &outaddr_address_reload_insns[rl->opnum];
6465
      break;
6466
    case RELOAD_FOR_OPERAND_ADDRESS:
6467
      where = &operand_reload_insns;
6468
      break;
6469
    case RELOAD_FOR_OPADDR_ADDR:
6470
      where = &other_operand_reload_insns;
6471
      break;
6472
    case RELOAD_FOR_OTHER_ADDRESS:
6473
      where = &other_input_address_reload_insns;
6474
      break;
6475
    default:
6476
      gcc_unreachable ();
6477
    }
6478
 
6479
  push_to_sequence (*where);
6480
 
6481
  /* Auto-increment addresses must be reloaded in a special way.  */
6482
  if (rl->out && ! rl->out_reg)
6483
    {
6484
      /* We are not going to bother supporting the case where an
6485
         incremented register can't be copied directly from
6486
         OLDEQUIV since this seems highly unlikely.  */
6487
      gcc_assert (rl->secondary_in_reload < 0);
6488
 
6489
      if (reload_inherited[j])
6490
        oldequiv = reloadreg;
6491
 
6492
      old = XEXP (rl->in_reg, 0);
6493
 
6494
      if (optimize && REG_P (oldequiv)
6495
          && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6496
          && spill_reg_store[REGNO (oldequiv)]
6497
          && REG_P (old)
6498
          && (dead_or_set_p (insn,
6499
                             spill_reg_stored_to[REGNO (oldequiv)])
6500
              || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6501
                              old)))
6502
        delete_output_reload (insn, j, REGNO (oldequiv));
6503
 
6504
      /* Prevent normal processing of this reload.  */
6505
      special = 1;
6506
      /* Output a special code sequence for this case.  */
6507
      new_spill_reg_store[REGNO (reloadreg)]
6508
        = inc_for_reload (reloadreg, oldequiv, rl->out,
6509
                          rl->inc);
6510
    }
6511
 
6512
  /* If we are reloading a pseudo-register that was set by the previous
6513
     insn, see if we can get rid of that pseudo-register entirely
6514
     by redirecting the previous insn into our reload register.  */
6515
 
6516
  else if (optimize && REG_P (old)
6517
           && REGNO (old) >= FIRST_PSEUDO_REGISTER
6518
           && dead_or_set_p (insn, old)
6519
           /* This is unsafe if some other reload
6520
              uses the same reg first.  */
6521
           && ! conflicts_with_override (reloadreg)
6522
           && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6523
                                rl->when_needed, old, rl->out, j, 0))
6524
    {
6525
      rtx temp = PREV_INSN (insn);
6526
      while (temp && NOTE_P (temp))
6527
        temp = PREV_INSN (temp);
6528
      if (temp
6529
          && NONJUMP_INSN_P (temp)
6530
          && GET_CODE (PATTERN (temp)) == SET
6531
          && SET_DEST (PATTERN (temp)) == old
6532
          /* Make sure we can access insn_operand_constraint.  */
6533
          && asm_noperands (PATTERN (temp)) < 0
6534
          /* This is unsafe if operand occurs more than once in current
6535
             insn.  Perhaps some occurrences aren't reloaded.  */
6536
          && count_occurrences (PATTERN (insn), old, 0) == 1)
6537
        {
6538
          rtx old = SET_DEST (PATTERN (temp));
6539
          /* Store into the reload register instead of the pseudo.  */
6540
          SET_DEST (PATTERN (temp)) = reloadreg;
6541
 
6542
          /* Verify that resulting insn is valid.  */
6543
          extract_insn (temp);
6544
          if (constrain_operands (1))
6545
            {
6546
              /* If the previous insn is an output reload, the source is
6547
                 a reload register, and its spill_reg_store entry will
6548
                 contain the previous destination.  This is now
6549
                 invalid.  */
6550
              if (REG_P (SET_SRC (PATTERN (temp)))
6551
                  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6552
                {
6553
                  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6554
                  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6555
                }
6556
 
6557
              /* If these are the only uses of the pseudo reg,
6558
                 pretend for GDB it lives in the reload reg we used.  */
6559
              if (REG_N_DEATHS (REGNO (old)) == 1
6560
                  && REG_N_SETS (REGNO (old)) == 1)
6561
                {
6562
                  reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6563
                  alter_reg (REGNO (old), -1);
6564
                }
6565
              special = 1;
6566
            }
6567
          else
6568
            {
6569
              SET_DEST (PATTERN (temp)) = old;
6570
            }
6571
        }
6572
    }
6573
 
6574
  /* We can't do that, so output an insn to load RELOADREG.  */
6575
 
6576
  /* If we have a secondary reload, pick up the secondary register
6577
     and icode, if any.  If OLDEQUIV and OLD are different or
6578
     if this is an in-out reload, recompute whether or not we
6579
     still need a secondary register and what the icode should
6580
     be.  If we still need a secondary register and the class or
6581
     icode is different, go back to reloading from OLD if using
6582
     OLDEQUIV means that we got the wrong type of register.  We
6583
     cannot have different class or icode due to an in-out reload
6584
     because we don't make such reloads when both the input and
6585
     output need secondary reload registers.  */
6586
 
6587
  if (! special && rl->secondary_in_reload >= 0)
6588
    {
6589
      rtx second_reload_reg = 0;
6590
      rtx third_reload_reg = 0;
6591
      int secondary_reload = rl->secondary_in_reload;
6592
      rtx real_oldequiv = oldequiv;
6593
      rtx real_old = old;
6594
      rtx tmp;
6595
      enum insn_code icode;
6596
      enum insn_code tertiary_icode = CODE_FOR_nothing;
6597
 
6598
      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6599
         and similarly for OLD.
6600
         See comments in get_secondary_reload in reload.c.  */
6601
      /* If it is a pseudo that cannot be replaced with its
6602
         equivalent MEM, we must fall back to reload_in, which
6603
         will have all the necessary substitutions registered.
6604
         Likewise for a pseudo that can't be replaced with its
6605
         equivalent constant.
6606
 
6607
         Take extra care for subregs of such pseudos.  Note that
6608
         we cannot use reg_equiv_mem in this case because it is
6609
         not in the right mode.  */
6610
 
6611
      tmp = oldequiv;
6612
      if (GET_CODE (tmp) == SUBREG)
6613
        tmp = SUBREG_REG (tmp);
6614
      if (REG_P (tmp)
6615
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6616
          && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6617
              || reg_equiv_constant[REGNO (tmp)] != 0))
6618
        {
6619
          if (! reg_equiv_mem[REGNO (tmp)]
6620
              || num_not_at_initial_offset
6621
              || GET_CODE (oldequiv) == SUBREG)
6622
            real_oldequiv = rl->in;
6623
          else
6624
            real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6625
        }
6626
 
6627
      tmp = old;
6628
      if (GET_CODE (tmp) == SUBREG)
6629
        tmp = SUBREG_REG (tmp);
6630
      if (REG_P (tmp)
6631
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6632
          && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6633
              || reg_equiv_constant[REGNO (tmp)] != 0))
6634
        {
6635
          if (! reg_equiv_mem[REGNO (tmp)]
6636
              || num_not_at_initial_offset
6637
              || GET_CODE (old) == SUBREG)
6638
            real_old = rl->in;
6639
          else
6640
            real_old = reg_equiv_mem[REGNO (tmp)];
6641
        }
6642
 
6643
      second_reload_reg = rld[secondary_reload].reg_rtx;
6644
      if (rld[secondary_reload].secondary_in_reload >= 0)
6645
        {
6646
          int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6647
 
6648
          third_reload_reg = rld[tertiary_reload].reg_rtx;
6649
          tertiary_icode = rld[secondary_reload].secondary_in_icode;
6650
          /* We'd have to add more code for quaternary reloads.  */
6651
          gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6652
        }
6653
      icode = rl->secondary_in_icode;
6654
 
6655
      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6656
          || (rl->in != 0 && rl->out != 0))
6657
        {
6658
          secondary_reload_info sri, sri2;
6659
          enum reg_class new_class, new_t_class;
6660
 
6661
          sri.icode = CODE_FOR_nothing;
6662
          sri.prev_sri = NULL;
6663
          new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
6664
                                                mode, &sri);
6665
 
6666
          if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
6667
            second_reload_reg = 0;
6668
          else if (new_class == NO_REGS)
6669
            {
6670
              if (reload_adjust_reg_for_icode (&second_reload_reg,
6671
                                               third_reload_reg, sri.icode))
6672
                icode = sri.icode, third_reload_reg = 0;
6673
              else
6674
                oldequiv = old, real_oldequiv = real_old;
6675
            }
6676
          else if (sri.icode != CODE_FOR_nothing)
6677
            /* We currently lack a way to express this in reloads.  */
6678
            gcc_unreachable ();
6679
          else
6680
            {
6681
              sri2.icode = CODE_FOR_nothing;
6682
              sri2.prev_sri = &sri;
6683
              new_t_class = targetm.secondary_reload (1, real_oldequiv,
6684
                                                      new_class, mode, &sri);
6685
              if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
6686
                {
6687
                  if (reload_adjust_reg_for_temp (&second_reload_reg,
6688
                                                  third_reload_reg,
6689
                                                  new_class, mode))
6690
                    third_reload_reg = 0, tertiary_icode = sri2.icode;
6691
                  else
6692
                    oldequiv = old, real_oldequiv = real_old;
6693
                }
6694
              else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
6695
                {
6696
                  rtx intermediate = second_reload_reg;
6697
 
6698
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
6699
                                                  new_class, mode)
6700
                      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
6701
                                                      sri2.icode))
6702
                    {
6703
                      second_reload_reg = intermediate;
6704
                      tertiary_icode = sri2.icode;
6705
                    }
6706
                  else
6707
                    oldequiv = old, real_oldequiv = real_old;
6708
                }
6709
              else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
6710
                {
6711
                  rtx intermediate = second_reload_reg;
6712
 
6713
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
6714
                                                  new_class, mode)
6715
                      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
6716
                                                      new_t_class, mode))
6717
                    {
6718
                      second_reload_reg = intermediate;
6719
                      tertiary_icode = sri2.icode;
6720
                    }
6721
                  else
6722
                    oldequiv = old, real_oldequiv = real_old;
6723
                }
6724
              else
6725
                /* This could be handled more intelligently too.  */
6726
                oldequiv = old, real_oldequiv = real_old;
6727
            }
6728
        }
6729
 
6730
      /* If we still need a secondary reload register, check
6731
         to see if it is being used as a scratch or intermediate
6732
         register and generate code appropriately.  If we need
6733
         a scratch register, use REAL_OLDEQUIV since the form of
6734
         the insn may depend on the actual address if it is
6735
         a MEM.  */
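      /* Roughly, the two shapes generated below are (an illustrative
         sketch, not target-specific RTL):
           scratch:       one insn from ICODE loading RELOADREG from
                          REAL_OLDEQUIV with SECOND_RELOAD_REG as a
                          scratch;
           intermediate:  load SECOND_RELOAD_REG from REAL_OLDEQUIV
                          (possibly through THIRD_RELOAD_REG), then let
                          the final gen_reload below copy it into
                          RELOADREG, since OLDEQUIV is reset to
                          SECOND_RELOAD_REG here.  */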
6736
 
6737
      if (second_reload_reg)
6738
        {
6739
          if (icode != CODE_FOR_nothing)
6740
            {
6741
              /* We'd have to add extra code to handle this case.  */
6742
              gcc_assert (!third_reload_reg);
6743
 
6744
              emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6745
                                          second_reload_reg));
6746
              special = 1;
6747
            }
6748
          else
6749
            {
6750
              /* See if we need a scratch register to load the
6751
                 intermediate register (a tertiary reload).  */
6752
              if (tertiary_icode != CODE_FOR_nothing)
6753
                {
6754
                  emit_insn ((GEN_FCN (tertiary_icode)
6755
                              (second_reload_reg, real_oldequiv,
6756
                               third_reload_reg)));
6757
                }
6758
              else if (third_reload_reg)
6759
                {
6760
                  gen_reload (third_reload_reg, real_oldequiv,
6761
                              rl->opnum,
6762
                              rl->when_needed);
6763
                  gen_reload (second_reload_reg, third_reload_reg,
6764
                              rl->opnum,
6765
                              rl->when_needed);
6766
                }
6767
              else
6768
                gen_reload (second_reload_reg, real_oldequiv,
6769
                            rl->opnum,
6770
                            rl->when_needed);
6771
 
6772
              oldequiv = second_reload_reg;
6773
            }
6774
        }
6775
    }
6776
 
6777
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6778
    {
6779
      rtx real_oldequiv = oldequiv;
6780
 
6781
      if ((REG_P (oldequiv)
6782
           && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6783
           && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6784
               || reg_equiv_constant[REGNO (oldequiv)] != 0))
6785
          || (GET_CODE (oldequiv) == SUBREG
6786
              && REG_P (SUBREG_REG (oldequiv))
6787
              && (REGNO (SUBREG_REG (oldequiv))
6788
                  >= FIRST_PSEUDO_REGISTER)
6789
              && ((reg_equiv_memory_loc
6790
                   [REGNO (SUBREG_REG (oldequiv))] != 0)
6791
                  || (reg_equiv_constant
6792
                      [REGNO (SUBREG_REG (oldequiv))] != 0)))
6793
          || (CONSTANT_P (oldequiv)
6794
              && (PREFERRED_RELOAD_CLASS (oldequiv,
6795
                                          REGNO_REG_CLASS (REGNO (reloadreg)))
6796
                  == NO_REGS)))
6797
        real_oldequiv = rl->in;
6798
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
6799
                  rl->when_needed);
6800
    }
6801
 
6802
  if (flag_non_call_exceptions)
6803
    copy_eh_notes (insn, get_insns ());
6804
 
6805
  /* End this sequence.  */
6806
  *where = get_insns ();
6807
  end_sequence ();
6808
 
6809
  /* Update reload_override_in so that delete_address_reloads_1
6810
     can see the actual register usage.  */
6811
  if (oldequiv_reg)
6812
    reload_override_in[j] = oldequiv;
6813
}
6814
 
6815
/* Generate insns for the output reload RL, which is for the insn described
6816
   by CHAIN and has the number J.  */
6817
static void
6818
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6819
                          int j)
6820
{
6821
  rtx reloadreg = rl->reg_rtx;
6822
  rtx insn = chain->insn;
6823
  int special = 0;
6824
  rtx old = rl->out;
6825
  enum machine_mode mode = GET_MODE (old);
6826
  rtx p;
6827
 
6828
  if (rl->when_needed == RELOAD_OTHER)
6829
    start_sequence ();
6830
  else
6831
    push_to_sequence (output_reload_insns[rl->opnum]);
6832
 
6833
  /* Determine the mode to reload in.
6834
     See comments above (for input reloading).  */
6835
 
6836
  if (mode == VOIDmode)
6837
    {
6838
      /* VOIDmode should never happen for an output.  */
6839
      if (asm_noperands (PATTERN (insn)) < 0)
6840
        /* It's the compiler's fault.  */
6841
        fatal_insn ("VOIDmode on an output", insn);
6842
      error_for_asm (insn, "output operand is constant in %<asm%>");
6843
      /* Prevent crash--use something we know is valid.  */
6844
      mode = word_mode;
6845
      old = gen_rtx_REG (mode, REGNO (reloadreg));
6846
    }
6847
 
6848
  if (GET_MODE (reloadreg) != mode)
6849
    reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6850
 
6851
  /* If we need two reload regs, set RELOADREG to the intermediate
6852
     one, since it will be stored into OLD.  We might need a secondary
6853
     register only for an input reload, so check again here.  */
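  /* A rough sketch of the flow below: the value to be stored starts in
     the original RELOADREG (saved as second_reloadreg).  If the
     secondary register is only a scratch, a single insn stores
     second_reloadreg into OLD using it as a scratch; otherwise the value
     is first copied into the intermediate register (which RELOADREG is
     reset to), and the store into OLD is then done either by a
     scratch-using insn from the tertiary icode or by the final
     gen_reload near the end of this function.  */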
6854
 
6855
  if (rl->secondary_out_reload >= 0)
6856
    {
6857
      rtx real_old = old;
6858
      int secondary_reload = rl->secondary_out_reload;
6859
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;
6860
 
6861
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
6862
          && reg_equiv_mem[REGNO (old)] != 0)
6863
        real_old = reg_equiv_mem[REGNO (old)];
6864
 
6865
      if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
6866
        {
6867
          rtx second_reloadreg = reloadreg;
6868
          reloadreg = rld[secondary_reload].reg_rtx;
6869
 
6870
          /* See if RELOADREG is to be used as a scratch register
6871
             or as an intermediate register.  */
6872
          if (rl->secondary_out_icode != CODE_FOR_nothing)
6873
            {
6874
              /* We'd have to add extra code to handle this case.  */
6875
              gcc_assert (tertiary_reload < 0);
6876
 
6877
              emit_insn ((GEN_FCN (rl->secondary_out_icode)
6878
                          (real_old, second_reloadreg, reloadreg)));
6879
              special = 1;
6880
            }
6881
          else
6882
            {
6883
              /* See if we need both a scratch and intermediate reload
6884
                 register.  */
6885
 
6886
              enum insn_code tertiary_icode
6887
                = rld[secondary_reload].secondary_out_icode;
6888
 
6889
              /* We'd have to add more code for quaternary reloads.  */
6890
              gcc_assert (tertiary_reload < 0
6891
                          || rld[tertiary_reload].secondary_out_reload < 0);
6892
 
6893
              if (GET_MODE (reloadreg) != mode)
6894
                reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6895
 
6896
              if (tertiary_icode != CODE_FOR_nothing)
6897
                {
6898
                  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
6899
                  rtx tem;
6900
 
6901
                  /* Copy the primary reload reg to the secondary reload
6902
                     reg, then the secondary reload reg to OLD using our
6903
                     insn.  (Note that the two were swapped above.)  */
6904
 
6905
                  /* If REAL_OLD is a paradoxical SUBREG, remove it
6906
                     and try to put the opposite SUBREG on
6907
                     RELOADREG.  */
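                  /* Hypothetical example: if REAL_OLD is the paradoxical
                     (subreg:DI (reg:SI 123) 0), we store into
                     (reg:SI 123) itself and use the SImode lowpart of
                     RELOADREG instead, provided gen_lowpart_common can
                     produce one.  */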
6908
                  if (GET_CODE (real_old) == SUBREG
6909
                      && (GET_MODE_SIZE (GET_MODE (real_old))
6910
                          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6911
                      && 0 != (tem = gen_lowpart_common
6912
                               (GET_MODE (SUBREG_REG (real_old)),
6913
                                reloadreg)))
6914
                    real_old = SUBREG_REG (real_old), reloadreg = tem;
6915
 
6916
                  gen_reload (reloadreg, second_reloadreg,
6917
                              rl->opnum, rl->when_needed);
6918
                  emit_insn ((GEN_FCN (tertiary_icode)
6919
                              (real_old, reloadreg, third_reloadreg)));
6920
                  special = 1;
6921
                }
6922
 
6923
              else
6924
                {
6925
                  /* Copy between the reload regs here and then to
6926
                     OUT later.  */
6927
 
6928
                  gen_reload (reloadreg, second_reloadreg,
6929
                              rl->opnum, rl->when_needed);
6930
                  if (tertiary_reload >= 0)
6931
                    {
6932
                      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
6933
 
6934
                      gen_reload (third_reloadreg, reloadreg,
6935
                                  rl->opnum, rl->when_needed);
6936
                      reloadreg = third_reloadreg;
6937
                    }
6938
                }
6939
            }
6940
        }
6941
    }
6942
 
6943
  /* Output the last reload insn.  */
6944
  if (! special)
6945
    {
6946
      rtx set;
6947
 
6948
      /* Don't output the last reload if OLD is not the destination of
6949
         INSN, appears in its source, and is clobbered by INSN.  */
6950
      if (! flag_expensive_optimizations
6951
          || !REG_P (old)
6952
          || !(set = single_set (insn))
6953
          || rtx_equal_p (old, SET_DEST (set))
6954
          || !reg_mentioned_p (old, SET_SRC (set))
6955
          || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
6956
               && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
6957
        gen_reload (old, reloadreg, rl->opnum,
6958
                    rl->when_needed);
6959
    }
6960
 
6961
  /* Look at all insns we emitted, just to be safe.  */
6962
  for (p = get_insns (); p; p = NEXT_INSN (p))
6963
    if (INSN_P (p))
6964
      {
6965
        rtx pat = PATTERN (p);
6966
 
6967
        /* If this output reload doesn't come from a spill reg,
6968
           clear any memory of reloaded copies of the pseudo reg.
6969
           If this output reload comes from a spill reg,
6970
           reg_has_output_reload will make this do nothing.  */
6971
        note_stores (pat, forget_old_reloads_1, NULL);
6972
 
6973
        if (reg_mentioned_p (rl->reg_rtx, pat))
6974
          {
6975
            rtx set = single_set (insn);
6976
            if (reload_spill_index[j] < 0
6977
                && set
6978
                && SET_SRC (set) == rl->reg_rtx)
6979
              {
6980
                int src = REGNO (SET_SRC (set));
6981
 
6982
                reload_spill_index[j] = src;
6983
                SET_HARD_REG_BIT (reg_is_output_reload, src);
6984
                if (find_regno_note (insn, REG_DEAD, src))
6985
                  SET_HARD_REG_BIT (reg_reloaded_died, src);
6986
              }
6987
            if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6988
              {
6989
                int s = rl->secondary_out_reload;
6990
                set = single_set (p);
6991
                /* If this reload copies only to the secondary reload
6992
                   register, the secondary reload does the actual
6993
                   store.  */
6994
                if (s >= 0 && set == NULL_RTX)
6995
                  /* We can't tell what function the secondary reload
6996
                     has and where the actual store to the pseudo is
6997
                     made; leave new_spill_reg_store alone.  */
6998
                  ;
6999
                else if (s >= 0
7000
                         && SET_SRC (set) == rl->reg_rtx
7001
                         && SET_DEST (set) == rld[s].reg_rtx)
7002
                  {
7003
                    /* Usually the next instruction will be the
7004
                       secondary reload insn;  if we can confirm
7005
                       that it is, setting new_spill_reg_store to
7006
                       that insn will allow an extra optimization.  */
7007
                    rtx s_reg = rld[s].reg_rtx;
7008
                    rtx next = NEXT_INSN (p);
7009
                    rld[s].out = rl->out;
7010
                    rld[s].out_reg = rl->out_reg;
7011
                    set = single_set (next);
7012
                    if (set && SET_SRC (set) == s_reg
7013
                        && ! new_spill_reg_store[REGNO (s_reg)])
7014
                      {
7015
                        SET_HARD_REG_BIT (reg_is_output_reload,
7016
                                          REGNO (s_reg));
7017
                        new_spill_reg_store[REGNO (s_reg)] = next;
7018
                      }
7019
                  }
7020
                else
7021
                  new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
7022
              }
7023
          }
7024
      }
7025
 
7026
  if (rl->when_needed == RELOAD_OTHER)
7027
    {
7028
      emit_insn (other_output_reload_insns[rl->opnum]);
7029
      other_output_reload_insns[rl->opnum] = get_insns ();
7030
    }
7031
  else
7032
    output_reload_insns[rl->opnum] = get_insns ();
7033
 
7034
  if (flag_non_call_exceptions)
7035
    copy_eh_notes (insn, get_insns ());
7036
 
7037
  end_sequence ();
7038
}
7039
 
7040
/* Do input reloading for reload RL, which is for the insn described by CHAIN
7041
   and has the number J.  */
7042
static void
7043
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7044
{
7045
  rtx insn = chain->insn;
7046
  rtx old = (rl->in && MEM_P (rl->in)
7047
             ? rl->in_reg : rl->in);
7048
 
7049
  if (old != 0
7050
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
7051
         AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7052
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7053
      && ! rtx_equal_p (rl->reg_rtx, old)
7054
      && rl->reg_rtx != 0)
7055
    emit_input_reload_insns (chain, rld + j, old, j);
7056
 
7057
  /* When inheriting a wider reload, we have a MEM in rl->in,
7058
     e.g. inheriting a SImode output reload for
7059
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7060
  if (optimize && reload_inherited[j] && rl->in
7061
      && MEM_P (rl->in)
7062
      && MEM_P (rl->in_reg)
7063
      && reload_spill_index[j] >= 0
7064
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7065
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7066
 
7067
  /* If we are reloading a register that was recently stored in with an
7068
     output-reload, see if we can prove there was
7069
     actually no need to store the old value in it.  */
7070
 
7071
  if (optimize
7072
      /* Only attempt this for input reloads; for RELOAD_OTHER we miss
7073
         that there may be multiple uses of the previous output reload.
7074
         Restricting to RELOAD_FOR_INPUT is mostly paranoia.  */
7075
      && rl->when_needed == RELOAD_FOR_INPUT
7076
      && (reload_inherited[j] || reload_override_in[j])
7077
      && rl->reg_rtx
7078
      && REG_P (rl->reg_rtx)
7079
      && spill_reg_store[REGNO (rl->reg_rtx)] != 0
7080
#if 0
7081
      /* There doesn't seem to be any reason to restrict this to pseudos
7082
         and doing so loses in the case where we are copying from a
7083
         register of the wrong class.  */
7084
      && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
7085
          >= FIRST_PSEUDO_REGISTER)
7086
#endif
7087
      /* The insn might already have some references to stack slots
7088
         replaced by MEMs, while reload_out_reg still names the
7089
         original pseudo.  */
7090
      && (dead_or_set_p (insn,
7091
                         spill_reg_stored_to[REGNO (rl->reg_rtx)])
7092
          || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
7093
                          rl->out_reg)))
7094
    delete_output_reload (insn, j, REGNO (rl->reg_rtx));
7095
}
7096
 
7097
/* Do output reloading for reload RL, which is for the insn described by
7098
   CHAIN and has the number J.
7099
   ??? At some point we need to support handling output reloads of
7100
   JUMP_INSNs or insns that set cc0.  */
7101
static void
7102
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7103
{
7104
  rtx note, old;
7105
  rtx insn = chain->insn;
7106
  /* If this is an output reload that stores something that is
7107
     not loaded in this same reload, see if we can eliminate a previous
7108
     store.  */
7109
  rtx pseudo = rl->out_reg;
7110
 
7111
  if (pseudo
7112
      && optimize
7113
      && REG_P (pseudo)
7114
      && ! rtx_equal_p (rl->in_reg, pseudo)
7115
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7116
      && reg_last_reload_reg[REGNO (pseudo)])
7117
    {
7118
      int pseudo_no = REGNO (pseudo);
7119
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7120
 
7121
      /* We don't need to test full validity of last_regno for
7122
         inherit here; we only want to know if the store actually
7123
         matches the pseudo.  */
7124
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7125
          && reg_reloaded_contents[last_regno] == pseudo_no
7126
          && spill_reg_store[last_regno]
7127
          && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7128
        delete_output_reload (insn, j, last_regno);
7129
    }
7130
 
7131
  old = rl->out_reg;
7132
  if (old == 0
7133
      || rl->reg_rtx == old
7134
      || rl->reg_rtx == 0)
7135
    return;
7136
 
7137
  /* An output operand that dies right away does need a reload,
7138
     but need not be copied from it.  Show the new location in the
7139
     REG_UNUSED note.  */
7140
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7141
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7142
    {
7143
      XEXP (note, 0) = rl->reg_rtx;
7144
      return;
7145
    }
7146
  /* Likewise for a SUBREG of an operand that dies.  */
7147
  else if (GET_CODE (old) == SUBREG
7148
           && REG_P (SUBREG_REG (old))
7149
           && 0 != (note = find_reg_note (insn, REG_UNUSED,
7150
                                          SUBREG_REG (old))))
7151
    {
7152
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7153
                                           rl->reg_rtx);
7154
      return;
7155
    }
7156
  else if (GET_CODE (old) == SCRATCH)
7157
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
7158
       but we don't want to make an output reload.  */
7159
    return;
7160
 
7161
  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
7162
  gcc_assert (NONJUMP_INSN_P (insn));
7163
 
7164
  emit_output_reload_insns (chain, rld + j, j);
7165
}
7166
 
7167
/* Reload number R reloads from or to a group of hard registers starting at
7168
   register REGNO.  Return true if it can be treated for inheritance purposes
7169
   like a group of reloads, each one reloading a single hard register.
7170
   The caller has already checked that the spill register and REGNO use
7171
   the same number of registers to store the reload value.  */
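/* For instance (an illustrative case only): a DImode reload value held in
   two word-sized hard registers can usually be inherited as two separate
   word-mode pieces, but the check below refuses this when
   CANNOT_CHANGE_MODE_CLASS says either register cannot switch from the
   reload's mode to its natural (raw) mode.  */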
7172
 
7173
static bool
7174
inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7175
{
7176
#ifdef CANNOT_CHANGE_MODE_CLASS
7177
  return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7178
                                     GET_MODE (rld[r].reg_rtx),
7179
                                     reg_raw_mode[reload_spill_index[r]])
7180
          && !REG_CANNOT_CHANGE_MODE_P (regno,
7181
                                        GET_MODE (rld[r].reg_rtx),
7182
                                        reg_raw_mode[regno]));
7183
#else
7184
  return true;
7185
#endif
7186
}
7187
 
7188
/* Output insns to reload values in and out of the chosen reload regs.  */
7189
 
7190
static void
7191
emit_reload_insns (struct insn_chain *chain)
7192
{
7193
  rtx insn = chain->insn;
7194
 
7195
  int j;
7196
 
7197
  CLEAR_HARD_REG_SET (reg_reloaded_died);
7198
 
7199
  for (j = 0; j < reload_n_operands; j++)
7200
    input_reload_insns[j] = input_address_reload_insns[j]
7201
      = inpaddr_address_reload_insns[j]
7202
      = output_reload_insns[j] = output_address_reload_insns[j]
7203
      = outaddr_address_reload_insns[j]
7204
      = other_output_reload_insns[j] = 0;
7205
  other_input_address_reload_insns = 0;
7206
  other_input_reload_insns = 0;
7207
  operand_reload_insns = 0;
7208
  other_operand_reload_insns = 0;
7209
 
7210
  /* Dump reloads into the dump file.  */
7211
  if (dump_file)
7212
    {
7213
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7214
      debug_reload_to_stream (dump_file);
7215
    }
7216
 
7217
  /* Now output the instructions to copy the data into and out of the
7218
     reload registers.  Do these in the order that the reloads were reported,
7219
     since reloads of base and index registers precede reloads of operands
7220
     and the operands may need the base and index registers reloaded.  */
7221
 
7222
  for (j = 0; j < n_reloads; j++)
7223
    {
7224
      if (rld[j].reg_rtx
7225
          && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7226
        new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7227
 
7228
      do_input_reload (chain, rld + j, j);
7229
      do_output_reload (chain, rld + j, j);
7230
    }
7231
 
7232
  /* Now write all the insns we made for reloads in the order expected by
7233
     the allocation functions.  Prior to the insn being reloaded, we write
7234
     the following reloads:
7235
 
7236
     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7237
 
7238
     RELOAD_OTHER reloads.
7239
 
7240
     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7241
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7242
     RELOAD_FOR_INPUT reload for the operand.
7243
 
7244
     RELOAD_FOR_OPADDR_ADDRS reloads.
7245
 
7246
     RELOAD_FOR_OPERAND_ADDRESS reloads.
7247
 
7248
     After the insn being reloaded, we write the following:
7249
 
7250
     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7251
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7252
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7253
     reloads for the operand.  The RELOAD_OTHER output reloads are
7254
     output in descending order by reload number.  */
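  /* Schematically (an illustration only), for an insn with a single input
     and a single output operand the surrounding stream comes out as:
       other-address, RELOAD_OTHER, inpaddr-address, input-address,
       input reloads, opaddr-addrs, operand-address reloads,
       INSN,
       outaddr-address, output-address, output, other-output reloads.  */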
7255
 
7256
  emit_insn_before (other_input_address_reload_insns, insn);
7257
  emit_insn_before (other_input_reload_insns, insn);
7258
 
7259
  for (j = 0; j < reload_n_operands; j++)
7260
    {
7261
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
7262
      emit_insn_before (input_address_reload_insns[j], insn);
7263
      emit_insn_before (input_reload_insns[j], insn);
7264
    }
7265
 
7266
  emit_insn_before (other_operand_reload_insns, insn);
7267
  emit_insn_before (operand_reload_insns, insn);
7268
 
7269
  for (j = 0; j < reload_n_operands; j++)
7270
    {
7271
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7272
      x = emit_insn_after (output_address_reload_insns[j], x);
7273
      x = emit_insn_after (output_reload_insns[j], x);
7274
      emit_insn_after (other_output_reload_insns[j], x);
7275
    }
7276
 
7277
  /* For all the spill regs newly reloaded in this instruction,
7278
     record what they were reloaded from, so subsequent instructions
7279
     can inherit the reloads.
7280
 
7281
     Update spill_reg_store for the reloads of this insn.
7282
     Copy the elements that were updated in the loop above.  */
7283
 
7284
  for (j = 0; j < n_reloads; j++)
7285
    {
7286
      int r = reload_order[j];
7287
      int i = reload_spill_index[r];
7288
 
7289
      /* If this is a non-inherited input reload from a pseudo, we must
7290
         clear any memory of a previous store to the same pseudo.  Only do
7291
         something if there will not be an output reload for the pseudo
7292
         being reloaded.  */
7293
      if (rld[r].in_reg != 0
7294
          && ! (reload_inherited[r] || reload_override_in[r]))
7295
        {
7296
          rtx reg = rld[r].in_reg;
7297
 
7298
          if (GET_CODE (reg) == SUBREG)
7299
            reg = SUBREG_REG (reg);
7300
 
7301
          if (REG_P (reg)
7302
              && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7303
              && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7304
            {
7305
              int nregno = REGNO (reg);
7306
 
7307
              if (reg_last_reload_reg[nregno])
7308
                {
7309
                  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7310
 
7311
                  if (reg_reloaded_contents[last_regno] == nregno)
7312
                    spill_reg_store[last_regno] = 0;
7313
                }
7314
            }
7315
        }
7316
 
7317
      /* I is nonneg if this reload used a register.
7318
         If rld[r].reg_rtx is 0, this is an optional reload
7319
         that we opted to ignore.  */
7320
 
7321
      if (i >= 0 && rld[r].reg_rtx != 0)
7322
        {
7323
          int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7324
          int k;
7325
          int part_reaches_end = 0;
7326
          int all_reaches_end = 1;
7327
 
7328
          /* For a multi-register reload, we need to check if all or part
7329
             of the value lives to the end.  */
7330
          for (k = 0; k < nr; k++)
7331
            {
7332
              if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7333
                                            rld[r].when_needed))
7334
                part_reaches_end = 1;
7335
              else
7336
                all_reaches_end = 0;
7337
            }
7338
 
7339
          /* Ignore reloads that don't reach the end of the insn in
7340
             their entirety.  */
7341
          if (all_reaches_end)
7342
            {
7343
              /* First, clear out memory of what used to be in this spill reg.
7344
                 If consecutive registers are used, clear them all.  */
7345
 
7346
              for (k = 0; k < nr; k++)
7347
                {
7348
                  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7349
                  CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7350
                }
7351
 
7352
              /* Maybe the spill reg contains a copy of reload_out.  */
7353
              if (rld[r].out != 0
7354
                  && (REG_P (rld[r].out)
7355
#ifdef AUTO_INC_DEC
7356
                      || ! rld[r].out_reg
7357
#endif
7358
                      || REG_P (rld[r].out_reg)))
7359
                {
7360
                  rtx out = (REG_P (rld[r].out)
7361
                             ? rld[r].out
7362
                             : rld[r].out_reg
7363
                             ? rld[r].out_reg
7364
/* AUTO_INC */               : XEXP (rld[r].in_reg, 0));
7365
                  int nregno = REGNO (out);
7366
                  int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7367
                             : hard_regno_nregs[nregno]
7368
                                               [GET_MODE (rld[r].reg_rtx)]);
7369
                  bool piecemeal;
7370
 
7371
                  spill_reg_store[i] = new_spill_reg_store[i];
7372
                  spill_reg_stored_to[i] = out;
7373
                  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7374
 
7375
                  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7376
                               && nr == nnr
7377
                               && inherit_piecemeal_p (r, nregno));
7378
 
7379
                  /* If NREGNO is a hard register, it may occupy more than
7380
                     one register.  If it does, say what is in the
7381
                     rest of the registers assuming that both registers
7382
                     agree on how many words the object takes.  If not,
7383
                     invalidate the subsequent registers.  */
7384
 
7385
                  if (nregno < FIRST_PSEUDO_REGISTER)
7386
                    for (k = 1; k < nnr; k++)
7387
                      reg_last_reload_reg[nregno + k]
7388
                        = (piecemeal
7389
                           ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7390
                           : 0);
7391
 
7392
                  /* Now do the inverse operation.  */
7393
                  for (k = 0; k < nr; k++)
7394
                    {
7395
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7396
                      reg_reloaded_contents[i + k]
7397
                        = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7398
                           ? nregno
7399
                           : nregno + k);
7400
                      reg_reloaded_insn[i + k] = insn;
7401
                      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7402
                      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7403
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7404
                    }
7405
                }
7406
 
7407
              /* Maybe the spill reg contains a copy of reload_in.  Only do
7408
                 something if there will not be an output reload for
7409
                 the register being reloaded.  */
7410
              else if (rld[r].out_reg == 0
7411
                       && rld[r].in != 0
7412
                       && ((REG_P (rld[r].in)
7413
                            && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7414
                            && !REGNO_REG_SET_P (&reg_has_output_reload,
7415
                                                 REGNO (rld[r].in)))
7416
                           || (REG_P (rld[r].in_reg)
7417
                               && !REGNO_REG_SET_P (&reg_has_output_reload,
7418
                                                    REGNO (rld[r].in_reg))))
7419
                       && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7420
                {
7421
                  int nregno;
7422
                  int nnr;
7423
                  rtx in;
7424
                  bool piecemeal;
7425
 
7426
                  if (REG_P (rld[r].in)
7427
                      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7428
                    in = rld[r].in;
7429
                  else if (REG_P (rld[r].in_reg))
7430
                    in = rld[r].in_reg;
7431
                  else
7432
                    in = XEXP (rld[r].in_reg, 0);
7433
                  nregno = REGNO (in);
7434
 
7435
                  nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7436
                         : hard_regno_nregs[nregno]
7437
                                           [GET_MODE (rld[r].reg_rtx)]);
7438
 
7439
                  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7440
 
7441
                  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7442
                               && nr == nnr
7443
                               && inherit_piecemeal_p (r, nregno));
7444
 
7445
                  if (nregno < FIRST_PSEUDO_REGISTER)
7446
                    for (k = 1; k < nnr; k++)
7447
                      reg_last_reload_reg[nregno + k]
7448
                        = (piecemeal
7449
                           ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7450
                           : 0);
7451
 
7452
                  /* Unless we inherited this reload, show we haven't
7453
                     recently done a store.
7454
                     Previous stores of inherited auto_inc expressions
7455
                     also have to be discarded.  */
7456
                  if (! reload_inherited[r]
7457
                      || (rld[r].out && ! rld[r].out_reg))
7458
                    spill_reg_store[i] = 0;
7459
 
7460
                  for (k = 0; k < nr; k++)
7461
                    {
7462
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7463
                      reg_reloaded_contents[i + k]
7464
                        = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7465
                           ? nregno
7466
                           : nregno + k);
7467
                      reg_reloaded_insn[i + k] = insn;
7468
                      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7469
                      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7470
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7471
                    }
7472
                }
7473
            }
7474
 
7475
          /* However, if part of the reload reaches the end, then we must
7476
             invalidate the old info for the part that survives to the end.  */
7477
          else if (part_reaches_end)
7478
            {
7479
              for (k = 0; k < nr; k++)
7480
                if (reload_reg_reaches_end_p (i + k,
7481
                                              rld[r].opnum,
7482
                                              rld[r].when_needed))
7483
                  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7484
            }
7485
        }
7486
 
7487
      /* The following if-statement was #if 0'd in 1.34 (or before...).
7488
         It's reenabled in 1.35 because supposedly nothing else
7489
         deals with this problem.  */
7490
 
7491
      /* If a register gets output-reloaded from a non-spill register,
7492
         that invalidates any previous reloaded copy of it.
7493
         But forget_old_reloads_1 won't get to see it, because
7494
         it thinks only about the original insn.  So invalidate it here.
7495
         Also do the same thing for RELOAD_OTHER constraints where the
7496
         output is discarded.  */
7497
      if (i < 0
7498
          && ((rld[r].out != 0
7499
               && (REG_P (rld[r].out)
7500
                   || (MEM_P (rld[r].out)
7501
                       && REG_P (rld[r].out_reg))))
7502
              || (rld[r].out == 0 && rld[r].out_reg
7503
                  && REG_P (rld[r].out_reg))))
7504
        {
7505
          rtx out = ((rld[r].out && REG_P (rld[r].out))
7506
                     ? rld[r].out : rld[r].out_reg);
7507
          int nregno = REGNO (out);
7508
          if (nregno >= FIRST_PSEUDO_REGISTER)
7509
            {
7510
              rtx src_reg, store_insn = NULL_RTX;
7511
 
7512
              reg_last_reload_reg[nregno] = 0;
7513
 
7514
              /* If we can find a hard register that is stored, record
7515
                 the storing insn so that we may delete this insn with
7516
                 delete_output_reload.  */
7517
              src_reg = rld[r].reg_rtx;
7518
 
7519
              /* If this is an optional reload, try to find the source reg
7520
                 from an input reload.  */
7521
              if (! src_reg)
7522
                {
7523
                  rtx set = single_set (insn);
7524
                  if (set && SET_DEST (set) == rld[r].out)
7525
                    {
7526
                      int k;
7527
 
7528
                      src_reg = SET_SRC (set);
7529
                      store_insn = insn;
7530
                      for (k = 0; k < n_reloads; k++)
7531
                        {
7532
                          if (rld[k].in == src_reg)
7533
                            {
7534
                              src_reg = rld[k].reg_rtx;
7535
                              break;
7536
                            }
7537
                        }
7538
                    }
7539
                }
7540
              else
7541
                store_insn = new_spill_reg_store[REGNO (src_reg)];
7542
              if (src_reg && REG_P (src_reg)
7543
                  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7544
                {
7545
                  int src_regno = REGNO (src_reg);
7546
                  int nr = hard_regno_nregs[src_regno][rld[r].mode];
7547
                  /* The place to find a death note varies with
7548
                     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
7549
                     necessarily checked exactly in the code that moves
7550
                     notes, so just check both locations.  */
7551
                  rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7552
                  if (! note && store_insn)
7553
                    note = find_regno_note (store_insn, REG_DEAD, src_regno);
7554
                  while (nr-- > 0)
7555
                    {
7556
                      spill_reg_store[src_regno + nr] = store_insn;
7557
                      spill_reg_stored_to[src_regno + nr] = out;
7558
                      reg_reloaded_contents[src_regno + nr] = nregno;
7559
                      reg_reloaded_insn[src_regno + nr] = store_insn;
7560
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7561
                      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7562
                      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7563
                                                          GET_MODE (src_reg)))
7564
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7565
                                          src_regno + nr);
7566
                      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7567
                      if (note)
7568
                        SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7569
                      else
7570
                        CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7571
                    }
7572
                  reg_last_reload_reg[nregno] = src_reg;
7573
                  /* We have to set reg_has_output_reload here, or else
7574
                     forget_old_reloads_1 will clear reg_last_reload_reg
7575
                     right away.  */
7576
                  SET_REGNO_REG_SET (&reg_has_output_reload,
7577
                                     nregno);
7578
                }
7579
            }
7580
          else
7581
            {
7582
              int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
7583
 
7584
              while (num_regs-- > 0)
7585
                reg_last_reload_reg[nregno + num_regs] = 0;
7586
            }
7587
        }
7588
    }
7589
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7590
}
7591
 
7592
/* Go through the motions to emit INSN and test if it is strictly valid.
7593
   Return the emitted insn if valid, else return NULL.  */
7594
 
7595
static rtx
7596
emit_insn_if_valid_for_reload (rtx insn)
7597
{
7598
  rtx last = get_last_insn ();
7599
  int code;
7600
 
7601
  insn = emit_insn (insn);
7602
  code = recog_memoized (insn);
7603
 
7604
  if (code >= 0)
7605
    {
7606
      extract_insn (insn);
7607
      /* We want constrain_operands to treat this insn strictly in its
7608
         validity determination, i.e., the way it would after reload has
7609
         completed.  */
7610
      if (constrain_operands (1))
7611
        return insn;
7612
    }
7613
 
7614
  delete_insns_since (last);
7615
  return NULL;
7616
}
7617
 
7618
/* Emit code to perform a reload from IN (which may be a reload register) to
7619
   OUT (which may also be a reload register).  IN or OUT is from operand
7620
   OPNUM with reload type TYPE.
7621
 
7622
   Returns first insn emitted.  */
7623
 
7624
static rtx
7625
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7626
{
7627
  rtx last = get_last_insn ();
7628
  rtx tem;
7629
 
7630
  /* If IN is a paradoxical SUBREG, remove it and try to put the
7631
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
7632
  if (GET_CODE (in) == SUBREG
7633
      && (GET_MODE_SIZE (GET_MODE (in))
7634
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7635
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7636
    in = SUBREG_REG (in), out = tem;
7637
  else if (GET_CODE (out) == SUBREG
7638
           && (GET_MODE_SIZE (GET_MODE (out))
7639
               > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7640
           && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7641
    out = SUBREG_REG (out), in = tem;
7642
 
7643
  /* How to do this reload can get quite tricky.  Normally, we are being
7644
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7645
     register that didn't get a hard register.  In that case we can just
7646
     call emit_move_insn.
7647
 
7648
     We can also be asked to reload a PLUS that adds a register or a MEM to
7649
     another register, constant or MEM.  This can occur during frame pointer
7650
     elimination and while reloading addresses.  This case is handled by
7651
     trying to emit a single insn to perform the add.  If it is not valid,
7652
     we use a two insn sequence.
7653
 
7654
     Or we can be asked to reload a unary operand that was a fragment of
7655
     an addressing mode, into a register.  If it isn't recognized as-is,
7656
     we try making the unop operand and the reload-register the same:
7657
     (set reg:X (unop:X expr:Y))
7658
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7659
 
7660
     Finally, we could be called to handle an 'o' constraint by putting
7661
     an address into a register.  In that case, we first try to do this
7662
     with a named pattern of "reload_load_address".  If no such pattern
7663
     exists, we just emit a SET insn and hope for the best (it will normally
7664
     be valid on machines that use 'o').
7665
 
7666
     This entire process is made complex by the fact that reload will never
7667
     process the insns we generate here (so we must ensure that they will
7668
     fit their constraints), and by the fact that parts of IN might be
7669
     reloaded separately and replaced with spill registers.
7670
     Because of this, we are, in some sense, just guessing the right approach
7671
     here.  The one listed above seems to work.
7672
 
7673
     ??? At some point, this whole thing needs to be rethought.  */
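  /* As a concrete illustration (operands hypothetical): asked to reload
     IN = (plus:SI (reg:SI 14 fp) (const_int 64)) into OUT, the PLUS case
     below first tries the single insn
       (set OUT (plus:SI (reg:SI 14 fp) (const_int 64)));
     if that is not recognized, it moves one of the operands into OUT and
     then emits an add of the other operand into OUT, trying both orders
     before giving up.  */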
7674
 
7675
  if (GET_CODE (in) == PLUS
7676
      && (REG_P (XEXP (in, 0))
7677
          || GET_CODE (XEXP (in, 0)) == SUBREG
7678
          || MEM_P (XEXP (in, 0)))
7679
      && (REG_P (XEXP (in, 1))
7680
          || GET_CODE (XEXP (in, 1)) == SUBREG
7681
          || CONSTANT_P (XEXP (in, 1))
7682
          || MEM_P (XEXP (in, 1))))
7683
    {
7684
      /* We need to compute the sum of a register or a MEM and another
7685
         register, constant, or MEM, and put it into the reload
7686
         register.  The best possible way of doing this is if the machine
7687
         has a three-operand ADD insn that accepts the required operands.
7688
 
7689
         The simplest approach is to try to generate such an insn and see if it
7690
         is recognized and matches its constraints.  If so, it can be used.
7691
 
7692
         It might be better not to actually emit the insn unless it is valid,
7693
         but we need to pass the insn as an operand to `recog' and
7694
         `extract_insn' and it is simpler to emit and then delete the insn if
7695
         not valid than to dummy things up.  */
7696
 
7697
      rtx op0, op1, tem, insn;
7698
      int code;
7699
 
7700
      op0 = find_replacement (&XEXP (in, 0));
7701
      op1 = find_replacement (&XEXP (in, 1));
7702
 
7703
      /* Since constraint checking is strict, commutativity won't be
7704
         checked, so we need to do that here to avoid spurious failure
7705
         if the add instruction is two-address and the second operand
7706
         of the add is the same as the reload reg, which is frequently
7707
         the case.  If the insn would be A = B + A, rearrange it so
7708
         it will be A = A + B as constrain_operands expects.  */
7709
 
7710
      if (REG_P (XEXP (in, 1))
7711
          && REGNO (out) == REGNO (XEXP (in, 1)))
7712
        tem = op0, op0 = op1, op1 = tem;
7713
 
7714
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7715
        in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7716
 
7717
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7718
      if (insn)
7719
        return insn;
7720
 
7721
      /* If that failed, we must use a conservative two-insn sequence.
7722
 
7723
         Use a move to copy one operand into the reload register.  Prefer
7724
         to reload a constant, MEM or pseudo since the move patterns can
7725
         handle an arbitrary operand.  If OP1 is not a constant, MEM or
7726
         pseudo and OP1 is not a valid operand for an add instruction, then
7727
         reload OP1.
7728
 
7729
         After reloading one of the operands into the reload register, add
7730
         the reload register to the output register.
7731
 
7732
         If there is another way to do this for a specific machine, a
7733
         DEFINE_PEEPHOLE should be specified that recognizes the sequence
7734
         we emit below.  */
7735
 
7736
      code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7737
 
7738
      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
7739
          || (REG_P (op1)
7740
              && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7741
          || (code != CODE_FOR_nothing
7742
              && ! ((*insn_data[code].operand[2].predicate)
7743
                    (op1, insn_data[code].operand[2].mode))))
7744
        tem = op0, op0 = op1, op1 = tem;
7745
 
7746
      gen_reload (out, op0, opnum, type);
7747
 
7748
      /* If OP0 and OP1 are the same, we can use OUT for OP1.
7749
         This fixes a problem on the 32K (ns32k) where the stack pointer cannot
7750
         be used as an operand of an add insn.  */
7751
 
7752
      if (rtx_equal_p (op0, op1))
7753
        op1 = out;
7754
 
7755
      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
7756
      if (insn)
7757
        {
7758
          /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
7759
          REG_NOTES (insn)
7760
            = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7761
          return insn;
7762
        }
7763
 
7764
      /* If that failed, copy the address register to the reload register.
7765
         Then add the constant to the reload register.  */
7766
 
7767
      gen_reload (out, op1, opnum, type);
7768
      insn = emit_insn (gen_add2_insn (out, op0));
7769
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7770
    }
7771
 
7772
#ifdef SECONDARY_MEMORY_NEEDED
7773
  /* If we need a memory location to do the move, do it that way.  */
7774
  else if ((REG_P (in) || GET_CODE (in) == SUBREG)
7775
           && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7776
           && (REG_P (out) || GET_CODE (out) == SUBREG)
7777
           && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7778
           && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7779
                                       REGNO_REG_CLASS (reg_or_subregno (out)),
7780
                                       GET_MODE (out)))
7781
    {
7782
      /* Get the memory to use and rewrite both registers to its mode.  */
7783
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7784
 
7785
      if (GET_MODE (loc) != GET_MODE (out))
7786
        out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7787
 
7788
      if (GET_MODE (loc) != GET_MODE (in))
7789
        in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7790
 
7791
      gen_reload (loc, in, opnum, type);
7792
      gen_reload (out, loc, opnum, type);
7793
    }
7794
#endif
7795
  else if (REG_P (out) && UNARY_P (in))
7796
    {
7797
      rtx insn;
7798
      rtx op1;
7799
      rtx out_moded;
7800
      rtx set;
7801
 
7802
      op1 = find_replacement (&XEXP (in, 0));
7803
      if (op1 != XEXP (in, 0))
7804
        in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
7805
 
7806
      /* First, try a plain SET.  */
7807
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7808
      if (set)
7809
        return set;
7810
 
7811
      /* If that failed, move the inner operand to the reload
7812
         register, and try the same unop with the inner expression
7813
         replaced with the reload register.  */
7814
 
7815
      if (GET_MODE (op1) != GET_MODE (out))
7816
        out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
7817
      else
7818
        out_moded = out;
7819
 
7820
      gen_reload (out_moded, op1, opnum, type);
7821
 
7822
      insn
7823
        = gen_rtx_SET (VOIDmode, out,
7824
                       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
7825
                                      out_moded));
7826
      insn = emit_insn_if_valid_for_reload (insn);
7827
      if (insn)
7828
        {
7829
          REG_NOTES (insn)
7830
            = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7831
          return insn;
7832
        }
7833
 
7834
      fatal_insn ("Failure trying to reload:", set);
7835
    }
7836
  /* If IN is a simple operand, use gen_move_insn.  */
7837
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
7838
    {
7839
      tem = emit_insn (gen_move_insn (out, in));
7840
      /* IN may contain a LABEL_REF; if so, add a REG_LABEL note.  */
7841
      mark_jump_label (in, tem, 0);
7842
    }
7843
 
7844
#ifdef HAVE_reload_load_address
7845
  else if (HAVE_reload_load_address)
7846
    emit_insn (gen_reload_load_address (out, in));
7847
#endif
7848
 
7849
  /* Otherwise, just write (set OUT IN) and hope for the best.  */
7850
  else
7851
    emit_insn (gen_rtx_SET (VOIDmode, out, in));
7852
 
7853
  /* Return the first insn emitted.
7854
     We can not just return get_last_insn, because there may have
7855
     been multiple instructions emitted.  Also note that gen_move_insn may
7856
     emit more than one insn itself, so we can not assume that there is one
7857
     insn emitted per emit_insn_before call.  */
7858
 
7859
  return last ? NEXT_INSN (last) : get_insns ();
7860
}
7861
 
7862
/* Delete a previously made output-reload whose result we now believe
7863
   is not needed.  First we double-check.
7864
 
7865
   INSN is the insn now being processed.
7866
   LAST_RELOAD_REG is the hard register number for which we want to delete
7867
   the last output reload.
7868
   J is the reload-number that originally used REG.  The caller has made
7869
   certain that reload J doesn't use REG any longer for input.  */
7870
 
7871
static void
7872
delete_output_reload (rtx insn, int j, int last_reload_reg)
7873
{
7874
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
7875
  rtx reg = spill_reg_stored_to[last_reload_reg];
7876
  int k;
7877
  int n_occurrences;
7878
  int n_inherited = 0;
7879
  rtx i1;
7880
  rtx substed;
7881
 
7882
  /* It is possible that this reload has only been used to set another reload
7883
     we eliminated earlier and thus deleted this instruction too.  */
7884
  if (INSN_DELETED_P (output_reload_insn))
7885
    return;
7886
 
7887
  /* Get the raw pseudo-register referred to.  */
7888
 
7889
  while (GET_CODE (reg) == SUBREG)
7890
    reg = SUBREG_REG (reg);
7891
  substed = reg_equiv_memory_loc[REGNO (reg)];
7892
 
7893
  /* This is unsafe if the operand occurs more often in the current
7894
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
        continue;
      if (MEM_P (reg2) || reload_override_in[k])
        reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
        reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
        reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
        {
          if (reload_inherited[k] || reload_override_in[k] || k == j)
            {
              n_inherited++;
              reg2 = rld[k].out_reg;
              if (! reg2)
                continue;
              while (GET_CODE (reg2) == SUBREG)
                reg2 = XEXP (reg2, 0);
              if (rtx_equal_p (reg2, reg))
                n_inherited++;
            }
          else
            return;
        }
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
                                        eliminate_regs (substed, 0,
                                                        NULL_RTX), 0);
  for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
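  /* (A NOTE_INSN_BASIC_BLOCK note below means we have crossed a block
     boundary, so the value might reach INSN along some other path; in that
     case we bail out rather than risk deleting a store that is still
     needed.)  */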
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
        return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
          && reg_mentioned_p (reg, PATTERN (i1)))
        {
          /* If this is USE in front of INSN, we only have to check that
             there are no more references than accounted for by inheritance.  */
          while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
            {
              n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
              i1 = NEXT_INSN (i1);
            }
          if (n_occurrences <= n_inherited && i1 == insn)
            break;
          return;
        }
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
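  /* (The conditions below check, roughly, that the pseudo dies exactly once,
     is set exactly once, and lives entirely within one basic block, and that
     INSN carries the REG_DEAD note for it; only then is it plausible that
     every store into the pseudo has become dead.)  */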
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
         the current basic block.  (We also know that the last use before
         INSN was the output reload we are thinking of deleting, but never
         mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
        {
          rtx set = single_set (i2);

          /* Uses which just store in the pseudo don't count,
             since if they are the only uses, they are dead.  */
          if (set != 0 && SET_DEST (set) == reg)
            continue;
          if (LABEL_P (i2)
              || JUMP_P (i2))
            break;
          if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
              && reg_mentioned_p (reg, PATTERN (i2)))
            {
              /* Some other ref remains; just delete the output reload we
                 know to be dead.  */
              delete_address_reloads (output_reload_insn, insn);
              delete_insn (output_reload_insn);
              return;
            }
        }

      /* Delete the now-dead stores into this pseudo.  Note that this
         loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
        {
          rtx set = single_set (i2);

          if (set != 0 && SET_DEST (set) == reg)
            {
              delete_address_reloads (i2, insn);
              delete_insn (i2);
            }
          if (LABEL_P (i2)
              || JUMP_P (i2))
            break;
        }

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
      alter_reg (REGNO (reg), -1);
    }
  else
    {
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}

/* We are going to delete DEAD_INSN.  Recursively delete loads of
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
static void
delete_address_reloads (rtx dead_insn, rtx current_insn)
{
  rtx set = single_set (dead_insn);
  rtx set2, dst, prev, next;
  if (set)
    {
      rtx dst = SET_DEST (set);
      if (MEM_P (dst))
        delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
    }
  /* If we deleted the store from a reloaded post_{in,de}c expression,
     we can delete the matching adds.  */
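  /* (For illustration, a sketch of the shape matched below, with hypothetical
     registers: R is the reload register, A the location being auto-
     incremented, N the increment:

         prev:       (set (reg R) (plus (reg R) (const_int N)))
         dead_insn:  (set A (reg R))            ;; the store just deleted
         next:       (set (reg R) (plus (reg R) (const_int -N)))

     If PREV and NEXT form such a cancelling add/subtract pair around
     DEAD_INSN, they are useless once the store is gone and are deleted
     too.)  */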
  prev = PREV_INSN (dead_insn);
  next = NEXT_INSN (dead_insn);
  if (! prev || ! next)
    return;
  set = single_set (next);
  set2 = single_set (prev);
  if (! set || ! set2
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
      || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
      || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
    return;
  dst = SET_DEST (set);
  if (! rtx_equal_p (dst, SET_DEST (set2))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
      || (INTVAL (XEXP (SET_SRC (set), 1))
          != -INTVAL (XEXP (SET_SRC (set2), 1))))
    return;
  delete_related_insns (prev);
  delete_related_insns (next);
}

/* Subfunction of delete_address_reloads: process registers found in X.  */
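/* (X is an address, or a subexpression of one, that appeared in DEAD_INSN.
   Any spill register found in it may have been loaded solely to form that
   address; if so, the insn that loaded it can be deleted as well, and the
   deletion recurses through that insn's own source address.)  */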
static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        {
          if (fmt[i] == 'e')
            delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
          else if (fmt[i] == 'E')
            {
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
                                          current_insn);
            }
        }
      return;
    }

  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets X.  The setter may be a long way
     back, due to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
        return;
      if (!INSN_P (prev))
        continue;
      if (reg_set_p (x, PATTERN (prev)))
        break;
      if (reg_referenced_p (x, PATTERN (prev)))
        return;
    }
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
         it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
        {
          if (LABEL_P (i2))
            break;
          if (! INSN_P (i2))
            continue;
          if (reg_referenced_p (dst, PATTERN (i2)))
            {
              /* If there is a reference to the register in the current insn,
                 it might be loaded in a non-inherited reload.  If no other
                 reload uses it, that means the register is set before
                 referenced.  */
              if (i2 == current_insn)
                {
                  for (j = n_reloads - 1; j >= 0; j--)
                    if ((rld[j].reg_rtx == dst && reload_inherited[j])
                        || reload_override_in[j] == dst)
                      return;
                  for (j = n_reloads - 1; j >= 0; j--)
                    if (rld[j].in && rld[j].reg_rtx == dst)
                      break;
                  if (j >= 0)
                    break;
                }
              return;
            }
          if (JUMP_P (i2))
            break;
          /* If DST is still live at CURRENT_INSN, check if it is used for
             any reload.  Note that even if CURRENT_INSN sets DST, we still
             have to check the reloads.  */
          if (i2 == current_insn)
            {
              for (j = n_reloads - 1; j >= 0; j--)
                if ((rld[j].reg_rtx == dst && reload_inherited[j])
                    || reload_override_in[j] == dst)
                  return;
              /* ??? We can't finish the loop here, because dst might be
                 allocated to a pseudo in this block if no reload in this
                 block needs any of the classes containing DST - see
                 spill_hard_reg.  There is no easy way to tell this, so we
                 have to scan till the end of the basic block.  */
            }
          if (reg_set_p (dst, PATTERN (i2)))
            break;
        }
    }
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}

/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */
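/* (As a rough illustration, assuming no single add insn can update the
   location directly: for VALUE == (post_inc (reg A)) with increment N and
   reload register R, the fallback path below emits approximately

       (set (reg R) (reg A))                       ;; copy the old value
       (set (reg R) (plus (reg R) (const_int N)))  ;; increment the copy
       (set (reg A) (reg R))                       ;; store it back
       (set (reg R) (plus (reg R) (const_int -N))) ;; R keeps the old value

   whereas for a pre-increment the new value is what ends up in R.)  */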

static rtx
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
              || GET_CODE (value) == POST_MODIFY);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx store;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;
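
  /* (For {PRE,POST}_MODIFY, VALUE has the form (pre_modify A (plus A B)) or
     (post_modify A (plus A B)), so the amount to add is the second operand
     of that PLUS and may itself be a register; this is why it is taken from
     VALUE below rather than built from INC_AMOUNT.)  */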

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
        inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
         that in gen_reload.  */
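      /* (That is: emit the add provisionally, then ask recog_memoized and
         constrain_operands whether some insn pattern really accepts it; if
         not, delete_insns_since rolls everything back and we fall through
         to the general copy/add/store sequence below.)  */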

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
                                         gen_rtx_PLUS (GET_MODE (incloc),
                                                       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
        {
          extract_insn (add_insn);
          if (constrain_operands (1))
            {
              /* If this is a pre-increment and we have incremented the value
                 where it lives, copy the incremented value to RELOADREG to
                 be used as an address.  */

              if (! post)
                emit_insn (gen_move_insn (reloadreg, incloc));

              return add_insn;
            }
        }
      delete_insns_since (last);
    }

  /* If we couldn't do the increment directly, we must do it in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
        emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
         Because this might be a jump insn or a compare, and because RELOADREG
         may not be available after the insn in an input reload, we must do
         the incrementation before the insn being reloaded for.

         We have already copied IN to RELOADREG.  Increment the copy in
         RELOADREG, save that back, then decrement RELOADREG so it has
         the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      if (GET_CODE (inc) == CONST_INT)
        emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL(inc))));
      else
        emit_insn (gen_sub2_insn (reloadreg, inc));
    }

  return store;
}

#ifdef AUTO_INC_DEC
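/* (Walk X, normally PATTERN (INSN), and attach a REG_INC note to INSN for
   every register that appears inside an auto-increment or auto-decrement
   address, so that later passes can see which registers the insn modifies
   as a side effect.)  */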
static void
add_auto_inc_notes (rtx insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
      return;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        add_auto_inc_notes (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_auto_inc_notes (insn, XVECEXP (x, i, j));
    }
}
#endif

/* Copy EH notes from an insn to its reloads.  */
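/* (X is the start of a sequence of reload insns emitted for INSN; every insn
   in that sequence that can still trap gets the same REG_EH_REGION note, so
   the exception-region information is not lost when the original insn is
   split into several.)  */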
static void
copy_eh_notes (rtx insn, rtx x)
{
  rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (eh_note)
    {
      for (; x != 0; x = NEXT_INSN (x))
        {
          if (may_trap_p (PATTERN (x)))
            REG_NOTES (x)
              = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
                                   REG_NOTES (x));
        }
    }
}

/* This is used by the reload pass, which emits some instructions after
   abnormal calls and thereby moves the basic block end, when it really
   wants to emit them on the outgoing edge.  Look for abnormal call edges,
   find the corresponding call by searching backward, and repair the damage.

   Instructions that throw exceptions internally are handled similarly.  */
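/* (In outline: for each block whose end no longer coincides with the call
   or trapping insn that generated its abnormal/EH successor edges, move
   BB_END back to that insn, then take the reload insns that now dangle past
   the block end and re-insert them on the fallthru edge instead of leaving
   them after the call.)  */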
void
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
         exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->flags & EDGE_ABNORMAL_CALL)
            break;
          if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
              == (EDGE_ABNORMAL | EDGE_EH))
            break;
        }
      if (e && !CALL_P (BB_END (bb))
          && !can_throw_internal (BB_END (bb)))
        {
          rtx insn;

          /* Get past the new insns generated.  Allow notes, as the insns
             may already be deleted.  */
          insn = BB_END (bb);
          while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
                 && !can_throw_internal (insn)
                 && insn != BB_HEAD (bb))
            insn = PREV_INSN (insn);

          if (CALL_P (insn) || can_throw_internal (insn))
            {
              rtx stop, next;

              stop = NEXT_INSN (BB_END (bb));
              BB_END (bb) = insn;
              insn = NEXT_INSN (insn);

              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->flags & EDGE_FALLTHRU)
                  break;

              while (insn && insn != stop)
                {
                  next = NEXT_INSN (insn);
                  if (INSN_P (insn))
                    {
                      delete_insn (insn);

                      /* Sometimes there's still the return value USE.
                         If it's placed after a trapping call (i.e. that
                         call is the last insn anyway), we have no fallthru
                         edge.  Simply delete this use and don't try to insert
                         on the non-existent edge.  */
                      if (GET_CODE (PATTERN (insn)) != USE)
                        {
                          /* We're not deleting it, we're moving it.  */
                          INSN_DELETED_P (insn) = 0;
                          PREV_INSN (insn) = NULL_RTX;
                          NEXT_INSN (insn) = NULL_RTX;

                          insert_insn_on_edge (insn, e);
                          inserted = true;
                        }
                    }
                  insn = next;
                }
            }

          /* It may be that we don't find any such trapping insn.  In this
             case we discovered quite late that the insn that had been
             marked as can_throw_internal in fact couldn't trap at all.
             So we should in fact delete the EH edges out of the block.  */
          else
            purge_dead_edges (bb);
        }
    }

  /* We've possibly turned a single trapping insn into multiple ones.  */
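  /* (Because every block is marked in BLOCKS, find_many_sub_basic_blocks
     below re-partitions each block, re-establishing basic block boundaries
     around any insns that can now throw with -fnon-call-exceptions.)  */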
  if (flag_non_call_exceptions)
    {
      sbitmap blocks;
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
    }

  if (inserted)
    commit_edge_insertions ();

#ifdef ENABLE_CHECKING
  /* Verify that we didn't turn one trapping insn into many, and that
     we found and corrected all of the problems wrt fixups on the
     fallthru edge.  */
  verify_flow_info ();
#endif
}
