OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [reload1.c] - Blame information for rev 848

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 684 jeremybenn
/* Reload pseudo regs into hard regs for insns that require hard regs.
2
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4
   2011, 2012 Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
 
27
#include "machmode.h"
28
#include "hard-reg-set.h"
29
#include "rtl-error.h"
30
#include "tm_p.h"
31
#include "obstack.h"
32
#include "insn-config.h"
33
#include "ggc.h"
34
#include "flags.h"
35
#include "function.h"
36
#include "expr.h"
37
#include "optabs.h"
38
#include "regs.h"
39
#include "addresses.h"
40
#include "basic-block.h"
41
#include "df.h"
42
#include "reload.h"
43
#include "recog.h"
44
#include "output.h"
45
#include "except.h"
46
#include "tree.h"
47
#include "ira.h"
48
#include "target.h"
49
#include "emit-rtl.h"
50
 
51
/* This file contains the reload pass of the compiler, which is
52
   run after register allocation has been done.  It checks that
53
   each insn is valid (operands required to be in registers really
54
   are in registers of the proper class) and fixes up invalid ones
55
   by copying values temporarily into registers for the insns
56
   that need them.
57
 
58
   The results of register allocation are described by the vector
59
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
60
   can be used to find which hard reg, if any, a pseudo reg is in.
61
 
62
   The technique we always use is to free up a few hard regs that are
63
   called ``reload regs'', and for each place where a pseudo reg
64
   must be in a hard reg, copy it temporarily into one of the reload regs.
65
 
66
   Reload regs are allocated locally for every instruction that needs
67
   reloads.  When there are pseudos which are allocated to a register that
68
   has been chosen as a reload reg, such pseudos must be ``spilled''.
69
   This means that they go to other hard regs, or to stack slots if no other
70
   available hard regs can be found.  Spilling can invalidate more
71
   insns, requiring additional need for reloads, so we must keep checking
72
   until the process stabilizes.
73
 
74
   For machines with different classes of registers, we must keep track
75
   of the register class needed for each reload, and make sure that
76
   we allocate enough reload registers of each class.
77
 
78
   The file reload.c contains the code that checks one insn for
79
   validity and reports the reloads that it needs.  This file
80
   is in charge of scanning the entire rtl code, accumulating the
81
   reload needs, spilling, assigning reload registers to use for
82
   fixing up each insn, and generating the new insns to copy values
83
   into the reload registers.  */
84
 
85
struct target_reload default_target_reload;
86
#if SWITCHABLE_TARGET
87
struct target_reload *this_target_reload = &default_target_reload;
88
#endif
89
 
90
#define spill_indirect_levels                   \
91
  (this_target_reload->x_spill_indirect_levels)
92
 
93
/* During reload_as_needed, element N contains a REG rtx for the hard reg
94
   into which reg N has been reloaded (perhaps for a previous insn).  */
95
static rtx *reg_last_reload_reg;
96
 
97
/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
98
   for an output reload that stores into reg N.  */
99
static regset_head reg_has_output_reload;
100
 
101
/* Indicates which hard regs are reload-registers for an output reload
102
   in the current insn.  */
103
static HARD_REG_SET reg_is_output_reload;
104
 
105
/* Widest width in which each pseudo reg is referred to (via subreg).  */
106
static unsigned int *reg_max_ref_width;
107
 
108
/* Vector to remember old contents of reg_renumber before spilling.  */
109
static short *reg_old_renumber;
110
 
111
/* During reload_as_needed, element N contains the last pseudo regno reloaded
112
   into hard register N.  If that pseudo reg occupied more than one register,
113
   reg_reloaded_contents points to that pseudo for each spill register in
114
   use; all of these must remain set for an inheritance to occur.  */
115
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
 
117
/* During reload_as_needed, element N contains the insn for which
118
   hard register N was last used.   Its contents are significant only
119
   when reg_reloaded_valid is set for this register.  */
120
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
121
 
122
/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
123
static HARD_REG_SET reg_reloaded_valid;
124
/* Indicate if the register was dead at the end of the reload.
125
   This is only valid if reg_reloaded_contents is set and valid.  */
126
static HARD_REG_SET reg_reloaded_dead;
127
 
128
/* Indicate whether the register's current value is one that is not
129
   safe to retain across a call, even for registers that are normally
130
   call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
131
static HARD_REG_SET reg_reloaded_call_part_clobbered;
132
 
133
/* Number of spill-regs so far; number of valid elements of spill_regs.  */
134
static int n_spills;
135
 
136
/* In parallel with spill_regs, contains REG rtx's for those regs.
137
   Holds the last rtx used for any given reg, or 0 if it has never
138
   been used for spilling yet.  This rtx is reused, provided it has
139
   the proper mode.  */
140
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141
 
142
/* In parallel with spill_regs, contains nonzero for a spill reg
143
   that was stored after the last time it was used.
144
   The precise value is the insn generated to do the store.  */
145
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146
 
147
/* This is the register that was stored with spill_reg_store.  This is a
148
   copy of reload_out / reload_out_reg when the value was stored; if
149
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
150
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
151
 
152
/* This table is the inverse mapping of spill_regs:
153
   indexed by hard reg number,
154
   it contains the position of that reg in spill_regs,
155
   or -1 for something that is not in spill_regs.
156
 
157
   ?!?  This is no longer accurate.  */
158
static short spill_reg_order[FIRST_PSEUDO_REGISTER];
159
 
160
/* This reg set indicates registers that can't be used as spill registers for
161
   the currently processed insn.  These are the hard registers which are live
162
   during the insn, but not allocated to pseudos, as well as fixed
163
   registers.  */
164
static HARD_REG_SET bad_spill_regs;
165
 
166
/* These are the hard registers that can't be used as spill register for any
167
   insn.  This includes registers used for user variables and registers that
168
   we can't eliminate.  A register that appears in this set also can't be used
169
   to retry register allocation.  */
170
static HARD_REG_SET bad_spill_regs_global;
171
 
172
/* Describes order of use of registers for reloading
173
   of spilled pseudo-registers.  `n_spills' is the number of
174
   elements that are actually valid; new ones are added at the end.
175
 
176
   Both spill_regs and spill_reg_order are used on two occasions:
177
   once during find_reload_regs, where they keep track of the spill registers
178
   for a single insn, but also during reload_as_needed where they show all
179
   the registers ever used by reload.  For the latter case, the information
180
   is calculated during finish_spills.  */
181
static short spill_regs[FIRST_PSEUDO_REGISTER];
182
 
183
/* This vector of reg sets indicates, for each pseudo, which hard registers
184
   may not be used for retrying global allocation because the register was
185
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
186
   a register that it was already allocated to, reload might not
187
   terminate.  */
188
static HARD_REG_SET *pseudo_previous_regs;
189
 
190
/* This vector of reg sets indicates, for each pseudo, which hard
191
   registers may not be used for retrying global allocation because they
192
   are used as spill registers during one of the insns in which the
193
   pseudo is live.  */
194
static HARD_REG_SET *pseudo_forbidden_regs;
195
 
196
/* All hard regs that have been used as spill registers for any insn are
197
   marked in this set.  */
198
static HARD_REG_SET used_spill_regs;
199
 
200
/* Index of last register assigned as a spill register.  We allocate in
201
   a round-robin fashion.  */
202
static int last_spill_reg;
203
 
204
/* Record the stack slot for each spilled hard register.  */
205
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
 
207
/* Width allocated so far for that stack slot.  */
208
static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
209
 
210
/* Record which pseudos needed to be spilled.  */
211
static regset_head spilled_pseudos;
212
 
213
/* Record which pseudos changed their allocation in finish_spills.  */
214
static regset_head changed_allocation_pseudos;
215
 
216
/* Used for communication between order_regs_for_reload and count_pseudo.
217
   Used to avoid counting one pseudo twice.  */
218
static regset_head pseudos_counted;
219
 
220
/* First uid used by insns created by reload in this function.
221
   Used in find_equiv_reg.  */
222
int reload_first_uid;
223
 
224
/* Flag set by local-alloc or global-alloc if anything is live in
225
   a call-clobbered reg across calls.  */
226
int caller_save_needed;
227
 
228
/* Set to 1 while reload_as_needed is operating.
229
   Required by some machines to handle any generated moves differently.  */
230
int reload_in_progress = 0;
231
 
232
/* This obstack is used for allocation of rtl during register elimination.
233
   The allocated storage can be freed once find_reloads has processed the
234
   insn.  */
235
static struct obstack reload_obstack;
236
 
237
/* Points to the beginning of the reload_obstack.  All insn_chain structures
238
   are allocated first.  */
239
static char *reload_startobj;
240
 
241
/* The point after all insn_chain structures.  Used to quickly deallocate
242
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
243
static char *reload_firstobj;
244
 
245
/* This points before all local rtl generated by register elimination.
246
   Used to quickly free all memory after processing one insn.  */
247
static char *reload_insn_firstobj;
248
 
249
/* List of insn_chain instructions, one for every insn that reload needs to
250
   examine.  */
251
struct insn_chain *reload_insn_chain;
252
 
253
/* TRUE if we potentially left dead insns in the insn stream and want to
254
   run DCE immediately after reload, FALSE otherwise.  */
255
static bool need_dce;
256
 
257
/* List of all insns needing reloads.  */
258
static struct insn_chain *insns_need_reload;
259
 
260
/* This structure is used to record information about register eliminations.
261
   Each array entry describes one possible way of eliminating a register
262
   in favor of another.   If there is more than one way of eliminating a
263
   particular register, the most preferred should be specified first.  */
264
 
265
struct elim_table
266
{
267
  int from;                     /* Register number to be eliminated.  */
268
  int to;                       /* Register number used as replacement.  */
269
  HOST_WIDE_INT initial_offset; /* Initial difference between values.  */
270
  int can_eliminate;            /* Nonzero if this elimination can be done.  */
271
  int can_eliminate_previous;   /* Value returned by TARGET_CAN_ELIMINATE
272
                                   target hook in previous scan over insns
273
                                   made by reload.  */
274
  HOST_WIDE_INT offset;         /* Current offset between the two regs.  */
275
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
276
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
277
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
278
                                   We cannot simply compare the number since
279
                                   we might then spuriously replace a hard
280
                                   register corresponding to a pseudo
281
                                   assigned to the reg to be eliminated.  */
282
  rtx to_rtx;                   /* REG rtx for the replacement.  */
283
};
284
 
285
static struct elim_table *reg_eliminate = 0;
286
 
287
/* This is an intermediate structure to initialize the table.  It has
288
   exactly the members provided by ELIMINABLE_REGS.  */
289
static const struct elim_table_1
290
{
291
  const int from;
292
  const int to;
293
} reg_eliminate_1[] =
294
 
295
/* If a set of eliminable registers was specified, define the table from it.
296
   Otherwise, default to the normal case of the frame pointer being
297
   replaced by the stack pointer.  */
298
 
299
#ifdef ELIMINABLE_REGS
300
  ELIMINABLE_REGS;
301
#else
302
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
303
#endif
304
 
305
#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
306
 
307
/* Record the number of pending eliminations that have an offset not equal
308
   to their initial offset.  If nonzero, we use a new copy of each
309
   replacement result in any insns encountered.  */
310
int num_not_at_initial_offset;
311
 
312
/* Count the number of registers that we may be able to eliminate.  */
313
static int num_eliminable;
314
/* And the number of registers that are equivalent to a constant that
315
   can be eliminated to frame_pointer / arg_pointer + constant.  */
316
static int num_eliminable_invariants;
317
 
318
/* For each label, we record the offset of each elimination.  If we reach
319
   a label by more than one path and an offset differs, we cannot do the
320
   elimination.  This information is indexed by the difference of the
321
   number of the label and the first label number.  We can't offset the
322
   pointer itself as this can cause problems on machines with segmented
323
   memory.  The first table is an array of flags that records whether we
324
   have yet encountered a label and the second table is an array of arrays,
325
   one entry in the latter array for each elimination.  */
326
 
327
static int first_label_num;
328
static char *offsets_known_at;
329
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
330
 
331
VEC(reg_equivs_t,gc) *reg_equivs;
332
 
333
/* Stack of addresses where an rtx has been changed.  We can undo the
334
   changes by popping items off the stack and restoring the original
335
   value at each location.
336
 
337
   We use this simplistic undo capability rather than copy_rtx as copy_rtx
338
   will not make a deep copy of a normally sharable rtx, such as
339
   (const (plus (symbol_ref) (const_int))).  If such an expression appears
340
   as R1 in gen_reload_chain_without_interm_reg_p, then a shared
341
   rtx expression would be changed.  See PR 42431.  */
342
 
343
typedef rtx *rtx_p;
344
DEF_VEC_P(rtx_p);
345
DEF_VEC_ALLOC_P(rtx_p,heap);
346
static VEC(rtx_p,heap) *substitute_stack;
347
 
348
/* Number of labels in the current function.  */
349
 
350
static int num_labels;
351
 
352
static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
353
static void maybe_fix_stack_asms (void);
354
static void copy_reloads (struct insn_chain *);
355
static void calculate_needs_all_insns (int);
356
static int find_reg (struct insn_chain *, int);
357
static void find_reload_regs (struct insn_chain *);
358
static void select_reload_regs (void);
359
static void delete_caller_save_insns (void);
360
 
361
static void spill_failure (rtx, enum reg_class);
362
static void count_spilled_pseudo (int, int, int);
363
static void delete_dead_insn (rtx);
364
static void alter_reg (int, int, bool);
365
static void set_label_offsets (rtx, rtx, int);
366
static void check_eliminable_occurrences (rtx);
367
static void elimination_effects (rtx, enum machine_mode);
368
static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
369
static int eliminate_regs_in_insn (rtx, int);
370
static void update_eliminable_offsets (void);
371
static void mark_not_eliminable (rtx, const_rtx, void *);
372
static void set_initial_elim_offsets (void);
373
static bool verify_initial_elim_offsets (void);
374
static void set_initial_label_offsets (void);
375
static void set_offsets_for_label (rtx);
376
static void init_eliminable_invariants (rtx, bool);
377
static void init_elim_table (void);
378
static void free_reg_equiv (void);
379
static void update_eliminables (HARD_REG_SET *);
380
static void elimination_costs_in_insn (rtx);
381
static void spill_hard_reg (unsigned int, int);
382
static int finish_spills (int);
383
static void scan_paradoxical_subregs (rtx);
384
static void count_pseudo (int);
385
static void order_regs_for_reload (struct insn_chain *);
386
static void reload_as_needed (int);
387
static void forget_old_reloads_1 (rtx, const_rtx, void *);
388
static void forget_marked_reloads (regset);
389
static int reload_reg_class_lower (const void *, const void *);
390
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
391
                                    enum machine_mode);
392
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
393
                                     enum machine_mode);
394
static int reload_reg_free_p (unsigned int, int, enum reload_type);
395
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
396
                                        rtx, rtx, int, int);
397
static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
398
                             rtx, rtx, int, int);
399
static int allocate_reload_reg (struct insn_chain *, int, int);
400
static int conflicts_with_override (rtx);
401
static void failed_reload (rtx, int);
402
static int set_reload_reg (int, int);
403
static void choose_reload_regs_init (struct insn_chain *, rtx *);
404
static void choose_reload_regs (struct insn_chain *);
405
static void emit_input_reload_insns (struct insn_chain *, struct reload *,
406
                                     rtx, int);
407
static void emit_output_reload_insns (struct insn_chain *, struct reload *,
408
                                      int);
409
static void do_input_reload (struct insn_chain *, struct reload *, int);
410
static void do_output_reload (struct insn_chain *, struct reload *, int);
411
static void emit_reload_insns (struct insn_chain *);
412
static void delete_output_reload (rtx, int, int, rtx);
413
static void delete_address_reloads (rtx, rtx);
414
static void delete_address_reloads_1 (rtx, rtx, rtx);
415
static void inc_for_reload (rtx, rtx, rtx, int);
416
#ifdef AUTO_INC_DEC
417
static void add_auto_inc_notes (rtx, rtx);
418
#endif
419
static void substitute (rtx *, const_rtx, rtx);
420
static bool gen_reload_chain_without_interm_reg_p (int, int);
421
static int reloads_conflict (int, int);
422
static rtx gen_reload (rtx, rtx, int, enum reload_type);
423
static rtx emit_insn_if_valid_for_reload (rtx);
424
 
425
/* Initialize the reload pass.  This is called at the beginning of compilation
426
   and may be called again if the target is reinitialized.  */
427
 
428
void
429
init_reload (void)
430
{
431
  int i;
432
 
433
  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
434
     Set spill_indirect_levels to the number of levels such addressing is
435
     permitted, zero if it is not permitted at all.  */
436
 
437
  rtx tem
438
    = gen_rtx_MEM (Pmode,
439
                   gen_rtx_PLUS (Pmode,
440
                                 gen_rtx_REG (Pmode,
441
                                              LAST_VIRTUAL_REGISTER + 1),
442
                                 GEN_INT (4)));
443
  spill_indirect_levels = 0;
444
 
445
  while (memory_address_p (QImode, tem))
446
    {
447
      spill_indirect_levels++;
448
      tem = gen_rtx_MEM (Pmode, tem);
449
    }
450
 
451
  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
452
 
453
  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
454
  indirect_symref_ok = memory_address_p (QImode, tem);
455
 
456
  /* See if reg+reg is a valid (and offsettable) address.  */
457
 
458
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
459
    {
460
      tem = gen_rtx_PLUS (Pmode,
461
                          gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
462
                          gen_rtx_REG (Pmode, i));
463
 
464
      /* This way, we make sure that reg+reg is an offsettable address.  */
465
      tem = plus_constant (tem, 4);
466
 
467
      if (memory_address_p (QImode, tem))
468
        {
469
          double_reg_address_ok = 1;
470
          break;
471
        }
472
    }
473
 
474
  /* Initialize obstack for our rtl allocation.  */
475
  gcc_obstack_init (&reload_obstack);
476
  reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
477
 
478
  INIT_REG_SET (&spilled_pseudos);
479
  INIT_REG_SET (&changed_allocation_pseudos);
480
  INIT_REG_SET (&pseudos_counted);
481
}
482
 
483
/* List of insn chains that are currently unused.  */
484
static struct insn_chain *unused_insn_chains = 0;
485
 
486
/* Allocate an empty insn_chain structure.  */
487
struct insn_chain *
488
new_insn_chain (void)
489
{
490
  struct insn_chain *c;
491
 
492
  if (unused_insn_chains == 0)
493
    {
494
      c = XOBNEW (&reload_obstack, struct insn_chain);
495
      INIT_REG_SET (&c->live_throughout);
496
      INIT_REG_SET (&c->dead_or_set);
497
    }
498
  else
499
    {
500
      c = unused_insn_chains;
501
      unused_insn_chains = c->next;
502
    }
503
  c->is_caller_save_insn = 0;
504
  c->need_operand_change = 0;
505
  c->need_reload = 0;
506
  c->need_elim = 0;
507
  return c;
508
}
509
 
510
/* Small utility function to set all regs in hard reg set TO which are
511
   allocated to pseudos in regset FROM.  */
512
 
513
void
514
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
515
{
516
  unsigned int regno;
517
  reg_set_iterator rsi;
518
 
519
  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
520
    {
521
      int r = reg_renumber[regno];
522
 
523
      if (r < 0)
524
        {
525
          /* reload_combine uses the information from DF_LIVE_IN,
526
             which might still contain registers that have not
527
             actually been allocated since they have an
528
             equivalence.  */
529
          gcc_assert (ira_conflicts_p || reload_completed);
530
        }
531
      else
532
        add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
533
    }
534
}
535
 
536
/* Replace all pseudos found in LOC with their corresponding
537
   equivalences.  */
538
 
539
static void
540
replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
541
{
542
  rtx x = *loc;
543
  enum rtx_code code;
544
  const char *fmt;
545
  int i, j;
546
 
547
  if (! x)
548
    return;
549
 
550
  code = GET_CODE (x);
551
  if (code == REG)
552
    {
553
      unsigned int regno = REGNO (x);
554
 
555
      if (regno < FIRST_PSEUDO_REGISTER)
556
        return;
557
 
558
      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
559
      if (x != *loc)
560
        {
561
          *loc = x;
562
          replace_pseudos_in (loc, mem_mode, usage);
563
          return;
564
        }
565
 
566
      if (reg_equiv_constant (regno))
567
        *loc = reg_equiv_constant (regno);
568
      else if (reg_equiv_invariant (regno))
569
        *loc = reg_equiv_invariant (regno);
570
      else if (reg_equiv_mem (regno))
571
        *loc = reg_equiv_mem (regno);
572
      else if (reg_equiv_address (regno))
573
        *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
574
      else
575
        {
576
          gcc_assert (!REG_P (regno_reg_rtx[regno])
577
                      || REGNO (regno_reg_rtx[regno]) != regno);
578
          *loc = regno_reg_rtx[regno];
579
        }
580
 
581
      return;
582
    }
583
  else if (code == MEM)
584
    {
585
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
586
      return;
587
    }
588
 
589
  /* Process each of our operands recursively.  */
590
  fmt = GET_RTX_FORMAT (code);
591
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
592
    if (*fmt == 'e')
593
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
594
    else if (*fmt == 'E')
595
      for (j = 0; j < XVECLEN (x, i); j++)
596
        replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
597
}
598
 
599
/* Determine if the current function has an exception receiver block
600
   that reaches the exit block via non-exceptional edges  */
601
 
602
static bool
603
has_nonexceptional_receiver (void)
604
{
605
  edge e;
606
  edge_iterator ei;
607
  basic_block *tos, *worklist, bb;
608
 
609
  /* If we're not optimizing, then just err on the safe side.  */
610
  if (!optimize)
611
    return true;
612
 
613
  /* First determine which blocks can reach exit via normal paths.  */
614
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
615
 
616
  FOR_EACH_BB (bb)
617
    bb->flags &= ~BB_REACHABLE;
618
 
619
  /* Place the exit block on our worklist.  */
620
  EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
621
  *tos++ = EXIT_BLOCK_PTR;
622
 
623
  /* Iterate: find everything reachable from what we've already seen.  */
624
  while (tos != worklist)
625
    {
626
      bb = *--tos;
627
 
628
      FOR_EACH_EDGE (e, ei, bb->preds)
629
        if (!(e->flags & EDGE_ABNORMAL))
630
          {
631
            basic_block src = e->src;
632
 
633
            if (!(src->flags & BB_REACHABLE))
634
              {
635
                src->flags |= BB_REACHABLE;
636
                *tos++ = src;
637
              }
638
          }
639
    }
640
  free (worklist);
641
 
642
  /* Now see if there's a reachable block with an exceptional incoming
643
     edge.  */
644
  FOR_EACH_BB (bb)
645
    if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
646
      return true;
647
 
648
  /* No exceptional block reached exit unexceptionally.  */
649
  return false;
650
}
651
 
652
/* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
653
   zero elements) to MAX_REG_NUM elements.
654
 
655
   Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
656
void
657
grow_reg_equivs (void)
658
{
659
  int old_size = VEC_length (reg_equivs_t, reg_equivs);
660
  int max_regno = max_reg_num ();
661
  int i;
662
 
663
  VEC_reserve (reg_equivs_t, gc, reg_equivs, max_regno);
664
  for (i = old_size; i < max_regno; i++)
665
    {
666
      VEC_quick_insert (reg_equivs_t, reg_equivs, i, 0);
667
      memset (VEC_index (reg_equivs_t, reg_equivs, i), 0, sizeof (reg_equivs_t));
668
    }
669
 
670
}
671
 
672
 
673
/* Global variables used by reload and its subroutines.  */
674
 
675
/* The current basic block while in calculate_elim_costs_all_insns.  */
676
static basic_block elim_bb;
677
 
678
/* Set during calculate_needs if an insn needs register elimination.  */
679
static int something_needs_elimination;
680
/* Set during calculate_needs if an insn needs an operand changed.  */
681
static int something_needs_operands_changed;
682
/* Set by alter_regs if we spilled a register to the stack.  */
683
static bool something_was_spilled;
684
 
685
/* Nonzero means we couldn't get enough spill regs.  */
686
static int failure;
687
 
688
/* Temporary array of pseudo-register number.  */
689
static int *temp_pseudo_reg_arr;
690
 
691
/* Main entry point for the reload pass.
692
 
693
   FIRST is the first insn of the function being compiled.
694
 
695
   GLOBAL nonzero means we were called from global_alloc
696
   and should attempt to reallocate any pseudoregs that we
697
   displace from hard regs we will use for reloads.
698
   If GLOBAL is zero, we do not have enough information to do that,
699
   so any pseudo reg that is spilled must go to the stack.
700
 
701
   Return value is TRUE if reload likely left dead insns in the
702
   stream and a DCE pass should be run to elimiante them.  Else the
703
   return value is FALSE.  */
704
 
705
bool
706
reload (rtx first, int global)
707
{
708
  int i, n;
709
  rtx insn;
710
  struct elim_table *ep;
711
  basic_block bb;
712
  bool inserted;
713
 
714
  /* Make sure even insns with volatile mem refs are recognizable.  */
715
  init_recog ();
716
 
717
  failure = 0;
718
 
719
  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
720
 
721
  /* Make sure that the last insn in the chain
722
     is not something that needs reloading.  */
723
  emit_note (NOTE_INSN_DELETED);
724
 
725
  /* Enable find_equiv_reg to distinguish insns made by reload.  */
726
  reload_first_uid = get_max_uid ();
727
 
728
#ifdef SECONDARY_MEMORY_NEEDED
729
  /* Initialize the secondary memory table.  */
730
  clear_secondary_mem ();
731
#endif
732
 
733
  /* We don't have a stack slot for any spill reg yet.  */
734
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
735
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
736
 
737
  /* Initialize the save area information for caller-save, in case some
738
     are needed.  */
739
  init_save_areas ();
740
 
741
  /* Compute which hard registers are now in use
742
     as homes for pseudo registers.
743
     This is done here rather than (eg) in global_alloc
744
     because this point is reached even if not optimizing.  */
745
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
746
    mark_home_live (i);
747
 
748
  /* A function that has a nonlocal label that can reach the exit
749
     block via non-exceptional paths must save all call-saved
750
     registers.  */
751
  if (cfun->has_nonlocal_label
752
      && has_nonexceptional_receiver ())
753
    crtl->saves_all_registers = 1;
754
 
755
  if (crtl->saves_all_registers)
756
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
757
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
758
        df_set_regs_ever_live (i, true);
759
 
760
  /* Find all the pseudo registers that didn't get hard regs
761
     but do have known equivalent constants or memory slots.
762
     These include parameters (known equivalent to parameter slots)
763
     and cse'd or loop-moved constant memory addresses.
764
 
765
     Record constant equivalents in reg_equiv_constant
766
     so they will be substituted by find_reloads.
767
     Record memory equivalents in reg_mem_equiv so they can
768
     be substituted eventually by altering the REG-rtx's.  */
769
 
770
  grow_reg_equivs ();
771
  reg_old_renumber = XCNEWVEC (short, max_regno);
772
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
773
  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
774
  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
775
 
776
  CLEAR_HARD_REG_SET (bad_spill_regs_global);
777
 
778
  init_eliminable_invariants (first, true);
779
  init_elim_table ();
780
 
781
  /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
782
     stack slots to the pseudos that lack hard regs or equivalents.
783
     Do not touch virtual registers.  */
784
 
785
  temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
786
  for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
787
    temp_pseudo_reg_arr[n++] = i;
788
 
789
  if (ira_conflicts_p)
790
    /* Ask IRA to order pseudo-registers for better stack slot
791
       sharing.  */
792
    ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
793
 
794
  for (i = 0; i < n; i++)
795
    alter_reg (temp_pseudo_reg_arr[i], -1, false);
796
 
797
  /* If we have some registers we think can be eliminated, scan all insns to
798
     see if there is an insn that sets one of these registers to something
799
     other than itself plus a constant.  If so, the register cannot be
800
     eliminated.  Doing this scan here eliminates an extra pass through the
801
     main reload loop in the most common case where register elimination
802
     cannot be done.  */
803
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
804
    if (INSN_P (insn))
805
      note_stores (PATTERN (insn), mark_not_eliminable, NULL);
806
 
807
  maybe_fix_stack_asms ();
808
 
809
  insns_need_reload = 0;
810
  something_needs_elimination = 0;
811
 
812
  /* Initialize to -1, which means take the first spill register.  */
813
  last_spill_reg = -1;
814
 
815
  /* Spill any hard regs that we know we can't eliminate.  */
816
  CLEAR_HARD_REG_SET (used_spill_regs);
817
  /* There can be multiple ways to eliminate a register;
818
     they should be listed adjacently.
819
     Elimination for any register fails only if all possible ways fail.  */
820
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
821
    {
822
      int from = ep->from;
823
      int can_eliminate = 0;
824
      do
825
        {
826
          can_eliminate |= ep->can_eliminate;
827
          ep++;
828
        }
829
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
830
      if (! can_eliminate)
831
        spill_hard_reg (from, 1);
832
    }
833
 
834
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
835
  if (frame_pointer_needed)
836
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
837
#endif
838
  finish_spills (global);
839
 
840
  /* From now on, we may need to generate moves differently.  We may also
841
     allow modifications of insns which cause them to not be recognized.
842
     Any such modifications will be cleaned up during reload itself.  */
843
  reload_in_progress = 1;
844
 
845
  /* This loop scans the entire function each go-round
846
     and repeats until one repetition spills no additional hard regs.  */
847
  for (;;)
848
    {
849
      int something_changed;
850
      int did_spill;
851
      HOST_WIDE_INT starting_frame_size;
852
 
853
      starting_frame_size = get_frame_size ();
854
      something_was_spilled = false;
855
 
856
      set_initial_elim_offsets ();
857
      set_initial_label_offsets ();
858
 
859
      /* For each pseudo register that has an equivalent location defined,
860
         try to eliminate any eliminable registers (such as the frame pointer)
861
         assuming initial offsets for the replacement register, which
862
         is the normal case.
863
 
864
         If the resulting location is directly addressable, substitute
865
         the MEM we just got directly for the old REG.
866
 
867
         If it is not addressable but is a constant or the sum of a hard reg
868
         and constant, it is probably not addressable because the constant is
869
         out of range, in that case record the address; we will generate
870
         hairy code to compute the address in a register each time it is
871
         needed.  Similarly if it is a hard register, but one that is not
872
         valid as an address register.
873
 
874
         If the location is not addressable, but does not have one of the
875
         above forms, assign a stack slot.  We have to do this to avoid the
876
         potential of producing lots of reloads if, e.g., a location involves
877
         a pseudo that didn't get a hard register and has an equivalent memory
878
         location that also involves a pseudo that didn't get a hard register.
879
 
880
         Perhaps at some point we will improve reload_when_needed handling
881
         so this problem goes away.  But that's very hairy.  */
882
 
883
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
884
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
885
          {
886
            rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
887
                                    NULL_RTX);
888
 
889
            if (strict_memory_address_addr_space_p
890
                  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
891
                   MEM_ADDR_SPACE (x)))
892
              reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
893
            else if (CONSTANT_P (XEXP (x, 0))
894
                     || (REG_P (XEXP (x, 0))
895
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
896
                     || (GET_CODE (XEXP (x, 0)) == PLUS
897
                         && REG_P (XEXP (XEXP (x, 0), 0))
898
                         && (REGNO (XEXP (XEXP (x, 0), 0))
899
                             < FIRST_PSEUDO_REGISTER)
900
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
901
              reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
902
            else
903
              {
904
                /* Make a new stack slot.  Then indicate that something
905
                   changed so we go back and recompute offsets for
906
                   eliminable registers because the allocation of memory
907
                   below might change some offset.  reg_equiv_{mem,address}
908
                   will be set up for this pseudo on the next pass around
909
                   the loop.  */
910
                reg_equiv_memory_loc (i) = 0;
911
                reg_equiv_init (i) = 0;
912
                alter_reg (i, -1, true);
913
              }
914
          }
915
 
916
      if (caller_save_needed)
917
        setup_save_areas ();
918
 
919
      /* If we allocated another stack slot, redo elimination bookkeeping.  */
920
      if (something_was_spilled || starting_frame_size != get_frame_size ())
921
        continue;
922
      if (starting_frame_size && crtl->stack_alignment_needed)
923
        {
924
          /* If we have a stack frame, we must align it now.  The
925
             stack size may be a part of the offset computation for
926
             register elimination.  So if this changes the stack size,
927
             then repeat the elimination bookkeeping.  We don't
928
             realign when there is no stack, as that will cause a
929
             stack frame when none is needed should
930
             STARTING_FRAME_OFFSET not be already aligned to
931
             STACK_BOUNDARY.  */
932
          assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
933
          if (starting_frame_size != get_frame_size ())
934
            continue;
935
        }
936
 
937
      if (caller_save_needed)
938
        {
939
          save_call_clobbered_regs ();
940
          /* That might have allocated new insn_chain structures.  */
941
          reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
942
        }
943
 
944
      calculate_needs_all_insns (global);
945
 
946
      if (! ira_conflicts_p)
947
        /* Don't do it for IRA.  We need this info because we don't
948
           change live_throughout and dead_or_set for chains when IRA
949
           is used.  */
950
        CLEAR_REG_SET (&spilled_pseudos);
951
 
952
      did_spill = 0;
953
 
954
      something_changed = 0;
955
 
956
      /* If we allocated any new memory locations, make another pass
957
         since it might have changed elimination offsets.  */
958
      if (something_was_spilled || starting_frame_size != get_frame_size ())
959
        something_changed = 1;
960
 
961
      /* Even if the frame size remained the same, we might still have
962
         changed elimination offsets, e.g. if find_reloads called
963
         force_const_mem requiring the back end to allocate a constant
964
         pool base register that needs to be saved on the stack.  */
965
      else if (!verify_initial_elim_offsets ())
966
        something_changed = 1;
967
 
968
      {
969
        HARD_REG_SET to_spill;
970
        CLEAR_HARD_REG_SET (to_spill);
971
        update_eliminables (&to_spill);
972
        AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
973
 
974
        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
975
          if (TEST_HARD_REG_BIT (to_spill, i))
976
            {
977
              spill_hard_reg (i, 1);
978
              did_spill = 1;
979
 
980
              /* Regardless of the state of spills, if we previously had
981
                 a register that we thought we could eliminate, but now can
982
                 not eliminate, we must run another pass.
983
 
984
                 Consider pseudos which have an entry in reg_equiv_* which
985
                 reference an eliminable register.  We must make another pass
986
                 to update reg_equiv_* so that we do not substitute in the
987
                 old value from when we thought the elimination could be
988
                 performed.  */
989
              something_changed = 1;
990
            }
991
      }
992
 
993
      select_reload_regs ();
994
      if (failure)
995
        goto failed;
996
 
997
      if (insns_need_reload != 0 || did_spill)
998
        something_changed |= finish_spills (global);
999
 
1000
      if (! something_changed)
1001
        break;
1002
 
1003
      if (caller_save_needed)
1004
        delete_caller_save_insns ();
1005
 
1006
      obstack_free (&reload_obstack, reload_firstobj);
1007
    }
1008
 
1009
  /* If global-alloc was run, notify it of any register eliminations we have
1010
     done.  */
1011
  if (global)
1012
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1013
      if (ep->can_eliminate)
1014
        mark_elimination (ep->from, ep->to);
1015
 
1016
  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1017
     If that insn didn't set the register (i.e., it copied the register to
1018
     memory), just delete that insn instead of the equivalencing insn plus
1019
     anything now dead.  If we call delete_dead_insn on that insn, we may
1020
     delete the insn that actually sets the register if the register dies
1021
     there and that is incorrect.  */
1022
 
1023
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1024
    {
1025
      if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
1026
        {
1027
          rtx list;
1028
          for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
1029
            {
1030
              rtx equiv_insn = XEXP (list, 0);
1031
 
1032
              /* If we already deleted the insn or if it may trap, we can't
1033
                 delete it.  The latter case shouldn't happen, but can
1034
                 if an insn has a variable address, gets a REG_EH_REGION
1035
                 note added to it, and then gets converted into a load
1036
                 from a constant address.  */
1037
              if (NOTE_P (equiv_insn)
1038
                  || can_throw_internal (equiv_insn))
1039
                ;
1040
              else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1041
                delete_dead_insn (equiv_insn);
1042
              else
1043
                SET_INSN_DELETED (equiv_insn);
1044
            }
1045
        }
1046
    }
1047
 
1048
  /* Use the reload registers where necessary
1049
     by generating move instructions to move the must-be-register
1050
     values into or out of the reload registers.  */
1051
 
1052
  if (insns_need_reload != 0 || something_needs_elimination
1053
      || something_needs_operands_changed)
1054
    {
1055
      HOST_WIDE_INT old_frame_size = get_frame_size ();
1056
 
1057
      reload_as_needed (global);
1058
 
1059
      gcc_assert (old_frame_size == get_frame_size ());
1060
 
1061
      gcc_assert (verify_initial_elim_offsets ());
1062
    }
1063
 
1064
  /* If we were able to eliminate the frame pointer, show that it is no
1065
     longer live at the start of any basic block.  If it ls live by
1066
     virtue of being in a pseudo, that pseudo will be marked live
1067
     and hence the frame pointer will be known to be live via that
1068
     pseudo.  */
1069
 
1070
  if (! frame_pointer_needed)
1071
    FOR_EACH_BB (bb)
1072
      bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1073
 
1074
  /* Come here (with failure set nonzero) if we can't get enough spill
1075
     regs.  */
1076
 failed:
1077
 
1078
  CLEAR_REG_SET (&changed_allocation_pseudos);
1079
  CLEAR_REG_SET (&spilled_pseudos);
1080
  reload_in_progress = 0;
1081
 
1082
  /* Now eliminate all pseudo regs by modifying them into
1083
     their equivalent memory references.
1084
     The REG-rtx's for the pseudos are modified in place,
1085
     so all insns that used to refer to them now refer to memory.
1086
 
1087
     For a reg that has a reg_equiv_address, all those insns
1088
     were changed by reloading so that no insns refer to it any longer;
1089
     but the DECL_RTL of a variable decl may refer to it,
1090
     and if so this causes the debugging info to mention the variable.  */
1091
 
1092
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1093
    {
1094
      rtx addr = 0;
1095
 
1096
      if (reg_equiv_mem (i))
1097
        addr = XEXP (reg_equiv_mem (i), 0);
1098
 
1099
      if (reg_equiv_address (i))
1100
        addr = reg_equiv_address (i);
1101
 
1102
      if (addr)
1103
        {
1104
          if (reg_renumber[i] < 0)
1105
            {
1106
              rtx reg = regno_reg_rtx[i];
1107
 
1108
              REG_USERVAR_P (reg) = 0;
1109
              PUT_CODE (reg, MEM);
1110
              XEXP (reg, 0) = addr;
1111
              if (reg_equiv_memory_loc (i))
1112
                MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1113
              else
1114
                MEM_ATTRS (reg) = 0;
1115
              MEM_NOTRAP_P (reg) = 1;
1116
            }
1117
          else if (reg_equiv_mem (i))
1118
            XEXP (reg_equiv_mem (i), 0) = addr;
1119
        }
1120
 
1121
      /* We don't want complex addressing modes in debug insns
1122
         if simpler ones will do, so delegitimize equivalences
1123
         in debug insns.  */
1124
      if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1125
        {
1126
          rtx reg = regno_reg_rtx[i];
1127
          rtx equiv = 0;
1128
          df_ref use, next;
1129
 
1130
          if (reg_equiv_constant (i))
1131
            equiv = reg_equiv_constant (i);
1132
          else if (reg_equiv_invariant (i))
1133
            equiv = reg_equiv_invariant (i);
1134
          else if (reg && MEM_P (reg))
1135
            equiv = targetm.delegitimize_address (reg);
1136
          else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1137
            equiv = reg;
1138
 
1139
          if (equiv == reg)
1140
            continue;
1141
 
1142
          for (use = DF_REG_USE_CHAIN (i); use; use = next)
1143
            {
1144
              insn = DF_REF_INSN (use);
1145
 
1146
              /* Make sure the next ref is for a different instruction,
1147
                 so that we're not affected by the rescan.  */
1148
              next = DF_REF_NEXT_REG (use);
1149
              while (next && DF_REF_INSN (next) == insn)
1150
                next = DF_REF_NEXT_REG (next);
1151
 
1152
              if (DEBUG_INSN_P (insn))
1153
                {
1154
                  if (!equiv)
1155
                    {
1156
                      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1157
                      df_insn_rescan_debug_internal (insn);
1158
                    }
1159
                  else
1160
                    INSN_VAR_LOCATION_LOC (insn)
1161
                      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1162
                                              reg, equiv);
1163
                }
1164
            }
1165
        }
1166
    }
1167
 
1168
  /* We must set reload_completed now since the cleanup_subreg_operands call
1169
     below will re-recognize each insn and reload may have generated insns
1170
     which are only valid during and after reload.  */
1171
  reload_completed = 1;
1172
 
1173
  /* Make a pass over all the insns and delete all USEs which we inserted
1174
     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1175
     notes.  Delete all CLOBBER insns, except those that refer to the return
1176
     value and the special mem:BLK CLOBBERs added to prevent the scheduler
1177
     from misarranging variable-array code, and simplify (subreg (reg))
1178
     operands.  Strip and regenerate REG_INC notes that may have been moved
1179
     around.  */
1180
 
1181
  for (insn = first; insn; insn = NEXT_INSN (insn))
1182
    if (INSN_P (insn))
1183
      {
1184
        rtx *pnote;
1185
 
1186
        if (CALL_P (insn))
1187
          replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1188
                              VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1189
 
1190
        if ((GET_CODE (PATTERN (insn)) == USE
1191
             /* We mark with QImode USEs introduced by reload itself.  */
1192
             && (GET_MODE (insn) == QImode
1193
                 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1194
            || (GET_CODE (PATTERN (insn)) == CLOBBER
1195
                && (!MEM_P (XEXP (PATTERN (insn), 0))
1196
                    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1197
                    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1198
                        && XEXP (XEXP (PATTERN (insn), 0), 0)
1199
                                != stack_pointer_rtx))
1200
                && (!REG_P (XEXP (PATTERN (insn), 0))
1201
                    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1202
          {
1203
            delete_insn (insn);
1204
            continue;
1205
          }
1206
 
1207
        /* Some CLOBBERs may survive until here and still reference unassigned
1208
           pseudos with const equivalent, which may in turn cause ICE in later
1209
           passes if the reference remains in place.  */
1210
        if (GET_CODE (PATTERN (insn)) == CLOBBER)
1211
          replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1212
                              VOIDmode, PATTERN (insn));
1213
 
1214
        /* Discard obvious no-ops, even without -O.  This optimization
1215
           is fast and doesn't interfere with debugging.  */
1216
        if (NONJUMP_INSN_P (insn)
1217
            && GET_CODE (PATTERN (insn)) == SET
1218
            && REG_P (SET_SRC (PATTERN (insn)))
1219
            && REG_P (SET_DEST (PATTERN (insn)))
1220
            && (REGNO (SET_SRC (PATTERN (insn)))
1221
                == REGNO (SET_DEST (PATTERN (insn)))))
1222
          {
1223
            delete_insn (insn);
1224
            continue;
1225
          }
1226
 
1227
        pnote = &REG_NOTES (insn);
1228
        while (*pnote != 0)
1229
          {
1230
            if (REG_NOTE_KIND (*pnote) == REG_DEAD
1231
                || REG_NOTE_KIND (*pnote) == REG_UNUSED
1232
                || REG_NOTE_KIND (*pnote) == REG_INC)
1233
              *pnote = XEXP (*pnote, 1);
1234
            else
1235
              pnote = &XEXP (*pnote, 1);
1236
          }
1237
 
1238
#ifdef AUTO_INC_DEC
1239
        add_auto_inc_notes (insn, PATTERN (insn));
1240
#endif
1241
 
1242
        /* Simplify (subreg (reg)) if it appears as an operand.  */
1243
        cleanup_subreg_operands (insn);
1244
 
1245
        /* Clean up invalid ASMs so that they don't confuse later passes.
1246
           See PR 21299.  */
1247
        if (asm_noperands (PATTERN (insn)) >= 0)
1248
          {
1249
            extract_insn (insn);
1250
            if (!constrain_operands (1))
1251
              {
1252
                error_for_asm (insn,
1253
                               "%<asm%> operand has impossible constraints");
1254
                delete_insn (insn);
1255
                continue;
1256
              }
1257
          }
1258
      }
1259
 
1260
  /* If we are doing generic stack checking, give a warning if this
1261
     function's frame size is larger than we expect.  */
1262
  if (flag_stack_check == GENERIC_STACK_CHECK)
1263
    {
1264
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1265
      static int verbose_warned = 0;
1266
 
1267
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1268
        if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1269
          size += UNITS_PER_WORD;
1270
 
1271
      if (size > STACK_CHECK_MAX_FRAME_SIZE)
1272
        {
1273
          warning (0, "frame size too large for reliable stack checking");
1274
          if (! verbose_warned)
1275
            {
1276
              warning (0, "try reducing the number of local variables");
1277
              verbose_warned = 1;
1278
            }
1279
        }
1280
    }
1281
 
1282
  free (temp_pseudo_reg_arr);
1283
 
1284
  /* Indicate that we no longer have known memory locations or constants.  */
1285
  free_reg_equiv ();
1286
 
1287
  free (reg_max_ref_width);
1288
  free (reg_old_renumber);
1289
  free (pseudo_previous_regs);
1290
  free (pseudo_forbidden_regs);
1291
 
1292
  CLEAR_HARD_REG_SET (used_spill_regs);
1293
  for (i = 0; i < n_spills; i++)
1294
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1295
 
1296
  /* Free all the insn_chain structures at once.  */
1297
  obstack_free (&reload_obstack, reload_startobj);
1298
  unused_insn_chains = 0;
1299
 
1300
  inserted = fixup_abnormal_edges ();
1301
 
1302
  /* We've possibly turned single trapping insn into multiple ones.  */
1303
  if (cfun->can_throw_non_call_exceptions)
1304
    {
1305
      sbitmap blocks;
1306
      blocks = sbitmap_alloc (last_basic_block);
1307
      sbitmap_ones (blocks);
1308
      find_many_sub_basic_blocks (blocks);
1309
      sbitmap_free (blocks);
1310
    }
1311
 
1312
  if (inserted)
1313
    commit_edge_insertions ();
1314
 
1315
  /* Replacing pseudos with their memory equivalents might have
1316
     created shared rtx.  Subsequent passes would get confused
1317
     by this, so unshare everything here.  */
1318
  unshare_all_rtl_again (first);
1319
 
1320
#ifdef STACK_BOUNDARY
1321
  /* init_emit has set the alignment of the hard frame pointer
1322
     to STACK_BOUNDARY.  It is very likely no longer valid if
1323
     the hard frame pointer was used for register allocation.  */
1324
  if (!frame_pointer_needed)
1325
    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1326
#endif
1327
 
1328
  VEC_free (rtx_p, heap, substitute_stack);
1329
 
1330
  gcc_assert (bitmap_empty_p (&spilled_pseudos));
1331
 
1332
  reload_completed = !failure;
1333
 
1334
  return need_dce;
1335
}
1336
 
1337
/* Yet another special case.  Unfortunately, reg-stack forces people to
1338
   write incorrect clobbers in asm statements.  These clobbers must not
1339
   cause the register to appear in bad_spill_regs, otherwise we'll call
1340
   fatal_insn later.  We clear the corresponding regnos in the live
1341
   register sets to avoid this.
1342
   The whole thing is rather sick, I'm afraid.  */
1343
 
1344
static void
1345
maybe_fix_stack_asms (void)
1346
{
1347
#ifdef STACK_REGS
1348
  const char *constraints[MAX_RECOG_OPERANDS];
1349
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1350
  struct insn_chain *chain;
1351
 
1352
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1353
    {
1354
      int i, noperands;
1355
      HARD_REG_SET clobbered, allowed;
1356
      rtx pat;
1357
 
1358
      if (! INSN_P (chain->insn)
1359
          || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1360
        continue;
1361
      pat = PATTERN (chain->insn);
1362
      if (GET_CODE (pat) != PARALLEL)
1363
        continue;
1364
 
1365
      CLEAR_HARD_REG_SET (clobbered);
1366
      CLEAR_HARD_REG_SET (allowed);
1367
 
1368
      /* First, make a mask of all stack regs that are clobbered.  */
1369
      for (i = 0; i < XVECLEN (pat, 0); i++)
1370
        {
1371
          rtx t = XVECEXP (pat, 0, i);
1372
          if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1373
            SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1374
        }
1375
 
1376
      /* Get the operand values and constraints out of the insn.  */
1377
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1378
                           constraints, operand_mode, NULL);
1379
 
1380
      /* For every operand, see what registers are allowed.  */
1381
      for (i = 0; i < noperands; i++)
1382
        {
1383
          const char *p = constraints[i];
1384
          /* For every alternative, we compute the class of registers allowed
1385
             for reloading in CLS, and merge its contents into the reg set
1386
             ALLOWED.  */
1387
          int cls = (int) NO_REGS;
1388
 
1389
          for (;;)
1390
            {
1391
              char c = *p;
1392
 
1393
              if (c == '\0' || c == ',' || c == '#')
1394
                {
1395
                  /* End of one alternative - mark the regs in the current
1396
                     class, and reset the class.  */
1397
                  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1398
                  cls = NO_REGS;
1399
                  p++;
1400
                  if (c == '#')
1401
                    do {
1402
                      c = *p++;
1403
                    } while (c != '\0' && c != ',');
1404
                  if (c == '\0')
1405
                    break;
1406
                  continue;
1407
                }
1408
 
1409
              switch (c)
1410
                {
1411
                case '=': case '+': case '*': case '%': case '?': case '!':
1412
                case '0': case '1': case '2': case '3': case '4': case '<':
1413
                case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1414
                case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1415
                case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1416
                case TARGET_MEM_CONSTRAINT:
1417
                  break;
1418
 
1419
                case 'p':
1420
                  cls = (int) reg_class_subunion[cls]
1421
                      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1422
                                             ADDRESS, SCRATCH)];
1423
                  break;
1424
 
1425
                case 'g':
1426
                case 'r':
1427
                  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1428
                  break;
1429
 
1430
                default:
1431
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1432
                    cls = (int) reg_class_subunion[cls]
1433
                      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1434
                                             ADDRESS, SCRATCH)];
1435
                  else
1436
                    cls = (int) reg_class_subunion[cls]
1437
                      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1438
                }
1439
              p += CONSTRAINT_LEN (c, p);
1440
            }
1441
        }
1442
      /* Those of the registers which are clobbered, but allowed by the
1443
         constraints, must be usable as reload registers.  So clear them
1444
         out of the life information.  */
1445
      AND_HARD_REG_SET (allowed, clobbered);
1446
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1447
        if (TEST_HARD_REG_BIT (allowed, i))
1448
          {
1449
            CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1450
            CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1451
          }
1452
    }
1453
 
1454
#endif
1455
}
1456
 
1457
/* Copy the global variables n_reloads and rld into the corresponding elts
1458
   of CHAIN.  */
1459
static void
1460
copy_reloads (struct insn_chain *chain)
1461
{
1462
  chain->n_reloads = n_reloads;
1463
  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1464
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1465
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1466
}
1467
 
1468
/* Walk the chain of insns, and determine for each whether it needs reloads
1469
   and/or eliminations.  Build the corresponding insns_need_reload list, and
1470
   set something_needs_elimination as appropriate.  */
1471
static void
1472
calculate_needs_all_insns (int global)
1473
{
1474
  struct insn_chain **pprev_reload = &insns_need_reload;
1475
  struct insn_chain *chain, *next = 0;
1476
 
1477
  something_needs_elimination = 0;
1478
 
1479
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1480
  for (chain = reload_insn_chain; chain != 0; chain = next)
1481
    {
1482
      rtx insn = chain->insn;
1483
 
1484
      next = chain->next;
1485
 
1486
      /* Clear out the shortcuts.  */
1487
      chain->n_reloads = 0;
1488
      chain->need_elim = 0;
1489
      chain->need_reload = 0;
1490
      chain->need_operand_change = 0;
1491
 
1492
      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1493
         include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1494
         what effects this has on the known offsets at labels.  */
1495
 
1496
      if (LABEL_P (insn) || JUMP_P (insn)
1497
          || (INSN_P (insn) && REG_NOTES (insn) != 0))
1498
        set_label_offsets (insn, insn, 0);
1499
 
1500
      if (INSN_P (insn))
1501
        {
1502
          rtx old_body = PATTERN (insn);
1503
          int old_code = INSN_CODE (insn);
1504
          rtx old_notes = REG_NOTES (insn);
1505
          int did_elimination = 0;
1506
          int operands_changed = 0;
1507
          rtx set = single_set (insn);
1508
 
1509
          /* Skip insns that only set an equivalence.  */
1510
          if (set && REG_P (SET_DEST (set))
1511
              && reg_renumber[REGNO (SET_DEST (set))] < 0
1512
              && (reg_equiv_constant (REGNO (SET_DEST (set)))
1513
                  || (reg_equiv_invariant (REGNO (SET_DEST (set)))))
1514
                      && reg_equiv_init (REGNO (SET_DEST (set))))
1515
            continue;
1516
 
1517
          /* If needed, eliminate any eliminable registers.  */
1518
          if (num_eliminable || num_eliminable_invariants)
1519
            did_elimination = eliminate_regs_in_insn (insn, 0);
1520
 
1521
          /* Analyze the instruction.  */
1522
          operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1523
                                           global, spill_reg_order);
1524
 
1525
          /* If a no-op set needs more than one reload, this is likely
1526
             to be something that needs input address reloads.  We
1527
             can't get rid of this cleanly later, and it is of no use
1528
             anyway, so discard it now.
1529
             We only do this when expensive_optimizations is enabled,
1530
             since this complements reload inheritance / output
1531
             reload deletion, and it can make debugging harder.  */
1532
          if (flag_expensive_optimizations && n_reloads > 1)
1533
            {
1534
              rtx set = single_set (insn);
1535
              if (set
1536
                  &&
1537
                  ((SET_SRC (set) == SET_DEST (set)
1538
                    && REG_P (SET_SRC (set))
1539
                    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1540
                   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1541
                       && reg_renumber[REGNO (SET_SRC (set))] < 0
1542
                       && reg_renumber[REGNO (SET_DEST (set))] < 0
1543
                       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
1544
                       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
1545
                       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
1546
                                       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
1547
                {
1548
                  if (ira_conflicts_p)
1549
                    /* Inform IRA about the insn deletion.  */
1550
                    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1551
                                                   REGNO (SET_SRC (set)));
1552
                  delete_insn (insn);
1553
                  /* Delete it from the reload chain.  */
1554
                  if (chain->prev)
1555
                    chain->prev->next = next;
1556
                  else
1557
                    reload_insn_chain = next;
1558
                  if (next)
1559
                    next->prev = chain->prev;
1560
                  chain->next = unused_insn_chains;
1561
                  unused_insn_chains = chain;
1562
                  continue;
1563
                }
1564
            }
1565
          if (num_eliminable)
1566
            update_eliminable_offsets ();
1567
 
1568
          /* Remember for later shortcuts which insns had any reloads or
1569
             register eliminations.  */
1570
          chain->need_elim = did_elimination;
1571
          chain->need_reload = n_reloads > 0;
1572
          chain->need_operand_change = operands_changed;
1573
 
1574
          /* Discard any register replacements done.  */
1575
          if (did_elimination)
1576
            {
1577
              obstack_free (&reload_obstack, reload_insn_firstobj);
1578
              PATTERN (insn) = old_body;
1579
              INSN_CODE (insn) = old_code;
1580
              REG_NOTES (insn) = old_notes;
1581
              something_needs_elimination = 1;
1582
            }
1583
 
1584
          something_needs_operands_changed |= operands_changed;
1585
 
1586
          if (n_reloads != 0)
1587
            {
1588
              copy_reloads (chain);
1589
              *pprev_reload = chain;
1590
              pprev_reload = &chain->next_need_reload;
1591
            }
1592
        }
1593
    }
1594
  *pprev_reload = 0;
1595
}
1596
 
1597
/* This function is called from the register allocator to set up estimates
1598
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1599
   an invariant.  The structure is similar to calculate_needs_all_insns.  */
1600
 
1601
void
1602
calculate_elim_costs_all_insns (void)
1603
{
1604
  int *reg_equiv_init_cost;
1605
  basic_block bb;
1606
  int i;
1607
 
1608
  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1609
  init_elim_table ();
1610
  init_eliminable_invariants (get_insns (), false);
1611
 
1612
  set_initial_elim_offsets ();
1613
  set_initial_label_offsets ();
1614
 
1615
  FOR_EACH_BB (bb)
1616
    {
1617
      rtx insn;
1618
      elim_bb = bb;
1619
 
1620
      FOR_BB_INSNS (bb, insn)
1621
        {
1622
          /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1623
             include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1624
             what effects this has on the known offsets at labels.  */
1625
 
1626
          if (LABEL_P (insn) || JUMP_P (insn)
1627
              || (INSN_P (insn) && REG_NOTES (insn) != 0))
1628
            set_label_offsets (insn, insn, 0);
1629
 
1630
          if (INSN_P (insn))
1631
            {
1632
              rtx set = single_set (insn);
1633
 
1634
              /* Skip insns that only set an equivalence.  */
1635
              if (set && REG_P (SET_DEST (set))
1636
                  && reg_renumber[REGNO (SET_DEST (set))] < 0
1637
                  && (reg_equiv_constant (REGNO (SET_DEST (set)))
1638
                      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
1639
                {
1640
                  unsigned regno = REGNO (SET_DEST (set));
1641
                  rtx init = reg_equiv_init (regno);
1642
                  if (init)
1643
                    {
1644
                      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1645
                                                false, true);
1646
                      int cost = set_src_cost (t, optimize_bb_for_speed_p (bb));
1647
                      int freq = REG_FREQ_FROM_BB (bb);
1648
 
1649
                      reg_equiv_init_cost[regno] = cost * freq;
1650
                      continue;
1651
                    }
1652
                }
1653
              /* If needed, eliminate any eliminable registers.  */
1654
              if (num_eliminable || num_eliminable_invariants)
1655
                elimination_costs_in_insn (insn);
1656
 
1657
              if (num_eliminable)
1658
                update_eliminable_offsets ();
1659
            }
1660
        }
1661
    }
1662
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1663
    {
1664
      if (reg_equiv_invariant (i))
1665
        {
1666
          if (reg_equiv_init (i))
1667
            {
1668
              int cost = reg_equiv_init_cost[i];
1669
              if (dump_file)
1670
                fprintf (dump_file,
1671
                         "Reg %d has equivalence, initial gains %d\n", i, cost);
1672
              if (cost != 0)
1673
                ira_adjust_equiv_reg_cost (i, cost);
1674
            }
1675
          else
1676
            {
1677
              if (dump_file)
1678
                fprintf (dump_file,
1679
                         "Reg %d had equivalence, but can't be eliminated\n",
1680
                         i);
1681
              ira_adjust_equiv_reg_cost (i, 0);
1682
            }
1683
        }
1684
    }
1685
 
1686
  free (reg_equiv_init_cost);
1687
  free (offsets_known_at);
1688
  free (offsets_at);
1689
  offsets_at = NULL;
1690
  offsets_known_at = NULL;
1691
}
1692
 
1693
/* Comparison function for qsort to decide which of two reloads
1694
   should be handled first.  *P1 and *P2 are the reload numbers.  */
1695
 
1696
static int
1697
reload_reg_class_lower (const void *r1p, const void *r2p)
1698
{
1699
  int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1700
  int t;
1701
 
1702
  /* Consider required reloads before optional ones.  */
1703
  t = rld[r1].optional - rld[r2].optional;
1704
  if (t != 0)
1705
    return t;
1706
 
1707
  /* Count all solitary classes before non-solitary ones.  */
1708
  t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1709
       - (reg_class_size[(int) rld[r1].rclass] == 1));
1710
  if (t != 0)
1711
    return t;
1712
 
1713
  /* Aside from solitaires, consider all multi-reg groups first.  */
1714
  t = rld[r2].nregs - rld[r1].nregs;
1715
  if (t != 0)
1716
    return t;
1717
 
1718
  /* Consider reloads in order of increasing reg-class number.  */
1719
  t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1720
  if (t != 0)
1721
    return t;
1722
 
1723
  /* If reloads are equally urgent, sort by reload number,
1724
     so that the results of qsort leave nothing to chance.  */
1725
  return r1 - r2;
1726
}
1727
 
1728
/* The cost of spilling each hard reg.  */
1729
static int spill_cost[FIRST_PSEUDO_REGISTER];
1730
 
1731
/* When spilling multiple hard registers, we use SPILL_COST for the first
1732
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1733
   only the first hard reg for a multi-reg pseudo.  */
1734
static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1735
 
1736
/* Map of hard regno to pseudo regno currently occupying the hard
1737
   reg.  */
1738
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1739
 
1740
/* Update the spill cost arrays, considering that pseudo REG is live.  */
1741
 
1742
static void
1743
count_pseudo (int reg)
1744
{
1745
  int freq = REG_FREQ (reg);
1746
  int r = reg_renumber[reg];
1747
  int nregs;
1748
 
1749
  if (REGNO_REG_SET_P (&pseudos_counted, reg)
1750
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1751
      /* Ignore spilled pseudo-registers which can be here only if IRA
1752
         is used.  */
1753
      || (ira_conflicts_p && r < 0))
1754
    return;
1755
 
1756
  SET_REGNO_REG_SET (&pseudos_counted, reg);
1757
 
1758
  gcc_assert (r >= 0);
1759
 
1760
  spill_add_cost[r] += freq;
1761
  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1762
  while (nregs-- > 0)
1763
    {
1764
      hard_regno_to_pseudo_regno[r + nregs] = reg;
1765
      spill_cost[r + nregs] += freq;
1766
    }
1767
}
1768
 
1769
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1770
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1771
 
1772
static void
1773
order_regs_for_reload (struct insn_chain *chain)
1774
{
1775
  unsigned i;
1776
  HARD_REG_SET used_by_pseudos;
1777
  HARD_REG_SET used_by_pseudos2;
1778
  reg_set_iterator rsi;
1779
 
1780
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1781
 
1782
  memset (spill_cost, 0, sizeof spill_cost);
1783
  memset (spill_add_cost, 0, sizeof spill_add_cost);
1784
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1785
    hard_regno_to_pseudo_regno[i] = -1;
1786
 
1787
  /* Count number of uses of each hard reg by pseudo regs allocated to it
1788
     and then order them by decreasing use.  First exclude hard registers
1789
     that are live in or across this insn.  */
1790
 
1791
  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1792
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1793
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1794
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1795
 
1796
  /* Now find out which pseudos are allocated to it, and update
1797
     hard_reg_n_uses.  */
1798
  CLEAR_REG_SET (&pseudos_counted);
1799
 
1800
  EXECUTE_IF_SET_IN_REG_SET
1801
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1802
    {
1803
      count_pseudo (i);
1804
    }
1805
  EXECUTE_IF_SET_IN_REG_SET
1806
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1807
    {
1808
      count_pseudo (i);
1809
    }
1810
  CLEAR_REG_SET (&pseudos_counted);
1811
}
1812
 
1813
/* Vector of reload-numbers showing the order in which the reloads should
1814
   be processed.  */
1815
static short reload_order[MAX_RELOADS];
1816
 
1817
/* This is used to keep track of the spill regs used in one insn.  */
1818
static HARD_REG_SET used_spill_regs_local;
1819
 
1820
/* We decided to spill hard register SPILLED, which has a size of
1821
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1822
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1823
   update SPILL_COST/SPILL_ADD_COST.  */
1824
 
1825
static void
1826
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1827
{
1828
  int freq = REG_FREQ (reg);
1829
  int r = reg_renumber[reg];
1830
  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1831
 
1832
  /* Ignore spilled pseudo-registers which can be here only if IRA is
1833
     used.  */
1834
  if ((ira_conflicts_p && r < 0)
1835
      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1836
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
1837
    return;
1838
 
1839
  SET_REGNO_REG_SET (&spilled_pseudos, reg);
1840
 
1841
  spill_add_cost[r] -= freq;
1842
  while (nregs-- > 0)
1843
    {
1844
      hard_regno_to_pseudo_regno[r + nregs] = -1;
1845
      spill_cost[r + nregs] -= freq;
1846
    }
1847
}
1848
 
1849
/* Find reload register to use for reload number ORDER.  */
1850
 
1851
static int
1852
find_reg (struct insn_chain *chain, int order)
1853
{
1854
  int rnum = reload_order[order];
1855
  struct reload *rl = rld + rnum;
1856
  int best_cost = INT_MAX;
1857
  int best_reg = -1;
1858
  unsigned int i, j, n;
1859
  int k;
1860
  HARD_REG_SET not_usable;
1861
  HARD_REG_SET used_by_other_reload;
1862
  reg_set_iterator rsi;
1863
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1864
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1865
 
1866
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1867
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1868
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1869
 
1870
  CLEAR_HARD_REG_SET (used_by_other_reload);
1871
  for (k = 0; k < order; k++)
1872
    {
1873
      int other = reload_order[k];
1874
 
1875
      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1876
        for (j = 0; j < rld[other].nregs; j++)
1877
          SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1878
    }
1879
 
1880
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1881
    {
1882
#ifdef REG_ALLOC_ORDER
1883
      unsigned int regno = reg_alloc_order[i];
1884
#else
1885
      unsigned int regno = i;
1886
#endif
1887
 
1888
      if (! TEST_HARD_REG_BIT (not_usable, regno)
1889
          && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1890
          && HARD_REGNO_MODE_OK (regno, rl->mode))
1891
        {
1892
          int this_cost = spill_cost[regno];
1893
          int ok = 1;
1894
          unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1895
 
1896
          for (j = 1; j < this_nregs; j++)
1897
            {
1898
              this_cost += spill_add_cost[regno + j];
1899
              if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1900
                  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1901
                ok = 0;
1902
            }
1903
          if (! ok)
1904
            continue;
1905
 
1906
          if (ira_conflicts_p)
1907
            {
1908
              /* Ask IRA to find a better pseudo-register for
1909
                 spilling.  */
1910
              for (n = j = 0; j < this_nregs; j++)
1911
                {
1912
                  int r = hard_regno_to_pseudo_regno[regno + j];
1913
 
1914
                  if (r < 0)
1915
                    continue;
1916
                  if (n == 0 || regno_pseudo_regs[n - 1] != r)
1917
                    regno_pseudo_regs[n++] = r;
1918
                }
1919
              regno_pseudo_regs[n++] = -1;
1920
              if (best_reg < 0
1921
                  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1922
                                                      best_regno_pseudo_regs,
1923
                                                      rl->in, rl->out,
1924
                                                      chain->insn))
1925
                {
1926
                  best_reg = regno;
1927
                  for (j = 0;; j++)
1928
                    {
1929
                      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1930
                      if (regno_pseudo_regs[j] < 0)
1931
                        break;
1932
                    }
1933
                }
1934
              continue;
1935
            }
1936
 
1937
          if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1938
            this_cost--;
1939
          if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1940
            this_cost--;
1941
          if (this_cost < best_cost
1942
              /* Among registers with equal cost, prefer caller-saved ones, or
1943
                 use REG_ALLOC_ORDER if it is defined.  */
1944
              || (this_cost == best_cost
1945
#ifdef REG_ALLOC_ORDER
1946
                  && (inv_reg_alloc_order[regno]
1947
                      < inv_reg_alloc_order[best_reg])
1948
#else
1949
                  && call_used_regs[regno]
1950
                  && ! call_used_regs[best_reg]
1951
#endif
1952
                  ))
1953
            {
1954
              best_reg = regno;
1955
              best_cost = this_cost;
1956
            }
1957
        }
1958
    }
1959
  if (best_reg == -1)
1960
    return 0;
1961
 
1962
  if (dump_file)
1963
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1964
 
1965
  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1966
  rl->regno = best_reg;
1967
 
1968
  EXECUTE_IF_SET_IN_REG_SET
1969
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1970
    {
1971
      count_spilled_pseudo (best_reg, rl->nregs, j);
1972
    }
1973
 
1974
  EXECUTE_IF_SET_IN_REG_SET
1975
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1976
    {
1977
      count_spilled_pseudo (best_reg, rl->nregs, j);
1978
    }
1979
 
1980
  for (i = 0; i < rl->nregs; i++)
1981
    {
1982
      gcc_assert (spill_cost[best_reg + i] == 0);
1983
      gcc_assert (spill_add_cost[best_reg + i] == 0);
1984
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1985
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1986
    }
1987
  return 1;
1988
}
1989
 
1990
/* Find more reload regs to satisfy the remaining need of an insn, which
1991
   is given by CHAIN.
1992
   Do it by ascending class number, since otherwise a reg
1993
   might be spilled for a big class and might fail to count
1994
   for a smaller class even though it belongs to that class.  */
1995
 
1996
static void
1997
find_reload_regs (struct insn_chain *chain)
1998
{
1999
  int i;
2000
 
2001
  /* In order to be certain of getting the registers we need,
2002
     we must sort the reloads into order of increasing register class.
2003
     Then our grabbing of reload registers will parallel the process
2004
     that provided the reload registers.  */
2005
  for (i = 0; i < chain->n_reloads; i++)
2006
    {
2007
      /* Show whether this reload already has a hard reg.  */
2008
      if (chain->rld[i].reg_rtx)
2009
        {
2010
          int regno = REGNO (chain->rld[i].reg_rtx);
2011
          chain->rld[i].regno = regno;
2012
          chain->rld[i].nregs
2013
            = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2014
        }
2015
      else
2016
        chain->rld[i].regno = -1;
2017
      reload_order[i] = i;
2018
    }
2019
 
2020
  n_reloads = chain->n_reloads;
2021
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2022
 
2023
  CLEAR_HARD_REG_SET (used_spill_regs_local);
2024
 
2025
  if (dump_file)
2026
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2027
 
2028
  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2029
 
2030
  /* Compute the order of preference for hard registers to spill.  */
2031
 
2032
  order_regs_for_reload (chain);
2033
 
2034
  for (i = 0; i < n_reloads; i++)
2035
    {
2036
      int r = reload_order[i];
2037
 
2038
      /* Ignore reloads that got marked inoperative.  */
2039
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2040
          && ! rld[r].optional
2041
          && rld[r].regno == -1)
2042
        if (! find_reg (chain, i))
2043
          {
2044
            if (dump_file)
2045
              fprintf (dump_file, "reload failure for reload %d\n", r);
2046
            spill_failure (chain->insn, rld[r].rclass);
2047
            failure = 1;
2048
            return;
2049
          }
2050
    }
2051
 
2052
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2053
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2054
 
2055
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2056
}
2057
 
2058
static void
2059
select_reload_regs (void)
2060
{
2061
  struct insn_chain *chain;
2062
 
2063
  /* Try to satisfy the needs for each insn.  */
2064
  for (chain = insns_need_reload; chain != 0;
2065
       chain = chain->next_need_reload)
2066
    find_reload_regs (chain);
2067
}
2068
 
2069
/* Delete all insns that were inserted by emit_caller_save_insns during
2070
   this iteration.  */
2071
static void
2072
delete_caller_save_insns (void)
2073
{
2074
  struct insn_chain *c = reload_insn_chain;
2075
 
2076
  while (c != 0)
2077
    {
2078
      while (c != 0 && c->is_caller_save_insn)
2079
        {
2080
          struct insn_chain *next = c->next;
2081
          rtx insn = c->insn;
2082
 
2083
          if (c == reload_insn_chain)
2084
            reload_insn_chain = next;
2085
          delete_insn (insn);
2086
 
2087
          if (next)
2088
            next->prev = c->prev;
2089
          if (c->prev)
2090
            c->prev->next = next;
2091
          c->next = unused_insn_chains;
2092
          unused_insn_chains = c;
2093
          c = next;
2094
        }
2095
      if (c != 0)
2096
        c = c->next;
2097
    }
2098
}
2099
 
2100
/* Handle the failure to find a register to spill.
2101
   INSN should be one of the insns which needed this particular spill reg.  */
2102
 
2103
static void
2104
spill_failure (rtx insn, enum reg_class rclass)
2105
{
2106
  if (asm_noperands (PATTERN (insn)) >= 0)
2107
    error_for_asm (insn, "can%'t find a register in class %qs while "
2108
                   "reloading %<asm%>",
2109
                   reg_class_names[rclass]);
2110
  else
2111
    {
2112
      error ("unable to find a register to spill in class %qs",
2113
             reg_class_names[rclass]);
2114
 
2115
      if (dump_file)
2116
        {
2117
          fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2118
          debug_reload_to_stream (dump_file);
2119
        }
2120
      fatal_insn ("this is the insn:", insn);
2121
    }
2122
}
2123
 
2124
/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2125
   data that is dead in INSN.  */
2126
 
2127
static void
2128
delete_dead_insn (rtx insn)
2129
{
2130
  rtx prev = prev_active_insn (insn);
2131
  rtx prev_dest;
2132
 
2133
  /* If the previous insn sets a register that dies in our insn make
2134
     a note that we want to run DCE immediately after reload.
2135
 
2136
     We used to delete the previous insn & recurse, but that's wrong for
2137
     block local equivalences.  Instead of trying to figure out the exact
2138
     circumstances where we can delete the potentially dead insns, just
2139
     let DCE do the job.  */
2140
  if (prev && GET_CODE (PATTERN (prev)) == SET
2141
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2142
      && reg_mentioned_p (prev_dest, PATTERN (insn))
2143
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2144
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
2145
    need_dce = 1;
2146
 
2147
  SET_INSN_DELETED (insn);
2148
}
2149
 
2150
/* Modify the home of pseudo-reg I.
2151
   The new home is present in reg_renumber[I].
2152
 
2153
   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2154
   or it may be -1, meaning there is none or it is not relevant.
2155
   This is used so that all pseudos spilled from a given hard reg
2156
   can share one stack slot.  */
2157
 
2158
static void
2159
alter_reg (int i, int from_reg, bool dont_share_p)
2160
{
2161
  /* When outputting an inline function, this can happen
2162
     for a reg that isn't actually used.  */
2163
  if (regno_reg_rtx[i] == 0)
2164
    return;
2165
 
2166
  /* If the reg got changed to a MEM at rtl-generation time,
2167
     ignore it.  */
2168
  if (!REG_P (regno_reg_rtx[i]))
2169
    return;
2170
 
2171
  /* Modify the reg-rtx to contain the new hard reg
2172
     number or else to contain its pseudo reg number.  */
2173
  SET_REGNO (regno_reg_rtx[i],
2174
             reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2175
 
2176
  /* If we have a pseudo that is needed but has no hard reg or equivalent,
2177
     allocate a stack slot for it.  */
2178
 
2179
  if (reg_renumber[i] < 0
2180
      && REG_N_REFS (i) > 0
2181
      && reg_equiv_constant (i) == 0
2182
      && (reg_equiv_invariant (i) == 0
2183
          || reg_equiv_init (i) == 0)
2184
      && reg_equiv_memory_loc (i) == 0)
2185
    {
2186
      rtx x = NULL_RTX;
2187
      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2188
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2189
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2190
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2191
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2192
      int adjust = 0;
2193
 
2194
      something_was_spilled = true;
2195
 
2196
      if (ira_conflicts_p)
2197
        {
2198
          /* Mark the spill for IRA.  */
2199
          SET_REGNO_REG_SET (&spilled_pseudos, i);
2200
          if (!dont_share_p)
2201
            x = ira_reuse_stack_slot (i, inherent_size, total_size);
2202
        }
2203
 
2204
      if (x)
2205
        ;
2206
 
2207
      /* Each pseudo reg has an inherent size which comes from its own mode,
2208
         and a total size which provides room for paradoxical subregs
2209
         which refer to the pseudo reg in wider modes.
2210
 
2211
         We can use a slot already allocated if it provides both
2212
         enough inherent space and enough total space.
2213
         Otherwise, we allocate a new slot, making sure that it has no less
2214
         inherent space, and no less total space, then the previous slot.  */
2215
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2216
        {
2217
          rtx stack_slot;
2218
 
2219
          /* No known place to spill from => no slot to reuse.  */
2220
          x = assign_stack_local (mode, total_size,
2221
                                  min_align > inherent_align
2222
                                  || total_size > inherent_size ? -1 : 0);
2223
 
2224
          stack_slot = x;
2225
 
2226
          /* Cancel the big-endian correction done in assign_stack_local.
2227
             Get the address of the beginning of the slot.  This is so we
2228
             can do a big-endian correction unconditionally below.  */
2229
          if (BYTES_BIG_ENDIAN)
2230
            {
2231
              adjust = inherent_size - total_size;
2232
              if (adjust)
2233
                stack_slot
2234
                  = adjust_address_nv (x, mode_for_size (total_size
2235
                                                         * BITS_PER_UNIT,
2236
                                                         MODE_INT, 1),
2237
                                       adjust);
2238
            }
2239
 
2240
          if (! dont_share_p && ira_conflicts_p)
2241
            /* Inform IRA about allocation a new stack slot.  */
2242
            ira_mark_new_stack_slot (stack_slot, i, total_size);
2243
        }
2244
 
2245
      /* Reuse a stack slot if possible.  */
2246
      else if (spill_stack_slot[from_reg] != 0
2247
               && spill_stack_slot_width[from_reg] >= total_size
2248
               && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2249
                   >= inherent_size)
2250
               && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2251
        x = spill_stack_slot[from_reg];
2252
 
2253
      /* Allocate a bigger slot.  */
2254
      else
2255
        {
2256
          /* Compute maximum size needed, both for inherent size
2257
             and for total size.  */
2258
          rtx stack_slot;
2259
 
2260
          if (spill_stack_slot[from_reg])
2261
            {
2262
              if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2263
                  > inherent_size)
2264
                mode = GET_MODE (spill_stack_slot[from_reg]);
2265
              if (spill_stack_slot_width[from_reg] > total_size)
2266
                total_size = spill_stack_slot_width[from_reg];
2267
              if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2268
                min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2269
            }
2270
 
2271
          /* Make a slot with that size.  */
2272
          x = assign_stack_local (mode, total_size,
2273
                                  min_align > inherent_align
2274
                                  || total_size > inherent_size ? -1 : 0);
2275
          stack_slot = x;
2276
 
2277
          /* Cancel the  big-endian correction done in assign_stack_local.
2278
             Get the address of the beginning of the slot.  This is so we
2279
             can do a big-endian correction unconditionally below.  */
2280
          if (BYTES_BIG_ENDIAN)
2281
            {
2282
              adjust = GET_MODE_SIZE (mode) - total_size;
2283
              if (adjust)
2284
                stack_slot
2285
                  = adjust_address_nv (x, mode_for_size (total_size
2286
                                                         * BITS_PER_UNIT,
2287
                                                         MODE_INT, 1),
2288
                                       adjust);
2289
            }
2290
 
2291
          spill_stack_slot[from_reg] = stack_slot;
2292
          spill_stack_slot_width[from_reg] = total_size;
2293
        }
2294
 
2295
      /* On a big endian machine, the "address" of the slot
2296
         is the address of the low part that fits its inherent mode.  */
2297
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2298
        adjust += (total_size - inherent_size);
2299
 
2300
      /* If we have any adjustment to make, or if the stack slot is the
2301
         wrong mode, make a new stack slot.  */
2302
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2303
 
2304
      /* Set all of the memory attributes as appropriate for a spill.  */
2305
      set_mem_attrs_for_spill (x);
2306
 
2307
      /* Save the stack slot for later.  */
2308
      reg_equiv_memory_loc (i) = x;
2309
    }
2310
}
2311
 
2312
/* Mark the slots in regs_ever_live for the hard regs used by
2313
   pseudo-reg number REGNO, accessed in MODE.  */
2314
 
2315
static void
2316
mark_home_live_1 (int regno, enum machine_mode mode)
2317
{
2318
  int i, lim;
2319
 
2320
  i = reg_renumber[regno];
2321
  if (i < 0)
2322
    return;
2323
  lim = end_hard_regno (mode, i);
2324
  while (i < lim)
2325
    df_set_regs_ever_live(i++, true);
2326
}
2327
 
2328
/* Mark the slots in regs_ever_live for the hard regs
2329
   used by pseudo-reg number REGNO.  */
2330
 
2331
void
2332
mark_home_live (int regno)
2333
{
2334
  if (reg_renumber[regno] >= 0)
2335
    mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2336
}
2337
 
2338
/* This function handles the tracking of elimination offsets around branches.
2339
 
2340
   X is a piece of RTL being scanned.
2341
 
2342
   INSN is the insn that it came from, if any.
2343
 
2344
   INITIAL_P is nonzero if we are to set the offset to be the initial
2345
   offset and zero if we are setting the offset of the label to be the
2346
   current offset.  */
2347
 
2348
static void
2349
set_label_offsets (rtx x, rtx insn, int initial_p)
2350
{
2351
  enum rtx_code code = GET_CODE (x);
2352
  rtx tem;
2353
  unsigned int i;
2354
  struct elim_table *p;
2355
 
2356
  switch (code)
2357
    {
2358
    case LABEL_REF:
2359
      if (LABEL_REF_NONLOCAL_P (x))
2360
        return;
2361
 
2362
      x = XEXP (x, 0);
2363
 
2364
      /* ... fall through ...  */
2365
 
2366
    case CODE_LABEL:
2367
      /* If we know nothing about this label, set the desired offsets.  Note
2368
         that this sets the offset at a label to be the offset before a label
2369
         if we don't know anything about the label.  This is not correct for
2370
         the label after a BARRIER, but is the best guess we can make.  If
2371
         we guessed wrong, we will suppress an elimination that might have
2372
         been possible had we been able to guess correctly.  */
2373
 
2374
      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2375
        {
2376
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2377
            offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2378
              = (initial_p ? reg_eliminate[i].initial_offset
2379
                 : reg_eliminate[i].offset);
2380
          offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2381
        }
2382
 
2383
      /* Otherwise, if this is the definition of a label and it is
2384
         preceded by a BARRIER, set our offsets to the known offset of
2385
         that label.  */
2386
 
2387
      else if (x == insn
2388
               && (tem = prev_nonnote_insn (insn)) != 0
2389
               && BARRIER_P (tem))
2390
        set_offsets_for_label (insn);
2391
      else
2392
        /* If neither of the above cases is true, compare each offset
2393
           with those previously recorded and suppress any eliminations
2394
           where the offsets disagree.  */
2395
 
2396
        for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2397
          if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2398
              != (initial_p ? reg_eliminate[i].initial_offset
2399
                  : reg_eliminate[i].offset))
2400
            reg_eliminate[i].can_eliminate = 0;
2401
 
2402
      return;
2403
 
2404
    case JUMP_INSN:
2405
      set_label_offsets (PATTERN (insn), insn, initial_p);
2406
 
2407
      /* ... fall through ...  */
2408
 
2409
    case INSN:
2410
    case CALL_INSN:
2411
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2412
         to indirectly and hence must have all eliminations at their
2413
         initial offsets.  */
2414
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2415
        if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2416
          set_label_offsets (XEXP (tem, 0), insn, 1);
2417
      return;
2418
 
2419
    case PARALLEL:
2420
    case ADDR_VEC:
2421
    case ADDR_DIFF_VEC:
2422
      /* Each of the labels in the parallel or address vector must be
2423
         at their initial offsets.  We want the first field for PARALLEL
2424
         and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2425
 
2426
      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2427
        set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2428
                           insn, initial_p);
2429
      return;
2430
 
2431
    case SET:
2432
      /* We only care about setting PC.  If the source is not RETURN,
2433
         IF_THEN_ELSE, or a label, disable any eliminations not at
2434
         their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2435
         isn't one of those possibilities.  For branches to a label,
2436
         call ourselves recursively.
2437
 
2438
         Note that this can disable elimination unnecessarily when we have
2439
         a non-local goto since it will look like a non-constant jump to
2440
         someplace in the current function.  This isn't a significant
2441
         problem since such jumps will normally be when all elimination
2442
         pairs are back to their initial offsets.  */
2443
 
2444
      if (SET_DEST (x) != pc_rtx)
2445
        return;
2446
 
2447
      switch (GET_CODE (SET_SRC (x)))
2448
        {
2449
        case PC:
2450
        case RETURN:
2451
          return;
2452
 
2453
        case LABEL_REF:
2454
          set_label_offsets (SET_SRC (x), insn, initial_p);
2455
          return;
2456
 
2457
        case IF_THEN_ELSE:
2458
          tem = XEXP (SET_SRC (x), 1);
2459
          if (GET_CODE (tem) == LABEL_REF)
2460
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2461
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2462
            break;
2463
 
2464
          tem = XEXP (SET_SRC (x), 2);
2465
          if (GET_CODE (tem) == LABEL_REF)
2466
            set_label_offsets (XEXP (tem, 0), insn, initial_p);
2467
          else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2468
            break;
2469
          return;
2470
 
2471
        default:
2472
          break;
2473
        }
2474
 
2475
      /* If we reach here, all eliminations must be at their initial
2476
         offset because we are doing a jump to a variable address.  */
2477
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2478
        if (p->offset != p->initial_offset)
2479
          p->can_eliminate = 0;
2480
      break;
2481
 
2482
    default:
2483
      break;
2484
    }
2485
}
2486
 
2487
/* Called through for_each_rtx, this function examines every reg that occurs
2488
   in PX and adjusts the costs for its elimination which are gathered by IRA.
2489
   DATA is the insn in which PX occurs.  We do not recurse into MEM
2490
   expressions.  */
2491
 
2492
static int
2493
note_reg_elim_costly (rtx *px, void *data)
2494
{
2495
  rtx insn = (rtx)data;
2496
  rtx x = *px;
2497
 
2498
  if (MEM_P (x))
2499
    return -1;
2500
 
2501
  if (REG_P (x)
2502
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
2503
      && reg_equiv_init (REGNO (x))
2504
      && reg_equiv_invariant (REGNO (x)))
2505
    {
2506
      rtx t = reg_equiv_invariant (REGNO (x));
2507
      rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2508
      int cost = set_src_cost (new_rtx, optimize_bb_for_speed_p (elim_bb));
2509
      int freq = REG_FREQ_FROM_BB (elim_bb);
2510
 
2511
      if (cost != 0)
2512
        ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2513
    }
2514
  return 0;
2515
}
2516
 
2517
/* Scan X and replace any eliminable registers (such as fp) with a
2518
   replacement (such as sp), plus an offset.
2519
 
2520
   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2521
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2522
   MEM, we are allowed to replace a sum of a register and the constant zero
2523
   with the register, which we cannot do outside a MEM.  In addition, we need
2524
   to record the fact that a register is referenced outside a MEM.
2525
 
2526
   If INSN is an insn, it is the insn containing X.  If we replace a REG
2527
   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2528
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2529
   the REG is being modified.
2530
 
2531
   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2532
   That's used when we eliminate in expressions stored in notes.
2533
   This means, do not set ref_outside_mem even if the reference
2534
   is outside of MEMs.
2535
 
2536
   If FOR_COSTS is true, we are being called before reload in order to
2537
   estimate the costs of keeping registers with an equivalence unallocated.
2538
 
2539
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2540
   replacements done assuming all offsets are at their initial values.  If
2541
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2542
   encounter, return the actual location so that find_reloads will do
2543
   the proper thing.  */
2544
 
2545
static rtx
2546
eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2547
                  bool may_use_invariant, bool for_costs)
2548
{
2549
  enum rtx_code code = GET_CODE (x);
2550
  struct elim_table *ep;
2551
  int regno;
2552
  rtx new_rtx;
2553
  int i, j;
2554
  const char *fmt;
2555
  int copied = 0;
2556
 
2557
  if (! current_function_decl)
2558
    return x;
2559
 
2560
  switch (code)
2561
    {
2562
    case CONST_INT:
2563
    case CONST_DOUBLE:
2564
    case CONST_FIXED:
2565
    case CONST_VECTOR:
2566
    case CONST:
2567
    case SYMBOL_REF:
2568
    case CODE_LABEL:
2569
    case PC:
2570
    case CC0:
2571
    case ASM_INPUT:
2572
    case ADDR_VEC:
2573
    case ADDR_DIFF_VEC:
2574
    case RETURN:
2575
      return x;
2576
 
2577
    case REG:
2578
      regno = REGNO (x);
2579
 
2580
      /* First handle the case where we encounter a bare register that
2581
         is eliminable.  Replace it with a PLUS.  */
2582
      if (regno < FIRST_PSEUDO_REGISTER)
2583
        {
2584
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2585
               ep++)
2586
            if (ep->from_rtx == x && ep->can_eliminate)
2587
              return plus_constant (ep->to_rtx, ep->previous_offset);
2588
 
2589
        }
2590
      else if (reg_renumber && reg_renumber[regno] < 0
2591
               && reg_equivs
2592
               && reg_equiv_invariant (regno))
2593
        {
2594
          if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2595
            return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2596
                                     mem_mode, insn, true, for_costs);
2597
          /* There exists at least one use of REGNO that cannot be
2598
             eliminated.  Prevent the defining insn from being deleted.  */
2599
          reg_equiv_init (regno) = NULL_RTX;
2600
          if (!for_costs)
2601
            alter_reg (regno, -1, true);
2602
        }
2603
      return x;
2604
 
2605
    /* You might think handling MINUS in a manner similar to PLUS is a
2606
       good idea.  It is not.  It has been tried multiple times and every
2607
       time the change has had to have been reverted.
2608
 
2609
       Other parts of reload know a PLUS is special (gen_reload for example)
2610
       and require special code to handle code a reloaded PLUS operand.
2611
 
2612
       Also consider backends where the flags register is clobbered by a
2613
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2614
       lea instruction comes to mind).  If we try to reload a MINUS, we
2615
       may kill the flags register that was holding a useful value.
2616
 
2617
       So, please before trying to handle MINUS, consider reload as a
2618
       whole instead of this little section as well as the backend issues.  */
2619
    case PLUS:
2620
      /* If this is the sum of an eliminable register and a constant, rework
2621
         the sum.  */
2622
      if (REG_P (XEXP (x, 0))
2623
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2624
          && CONSTANT_P (XEXP (x, 1)))
2625
        {
2626
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2627
               ep++)
2628
            if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2629
              {
2630
                /* The only time we want to replace a PLUS with a REG (this
2631
                   occurs when the constant operand of the PLUS is the negative
2632
                   of the offset) is when we are inside a MEM.  We won't want
2633
                   to do so at other times because that would change the
2634
                   structure of the insn in a way that reload can't handle.
2635
                   We special-case the commonest situation in
2636
                   eliminate_regs_in_insn, so just replace a PLUS with a
2637
                   PLUS here, unless inside a MEM.  */
2638
                if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2639
                    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2640
                  return ep->to_rtx;
2641
                else
2642
                  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2643
                                       plus_constant (XEXP (x, 1),
2644
                                                      ep->previous_offset));
2645
              }
2646
 
2647
          /* If the register is not eliminable, we are done since the other
2648
             operand is a constant.  */
2649
          return x;
2650
        }
2651
 
2652
      /* If this is part of an address, we want to bring any constant to the
2653
         outermost PLUS.  We will do this by doing register replacement in
2654
         our operands and seeing if a constant shows up in one of them.
2655
 
2656
         Note that there is no risk of modifying the structure of the insn,
2657
         since we only get called for its operands, thus we are either
2658
         modifying the address inside a MEM, or something like an address
2659
         operand of a load-address insn.  */
2660
 
2661
      {
2662
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2663
                                     for_costs);
2664
        rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2665
                                     for_costs);
2666
 
2667
        if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2668
          {
2669
            /* If one side is a PLUS and the other side is a pseudo that
2670
               didn't get a hard register but has a reg_equiv_constant,
2671
               we must replace the constant here since it may no longer
2672
               be in the position of any operand.  */
2673
            if (GET_CODE (new0) == PLUS && REG_P (new1)
2674
                && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2675
                && reg_renumber[REGNO (new1)] < 0
2676
                && reg_equivs
2677
                && reg_equiv_constant (REGNO (new1)) != 0)
2678
              new1 = reg_equiv_constant (REGNO (new1));
2679
            else if (GET_CODE (new1) == PLUS && REG_P (new0)
2680
                     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2681
                     && reg_renumber[REGNO (new0)] < 0
2682
                     && reg_equiv_constant (REGNO (new0)) != 0)
2683
              new0 = reg_equiv_constant (REGNO (new0));
2684
 
2685
            new_rtx = form_sum (GET_MODE (x), new0, new1);
2686
 
2687
            /* As above, if we are not inside a MEM we do not want to
2688
               turn a PLUS into something else.  We might try to do so here
2689
               for an addition of 0 if we aren't optimizing.  */
2690
            if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2691
              return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2692
            else
2693
              return new_rtx;
2694
          }
2695
      }
2696
      return x;
2697
 
2698
    case MULT:
2699
      /* If this is the product of an eliminable register and a
2700
         constant, apply the distribute law and move the constant out
2701
         so that we have (plus (mult ..) ..).  This is needed in order
2702
         to keep load-address insns valid.   This case is pathological.
2703
         We ignore the possibility of overflow here.  */
2704
      if (REG_P (XEXP (x, 0))
2705
          && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2706
          && CONST_INT_P (XEXP (x, 1)))
2707
        for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2708
             ep++)
2709
          if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2710
            {
2711
              if (! mem_mode
2712
                  /* Refs inside notes or in DEBUG_INSNs don't count for
2713
                     this purpose.  */
2714
                  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2715
                                      || GET_CODE (insn) == INSN_LIST
2716
                                      || DEBUG_INSN_P (insn))))
2717
                ep->ref_outside_mem = 1;
2718
 
2719
              return
2720
                plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2721
                               ep->previous_offset * INTVAL (XEXP (x, 1)));
2722
            }
2723
 
2724
      /* ... fall through ...  */
2725
 
2726
    case CALL:
2727
    case COMPARE:
2728
    /* See comments before PLUS about handling MINUS.  */
2729
    case MINUS:
2730
    case DIV:      case UDIV:
2731
    case MOD:      case UMOD:
2732
    case AND:      case IOR:      case XOR:
2733
    case ROTATERT: case ROTATE:
2734
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2735
    case NE:       case EQ:
2736
    case GE:       case GT:       case GEU:    case GTU:
2737
    case LE:       case LT:       case LEU:    case LTU:
2738
      {
2739
        rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2740
                                     for_costs);
2741
        rtx new1 = XEXP (x, 1)
2742
          ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2743
                              for_costs) : 0;
2744
 
2745
        if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2746
          return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2747
      }
2748
      return x;
2749
 
2750
    case EXPR_LIST:
2751
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2752
      if (XEXP (x, 0))
2753
        {
2754
          new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2755
                                      for_costs);
2756
          if (new_rtx != XEXP (x, 0))
2757
            {
2758
              /* If this is a REG_DEAD note, it is not valid anymore.
2759
                 Using the eliminated version could result in creating a
2760
                 REG_DEAD note for the stack or frame pointer.  */
2761
              if (REG_NOTE_KIND (x) == REG_DEAD)
2762
                return (XEXP (x, 1)
2763
                        ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2764
                                            for_costs)
2765
                        : NULL_RTX);
2766
 
2767
              x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2768
            }
2769
        }
2770
 
2771
      /* ... fall through ...  */
2772
 
2773
    case INSN_LIST:
2774
      /* Now do eliminations in the rest of the chain.  If this was
2775
         an EXPR_LIST, this might result in allocating more memory than is
2776
         strictly needed, but it simplifies the code.  */
2777
      if (XEXP (x, 1))
2778
        {
2779
          new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2780
                                      for_costs);
2781
          if (new_rtx != XEXP (x, 1))
2782
            return
2783
              gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2784
        }
2785
      return x;
2786
 
2787
    case PRE_INC:
2788
    case POST_INC:
2789
    case PRE_DEC:
2790
    case POST_DEC:
2791
      /* We do not support elimination of a register that is modified.
2792
         elimination_effects has already make sure that this does not
2793
         happen.  */
2794
      return x;
2795
 
2796
    case PRE_MODIFY:
2797
    case POST_MODIFY:
2798
      /* We do not support elimination of a register that is modified.
2799
         elimination_effects has already make sure that this does not
2800
         happen.  The only remaining case we need to consider here is
2801
         that the increment value may be an eliminable register.  */
2802
      if (GET_CODE (XEXP (x, 1)) == PLUS
2803
          && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2804
        {
2805
          rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2806
                                          insn, true, for_costs);
2807
 
2808
          if (new_rtx != XEXP (XEXP (x, 1), 1))
2809
            return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2810
                                   gen_rtx_PLUS (GET_MODE (x),
2811
                                                 XEXP (x, 0), new_rtx));
2812
        }
2813
      return x;
2814
 
2815
    case STRICT_LOW_PART:
2816
    case NEG:          case NOT:
2817
    case SIGN_EXTEND:  case ZERO_EXTEND:
2818
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2819
    case FLOAT:        case FIX:
2820
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2821
    case ABS:
2822
    case SQRT:
2823
    case FFS:
2824
    case CLZ:
2825
    case CTZ:
2826
    case POPCOUNT:
2827
    case PARITY:
2828
    case BSWAP:
2829
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2830
                                  for_costs);
2831
      if (new_rtx != XEXP (x, 0))
2832
        return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2833
      return x;
2834
 
2835
    case SUBREG:
2836
      /* Similar to above processing, but preserve SUBREG_BYTE.
2837
         Convert (subreg (mem)) to (mem) if not paradoxical.
2838
         Also, if we have a non-paradoxical (subreg (pseudo)) and the
2839
         pseudo didn't get a hard reg, we must replace this with the
2840
         eliminated version of the memory location because push_reload
2841
         may do the replacement in certain circumstances.  */
2842
      if (REG_P (SUBREG_REG (x))
2843
          && !paradoxical_subreg_p (x)
2844
          && reg_equivs
2845
          && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2846
        {
2847
          new_rtx = SUBREG_REG (x);
2848
        }
2849
      else
2850
        new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2851
 
2852
      if (new_rtx != SUBREG_REG (x))
2853
        {
2854
          int x_size = GET_MODE_SIZE (GET_MODE (x));
2855
          int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2856
 
2857
          if (MEM_P (new_rtx)
2858
              && ((x_size < new_size
2859
#ifdef WORD_REGISTER_OPERATIONS
2860
                   /* On these machines, combine can create rtl of the form
2861
                      (set (subreg:m1 (reg:m2 R) 0) ...)
2862
                      where m1 < m2, and expects something interesting to
2863
                      happen to the entire word.  Moreover, it will use the
2864
                      (reg:m2 R) later, expecting all bits to be preserved.
2865
                      So if the number of words is the same, preserve the
2866
                      subreg so that push_reload can see it.  */
2867
                   && ! ((x_size - 1) / UNITS_PER_WORD
2868
                         == (new_size -1 ) / UNITS_PER_WORD)
2869
#endif
2870
                   )
2871
                  || x_size == new_size)
2872
              )
2873
            return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2874
          else
2875
            return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2876
        }
2877
 
2878
      return x;
2879
 
2880
    case MEM:
2881
      /* Our only special processing is to pass the mode of the MEM to our
2882
         recursive call and copy the flags.  While we are here, handle this
2883
         case more efficiently.  */
2884
 
2885
      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2886
                                  for_costs);
2887
      if (for_costs
2888
          && memory_address_p (GET_MODE (x), XEXP (x, 0))
2889
          && !memory_address_p (GET_MODE (x), new_rtx))
2890
        for_each_rtx (&XEXP (x, 0), note_reg_elim_costly, insn);
2891
 
2892
      return replace_equiv_address_nv (x, new_rtx);
2893
 
2894
    case USE:
2895
      /* Handle insn_list USE that a call to a pure function may generate.  */
2896
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2897
                                  for_costs);
2898
      if (new_rtx != XEXP (x, 0))
2899
        return gen_rtx_USE (GET_MODE (x), new_rtx);
2900
      return x;
2901
 
2902
    case CLOBBER:
2903
    case ASM_OPERANDS:
2904
      gcc_assert (insn && DEBUG_INSN_P (insn));
2905
      break;
2906
 
2907
    case SET:
2908
      gcc_unreachable ();
2909
 
2910
    default:
2911
      break;
2912
    }
2913
 
2914
  /* Process each of our operands recursively.  If any have changed, make a
2915
     copy of the rtx.  */
2916
  fmt = GET_RTX_FORMAT (code);
2917
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2918
    {
2919
      if (*fmt == 'e')
2920
        {
2921
          new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2922
                                      for_costs);
2923
          if (new_rtx != XEXP (x, i) && ! copied)
2924
            {
2925
              x = shallow_copy_rtx (x);
2926
              copied = 1;
2927
            }
2928
          XEXP (x, i) = new_rtx;
2929
        }
2930
      else if (*fmt == 'E')
2931
        {
2932
          int copied_vec = 0;
2933
          for (j = 0; j < XVECLEN (x, i); j++)
2934
            {
2935
              new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2936
                                          for_costs);
2937
              if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2938
                {
2939
                  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2940
                                             XVEC (x, i)->elem);
2941
                  if (! copied)
2942
                    {
2943
                      x = shallow_copy_rtx (x);
2944
                      copied = 1;
2945
                    }
2946
                  XVEC (x, i) = new_v;
2947
                  copied_vec = 1;
2948
                }
2949
              XVECEXP (x, i, j) = new_rtx;
2950
            }
2951
        }
2952
    }
2953
 
2954
  return x;
2955
}
2956
 
2957
rtx
2958
eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2959
{
2960
  return eliminate_regs_1 (x, mem_mode, insn, false, false);
2961
}
2962
 
2963
/* Scan rtx X for modifications of elimination target registers.  Update
2964
   the table of eliminables to reflect the changed state.  MEM_MODE is
2965
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2966
 
2967
static void
2968
elimination_effects (rtx x, enum machine_mode mem_mode)
2969
{
2970
  enum rtx_code code = GET_CODE (x);
2971
  struct elim_table *ep;
2972
  int regno;
2973
  int i, j;
2974
  const char *fmt;
2975
 
2976
  switch (code)
2977
    {
2978
    case CONST_INT:
2979
    case CONST_DOUBLE:
2980
    case CONST_FIXED:
2981
    case CONST_VECTOR:
2982
    case CONST:
2983
    case SYMBOL_REF:
2984
    case CODE_LABEL:
2985
    case PC:
2986
    case CC0:
2987
    case ASM_INPUT:
2988
    case ADDR_VEC:
2989
    case ADDR_DIFF_VEC:
2990
    case RETURN:
2991
      return;
2992
 
2993
    case REG:
2994
      regno = REGNO (x);
2995
 
2996
      /* First handle the case where we encounter a bare register that
2997
         is eliminable.  Replace it with a PLUS.  */
2998
      if (regno < FIRST_PSEUDO_REGISTER)
2999
        {
3000
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3001
               ep++)
3002
            if (ep->from_rtx == x && ep->can_eliminate)
3003
              {
3004
                if (! mem_mode)
3005
                  ep->ref_outside_mem = 1;
3006
                return;
3007
              }
3008
 
3009
        }
3010
      else if (reg_renumber[regno] < 0
3011
               && reg_equivs != 0
3012
               && reg_equiv_constant (regno)
3013
               && ! function_invariant_p (reg_equiv_constant (regno)))
3014
        elimination_effects (reg_equiv_constant (regno), mem_mode);
3015
      return;
3016
 
3017
    case PRE_INC:
3018
    case POST_INC:
3019
    case PRE_DEC:
3020
    case POST_DEC:
3021
    case POST_MODIFY:
3022
    case PRE_MODIFY:
3023
      /* If we modify the source of an elimination rule, disable it.  */
3024
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3025
        if (ep->from_rtx == XEXP (x, 0))
3026
          ep->can_eliminate = 0;
3027
 
3028
      /* If we modify the target of an elimination rule by adding a constant,
3029
         update its offset.  If we modify the target in any other way, we'll
3030
         have to disable the rule as well.  */
3031
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3032
        if (ep->to_rtx == XEXP (x, 0))
3033
          {
3034
            int size = GET_MODE_SIZE (mem_mode);
3035
 
3036
            /* If more bytes than MEM_MODE are pushed, account for them.  */
3037
#ifdef PUSH_ROUNDING
3038
            if (ep->to_rtx == stack_pointer_rtx)
3039
              size = PUSH_ROUNDING (size);
3040
#endif
3041
            if (code == PRE_DEC || code == POST_DEC)
3042
              ep->offset += size;
3043
            else if (code == PRE_INC || code == POST_INC)
3044
              ep->offset -= size;
3045
            else if (code == PRE_MODIFY || code == POST_MODIFY)
3046
              {
3047
                if (GET_CODE (XEXP (x, 1)) == PLUS
3048
                    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3049
                    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3050
                  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3051
                else
3052
                  ep->can_eliminate = 0;
3053
              }
3054
          }
3055
 
3056
      /* These two aren't unary operators.  */
3057
      if (code == POST_MODIFY || code == PRE_MODIFY)
3058
        break;
3059
 
3060
      /* Fall through to generic unary operation case.  */
3061
    case STRICT_LOW_PART:
3062
    case NEG:          case NOT:
3063
    case SIGN_EXTEND:  case ZERO_EXTEND:
3064
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3065
    case FLOAT:        case FIX:
3066
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3067
    case ABS:
3068
    case SQRT:
3069
    case FFS:
3070
    case CLZ:
3071
    case CTZ:
3072
    case POPCOUNT:
3073
    case PARITY:
3074
    case BSWAP:
3075
      elimination_effects (XEXP (x, 0), mem_mode);
3076
      return;
3077
 
3078
    case SUBREG:
3079
      if (REG_P (SUBREG_REG (x))
3080
          && (GET_MODE_SIZE (GET_MODE (x))
3081
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3082
          && reg_equivs != 0
3083
          && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3084
        return;
3085
 
3086
      elimination_effects (SUBREG_REG (x), mem_mode);
3087
      return;
3088
 
3089
    case USE:
3090
      /* If using a register that is the source of an eliminate we still
3091
         think can be performed, note it cannot be performed since we don't
3092
         know how this register is used.  */
3093
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3094
        if (ep->from_rtx == XEXP (x, 0))
3095
          ep->can_eliminate = 0;
3096
 
3097
      elimination_effects (XEXP (x, 0), mem_mode);
3098
      return;
3099
 
3100
    case CLOBBER:
3101
      /* If clobbering a register that is the replacement register for an
3102
         elimination we still think can be performed, note that it cannot
3103
         be performed.  Otherwise, we need not be concerned about it.  */
3104
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3105
        if (ep->to_rtx == XEXP (x, 0))
3106
          ep->can_eliminate = 0;
3107
 
3108
      elimination_effects (XEXP (x, 0), mem_mode);
3109
      return;
3110
 
3111
    case SET:
3112
      /* Check for setting a register that we know about.  */
3113
      if (REG_P (SET_DEST (x)))
3114
        {
3115
          /* See if this is setting the replacement register for an
3116
             elimination.
3117
 
3118
             If DEST is the hard frame pointer, we do nothing because we
3119
             assume that all assignments to the frame pointer are for
3120
             non-local gotos and are being done at a time when they are valid
3121
             and do not disturb anything else.  Some machines want to
3122
             eliminate a fake argument pointer (or even a fake frame pointer)
3123
             with either the real frame or the stack pointer.  Assignments to
3124
             the hard frame pointer must not prevent this elimination.  */
3125
 
3126
          for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3127
               ep++)
3128
            if (ep->to_rtx == SET_DEST (x)
3129
                && SET_DEST (x) != hard_frame_pointer_rtx)
3130
              {
3131
                /* If it is being incremented, adjust the offset.  Otherwise,
3132
                   this elimination can't be done.  */
3133
                rtx src = SET_SRC (x);
3134
 
3135
                if (GET_CODE (src) == PLUS
3136
                    && XEXP (src, 0) == SET_DEST (x)
3137
                    && CONST_INT_P (XEXP (src, 1)))
3138
                  ep->offset -= INTVAL (XEXP (src, 1));
3139
                else
3140
                  ep->can_eliminate = 0;
3141
              }
3142
        }
3143
 
3144
      elimination_effects (SET_DEST (x), VOIDmode);
3145
      elimination_effects (SET_SRC (x), VOIDmode);
3146
      return;
3147
 
3148
    case MEM:
3149
      /* Our only special processing is to pass the mode of the MEM to our
3150
         recursive call.  */
3151
      elimination_effects (XEXP (x, 0), GET_MODE (x));
3152
      return;
3153
 
3154
    default:
3155
      break;
3156
    }
3157
 
3158
  fmt = GET_RTX_FORMAT (code);
3159
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3160
    {
3161
      if (*fmt == 'e')
3162
        elimination_effects (XEXP (x, i), mem_mode);
3163
      else if (*fmt == 'E')
3164
        for (j = 0; j < XVECLEN (x, i); j++)
3165
          elimination_effects (XVECEXP (x, i, j), mem_mode);
3166
    }
3167
}
3168
 
3169
/* Descend through rtx X and verify that no references to eliminable registers
3170
   remain.  If any do remain, mark the involved register as not
3171
   eliminable.  */
3172
 
3173
static void
3174
check_eliminable_occurrences (rtx x)
3175
{
3176
  const char *fmt;
3177
  int i;
3178
  enum rtx_code code;
3179
 
3180
  if (x == 0)
3181
    return;
3182
 
3183
  code = GET_CODE (x);
3184
 
3185
  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3186
    {
3187
      struct elim_table *ep;
3188
 
3189
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3190
        if (ep->from_rtx == x)
3191
          ep->can_eliminate = 0;
3192
      return;
3193
    }
3194
 
3195
  fmt = GET_RTX_FORMAT (code);
3196
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3197
    {
3198
      if (*fmt == 'e')
3199
        check_eliminable_occurrences (XEXP (x, i));
3200
      else if (*fmt == 'E')
3201
        {
3202
          int j;
3203
          for (j = 0; j < XVECLEN (x, i); j++)
3204
            check_eliminable_occurrences (XVECEXP (x, i, j));
3205
        }
3206
    }
3207
}
3208
 
3209
/* Scan INSN and eliminate all eliminable registers in it.
3210
 
3211
   If REPLACE is nonzero, do the replacement destructively.  Also
3212
   delete the insn as dead it if it is setting an eliminable register.
3213
 
3214
   If REPLACE is zero, do all our allocations in reload_obstack.
3215
 
3216
   If no eliminations were done and this insn doesn't require any elimination
3217
   processing (these are not identical conditions: it might be updating sp,
3218
   but not referencing fp; this needs to be seen during reload_as_needed so
3219
   that the offset between fp and sp can be taken into consideration), zero
3220
   is returned.  Otherwise, 1 is returned.  */
3221
 
3222
static int
3223
eliminate_regs_in_insn (rtx insn, int replace)
3224
{
3225
  int icode = recog_memoized (insn);
3226
  rtx old_body = PATTERN (insn);
3227
  int insn_is_asm = asm_noperands (old_body) >= 0;
3228
  rtx old_set = single_set (insn);
3229
  rtx new_body;
3230
  int val = 0;
3231
  int i;
3232
  rtx substed_operand[MAX_RECOG_OPERANDS];
3233
  rtx orig_operand[MAX_RECOG_OPERANDS];
3234
  struct elim_table *ep;
3235
  rtx plus_src, plus_cst_src;
3236
 
3237
  if (! insn_is_asm && icode < 0)
3238
    {
3239
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
3240
                  || GET_CODE (PATTERN (insn)) == CLOBBER
3241
                  || GET_CODE (PATTERN (insn)) == ADDR_VEC
3242
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3243
                  || GET_CODE (PATTERN (insn)) == ASM_INPUT
3244
                  || DEBUG_INSN_P (insn));
3245
      if (DEBUG_INSN_P (insn))
3246
        INSN_VAR_LOCATION_LOC (insn)
3247
          = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3248
      return 0;
3249
    }
3250
 
3251
  if (old_set != 0 && REG_P (SET_DEST (old_set))
3252
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3253
    {
3254
      /* Check for setting an eliminable register.  */
3255
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3256
        if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3257
          {
3258
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3259
            /* If this is setting the frame pointer register to the
3260
               hardware frame pointer register and this is an elimination
3261
               that will be done (tested above), this insn is really
3262
               adjusting the frame pointer downward to compensate for
3263
               the adjustment done before a nonlocal goto.  */
3264
            if (ep->from == FRAME_POINTER_REGNUM
3265
                && ep->to == HARD_FRAME_POINTER_REGNUM)
3266
              {
3267
                rtx base = SET_SRC (old_set);
3268
                rtx base_insn = insn;
3269
                HOST_WIDE_INT offset = 0;
3270
 
3271
                while (base != ep->to_rtx)
3272
                  {
3273
                    rtx prev_insn, prev_set;
3274
 
3275
                    if (GET_CODE (base) == PLUS
3276
                        && CONST_INT_P (XEXP (base, 1)))
3277
                      {
3278
                        offset += INTVAL (XEXP (base, 1));
3279
                        base = XEXP (base, 0);
3280
                      }
3281
                    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3282
                             && (prev_set = single_set (prev_insn)) != 0
3283
                             && rtx_equal_p (SET_DEST (prev_set), base))
3284
                      {
3285
                        base = SET_SRC (prev_set);
3286
                        base_insn = prev_insn;
3287
                      }
3288
                    else
3289
                      break;
3290
                  }
3291
 
3292
                if (base == ep->to_rtx)
3293
                  {
3294
                    rtx src
3295
                      = plus_constant (ep->to_rtx, offset - ep->offset);
3296
 
3297
                    new_body = old_body;
3298
                    if (! replace)
3299
                      {
3300
                        new_body = copy_insn (old_body);
3301
                        if (REG_NOTES (insn))
3302
                          REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3303
                      }
3304
                    PATTERN (insn) = new_body;
3305
                    old_set = single_set (insn);
3306
 
3307
                    /* First see if this insn remains valid when we
3308
                       make the change.  If not, keep the INSN_CODE
3309
                       the same and let reload fit it up.  */
3310
                    validate_change (insn, &SET_SRC (old_set), src, 1);
3311
                    validate_change (insn, &SET_DEST (old_set),
3312
                                     ep->to_rtx, 1);
3313
                    if (! apply_change_group ())
3314
                      {
3315
                        SET_SRC (old_set) = src;
3316
                        SET_DEST (old_set) = ep->to_rtx;
3317
                      }
3318
 
3319
                    val = 1;
3320
                    goto done;
3321
                  }
3322
              }
3323
#endif
3324
 
3325
            /* In this case this insn isn't serving a useful purpose.  We
3326
               will delete it in reload_as_needed once we know that this
3327
               elimination is, in fact, being done.
3328
 
3329
               If REPLACE isn't set, we can't delete this insn, but needn't
3330
               process it since it won't be used unless something changes.  */
3331
            if (replace)
3332
              {
3333
                delete_dead_insn (insn);
3334
                return 1;
3335
              }
3336
            val = 1;
3337
            goto done;
3338
          }
3339
    }
3340
 
3341
  /* We allow one special case which happens to work on all machines we
3342
     currently support: a single set with the source or a REG_EQUAL
3343
     note being a PLUS of an eliminable register and a constant.  */
3344
  plus_src = plus_cst_src = 0;
3345
  if (old_set && REG_P (SET_DEST (old_set)))
3346
    {
3347
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3348
        plus_src = SET_SRC (old_set);
3349
      /* First see if the source is of the form (plus (...) CST).  */
3350
      if (plus_src
3351
          && CONST_INT_P (XEXP (plus_src, 1)))
3352
        plus_cst_src = plus_src;
3353
      else if (REG_P (SET_SRC (old_set))
3354
               || plus_src)
3355
        {
3356
          /* Otherwise, see if we have a REG_EQUAL note of the form
3357
             (plus (...) CST).  */
3358
          rtx links;
3359
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3360
            {
3361
              if ((REG_NOTE_KIND (links) == REG_EQUAL
3362
                   || REG_NOTE_KIND (links) == REG_EQUIV)
3363
                  && GET_CODE (XEXP (links, 0)) == PLUS
3364
                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3365
                {
3366
                  plus_cst_src = XEXP (links, 0);
3367
                  break;
3368
                }
3369
            }
3370
        }
3371
 
3372
      /* Check that the first operand of the PLUS is a hard reg or
3373
         the lowpart subreg of one.  */
3374
      if (plus_cst_src)
3375
        {
3376
          rtx reg = XEXP (plus_cst_src, 0);
3377
          if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3378
            reg = SUBREG_REG (reg);
3379
 
3380
          if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3381
            plus_cst_src = 0;
3382
        }
3383
    }
3384
  if (plus_cst_src)
3385
    {
3386
      rtx reg = XEXP (plus_cst_src, 0);
3387
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3388
 
3389
      if (GET_CODE (reg) == SUBREG)
3390
        reg = SUBREG_REG (reg);
3391
 
3392
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3393
        if (ep->from_rtx == reg && ep->can_eliminate)
3394
          {
3395
            rtx to_rtx = ep->to_rtx;
3396
            offset += ep->offset;
3397
            offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3398
 
3399
            if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3400
              to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3401
                                    to_rtx);
3402
            /* If we have a nonzero offset, and the source is already
3403
               a simple REG, the following transformation would
3404
               increase the cost of the insn by replacing a simple REG
3405
               with (plus (reg sp) CST).  So try only when we already
3406
               had a PLUS before.  */
3407
            if (offset == 0 || plus_src)
3408
              {
3409
                rtx new_src = plus_constant (to_rtx, offset);
3410
 
3411
                new_body = old_body;
3412
                if (! replace)
3413
                  {
3414
                    new_body = copy_insn (old_body);
3415
                    if (REG_NOTES (insn))
3416
                      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3417
                  }
3418
                PATTERN (insn) = new_body;
3419
                old_set = single_set (insn);
3420
 
3421
                /* First see if this insn remains valid when we make the
3422
                   change.  If not, try to replace the whole pattern with
3423
                   a simple set (this may help if the original insn was a
3424
                   PARALLEL that was only recognized as single_set due to
3425
                   REG_UNUSED notes).  If this isn't valid either, keep
3426
                   the INSN_CODE the same and let reload fix it up.  */
3427
                if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3428
                  {
3429
                    rtx new_pat = gen_rtx_SET (VOIDmode,
3430
                                               SET_DEST (old_set), new_src);
3431
 
3432
                    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3433
                      SET_SRC (old_set) = new_src;
3434
                  }
3435
              }
3436
            else
3437
              break;
3438
 
3439
            val = 1;
3440
            /* This can't have an effect on elimination offsets, so skip right
3441
               to the end.  */
3442
            goto done;
3443
          }
3444
    }
3445
 
3446
  /* Determine the effects of this insn on elimination offsets.  */
3447
  elimination_effects (old_body, VOIDmode);
3448
 
3449
  /* Eliminate all eliminable registers occurring in operands that
3450
     can be handled by reload.  */
3451
  extract_insn (insn);
3452
  for (i = 0; i < recog_data.n_operands; i++)
3453
    {
3454
      orig_operand[i] = recog_data.operand[i];
3455
      substed_operand[i] = recog_data.operand[i];
3456
 
3457
      /* For an asm statement, every operand is eliminable.  */
3458
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3459
        {
3460
          bool is_set_src, in_plus;
3461
 
3462
          /* Check for setting a register that we know about.  */
3463
          if (recog_data.operand_type[i] != OP_IN
3464
              && REG_P (orig_operand[i]))
3465
            {
3466
              /* If we are assigning to a register that can be eliminated, it
3467
                 must be as part of a PARALLEL, since the code above handles
3468
                 single SETs.  We must indicate that we can no longer
3469
                 eliminate this reg.  */
3470
              for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3471
                   ep++)
3472
                if (ep->from_rtx == orig_operand[i])
3473
                  ep->can_eliminate = 0;
3474
            }
3475
 
3476
          /* Companion to the above plus substitution, we can allow
3477
             invariants as the source of a plain move.  */
3478
          is_set_src = false;
3479
          if (old_set
3480
              && recog_data.operand_loc[i] == &SET_SRC (old_set))
3481
            is_set_src = true;
3482
          in_plus = false;
3483
          if (plus_src
3484
              && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3485
                  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3486
            in_plus = true;
3487
 
3488
          substed_operand[i]
3489
            = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3490
                                replace ? insn : NULL_RTX,
3491
                                is_set_src || in_plus, false);
3492
          if (substed_operand[i] != orig_operand[i])
3493
            val = 1;
3494
          /* Terminate the search in check_eliminable_occurrences at
3495
             this point.  */
3496
          *recog_data.operand_loc[i] = 0;
3497
 
3498
          /* If an output operand changed from a REG to a MEM and INSN is an
3499
             insn, write a CLOBBER insn.  */
3500
          if (recog_data.operand_type[i] != OP_IN
3501
              && REG_P (orig_operand[i])
3502
              && MEM_P (substed_operand[i])
3503
              && replace)
3504
            emit_insn_after (gen_clobber (orig_operand[i]), insn);
3505
        }
3506
    }
3507
 
3508
  for (i = 0; i < recog_data.n_dups; i++)
3509
    *recog_data.dup_loc[i]
3510
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3511
 
3512
  /* If any eliminable remain, they aren't eliminable anymore.  */
3513
  check_eliminable_occurrences (old_body);
3514
 
3515
  /* Substitute the operands; the new values are in the substed_operand
3516
     array.  */
3517
  for (i = 0; i < recog_data.n_operands; i++)
3518
    *recog_data.operand_loc[i] = substed_operand[i];
3519
  for (i = 0; i < recog_data.n_dups; i++)
3520
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3521
 
3522
  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3523
     re-recognize the insn.  We do this in case we had a simple addition
3524
     but now can do this as a load-address.  This saves an insn in this
3525
     common case.
3526
     If re-recognition fails, the old insn code number will still be used,
3527
     and some register operands may have changed into PLUS expressions.
3528
     These will be handled by find_reloads by loading them into a register
3529
     again.  */
3530
 
3531
  if (val)
3532
    {
3533
      /* If we aren't replacing things permanently and we changed something,
3534
         make another copy to ensure that all the RTL is new.  Otherwise
3535
         things can go wrong if find_reload swaps commutative operands
3536
         and one is inside RTL that has been copied while the other is not.  */
3537
      new_body = old_body;
3538
      if (! replace)
3539
        {
3540
          new_body = copy_insn (old_body);
3541
          if (REG_NOTES (insn))
3542
            REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3543
        }
3544
      PATTERN (insn) = new_body;
3545
 
3546
      /* If we had a move insn but now we don't, rerecognize it.  This will
3547
         cause spurious re-recognition if the old move had a PARALLEL since
3548
         the new one still will, but we can't call single_set without
3549
         having put NEW_BODY into the insn and the re-recognition won't
3550
         hurt in this rare case.  */
3551
      /* ??? Why this huge if statement - why don't we just rerecognize the
3552
         thing always?  */
3553
      if (! insn_is_asm
3554
          && old_set != 0
3555
          && ((REG_P (SET_SRC (old_set))
3556
               && (GET_CODE (new_body) != SET
3557
                   || !REG_P (SET_SRC (new_body))))
3558
              /* If this was a load from or store to memory, compare
3559
                 the MEM in recog_data.operand to the one in the insn.
3560
                 If they are not equal, then rerecognize the insn.  */
3561
              || (old_set != 0
3562
                  && ((MEM_P (SET_SRC (old_set))
3563
                       && SET_SRC (old_set) != recog_data.operand[1])
3564
                      || (MEM_P (SET_DEST (old_set))
3565
                          && SET_DEST (old_set) != recog_data.operand[0])))
3566
              /* If this was an add insn before, rerecognize.  */
3567
              || GET_CODE (SET_SRC (old_set)) == PLUS))
3568
        {
3569
          int new_icode = recog (PATTERN (insn), insn, 0);
3570
          if (new_icode >= 0)
3571
            INSN_CODE (insn) = new_icode;
3572
        }
3573
    }
3574
 
3575
  /* Restore the old body.  If there were any changes to it, we made a copy
3576
     of it while the changes were still in place, so we'll correctly return
3577
     a modified insn below.  */
3578
  if (! replace)
3579
    {
3580
      /* Restore the old body.  */
3581
      for (i = 0; i < recog_data.n_operands; i++)
3582
        /* Restoring a top-level match_parallel would clobber the new_body
3583
           we installed in the insn.  */
3584
        if (recog_data.operand_loc[i] != &PATTERN (insn))
3585
          *recog_data.operand_loc[i] = orig_operand[i];
3586
      for (i = 0; i < recog_data.n_dups; i++)
3587
        *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3588
    }
3589
 
3590
  /* Update all elimination pairs to reflect the status after the current
3591
     insn.  The changes we make were determined by the earlier call to
3592
     elimination_effects.
3593
 
3594
     We also detect cases where register elimination cannot be done,
3595
     namely, if a register would be both changed and referenced outside a MEM
3596
     in the resulting insn since such an insn is often undefined and, even if
3597
     not, we cannot know what meaning will be given to it.  Note that it is
3598
     valid to have a register used in an address in an insn that changes it
3599
     (presumably with a pre- or post-increment or decrement).
3600
 
3601
     If anything changes, return nonzero.  */
3602
 
3603
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3604
    {
3605
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3606
        ep->can_eliminate = 0;
3607
 
3608
      ep->ref_outside_mem = 0;
3609
 
3610
      if (ep->previous_offset != ep->offset)
3611
        val = 1;
3612
    }
3613
 
3614
 done:
3615
  /* If we changed something, perform elimination in REG_NOTES.  This is
3616
     needed even when REPLACE is zero because a REG_DEAD note might refer
3617
     to a register that we eliminate and could cause a different number
3618
     of spill registers to be needed in the final reload pass than in
3619
     the pre-passes.  */
3620
  if (val && REG_NOTES (insn) != 0)
3621
    REG_NOTES (insn)
3622
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3623
                          false);
3624
 
3625
  return val;
3626
}
3627
 
3628
/* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3629
   register allocator.  INSN is the instruction we need to examine, we perform
3630
   eliminations in its operands and record cases where eliminating a reg with
3631
   an invariant equivalence would add extra cost.  */
3632
 
3633
static void
3634
elimination_costs_in_insn (rtx insn)
3635
{
3636
  int icode = recog_memoized (insn);
3637
  rtx old_body = PATTERN (insn);
3638
  int insn_is_asm = asm_noperands (old_body) >= 0;
3639
  rtx old_set = single_set (insn);
3640
  int i;
3641
  rtx orig_operand[MAX_RECOG_OPERANDS];
3642
  rtx orig_dup[MAX_RECOG_OPERANDS];
3643
  struct elim_table *ep;
3644
  rtx plus_src, plus_cst_src;
3645
  bool sets_reg_p;
3646
 
3647
  if (! insn_is_asm && icode < 0)
3648
    {
3649
      gcc_assert (GET_CODE (PATTERN (insn)) == USE
3650
                  || GET_CODE (PATTERN (insn)) == CLOBBER
3651
                  || GET_CODE (PATTERN (insn)) == ADDR_VEC
3652
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3653
                  || GET_CODE (PATTERN (insn)) == ASM_INPUT
3654
                  || DEBUG_INSN_P (insn));
3655
      return;
3656
    }
3657
 
3658
  if (old_set != 0 && REG_P (SET_DEST (old_set))
3659
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3660
    {
3661
      /* Check for setting an eliminable register.  */
3662
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3663
        if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3664
          return;
3665
    }
3666
 
3667
  /* We allow one special case which happens to work on all machines we
3668
     currently support: a single set with the source or a REG_EQUAL
3669
     note being a PLUS of an eliminable register and a constant.  */
3670
  plus_src = plus_cst_src = 0;
3671
  sets_reg_p = false;
3672
  if (old_set && REG_P (SET_DEST (old_set)))
3673
    {
3674
      sets_reg_p = true;
3675
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3676
        plus_src = SET_SRC (old_set);
3677
      /* First see if the source is of the form (plus (...) CST).  */
3678
      if (plus_src
3679
          && CONST_INT_P (XEXP (plus_src, 1)))
3680
        plus_cst_src = plus_src;
3681
      else if (REG_P (SET_SRC (old_set))
3682
               || plus_src)
3683
        {
3684
          /* Otherwise, see if we have a REG_EQUAL note of the form
3685
             (plus (...) CST).  */
3686
          rtx links;
3687
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3688
            {
3689
              if ((REG_NOTE_KIND (links) == REG_EQUAL
3690
                   || REG_NOTE_KIND (links) == REG_EQUIV)
3691
                  && GET_CODE (XEXP (links, 0)) == PLUS
3692
                  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3693
                {
3694
                  plus_cst_src = XEXP (links, 0);
3695
                  break;
3696
                }
3697
            }
3698
        }
3699
    }
3700
 
3701
  /* Determine the effects of this insn on elimination offsets.  */
3702
  elimination_effects (old_body, VOIDmode);
3703
 
3704
  /* Eliminate all eliminable registers occurring in operands that
3705
     can be handled by reload.  */
3706
  extract_insn (insn);
3707
  for (i = 0; i < recog_data.n_dups; i++)
3708
    orig_dup[i] = *recog_data.dup_loc[i];
3709
 
3710
  for (i = 0; i < recog_data.n_operands; i++)
3711
    {
3712
      orig_operand[i] = recog_data.operand[i];
3713
 
3714
      /* For an asm statement, every operand is eliminable.  */
3715
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3716
        {
3717
          bool is_set_src, in_plus;
3718
 
3719
          /* Check for setting a register that we know about.  */
3720
          if (recog_data.operand_type[i] != OP_IN
3721
              && REG_P (orig_operand[i]))
3722
            {
3723
              /* If we are assigning to a register that can be eliminated, it
3724
                 must be as part of a PARALLEL, since the code above handles
3725
                 single SETs.  We must indicate that we can no longer
3726
                 eliminate this reg.  */
3727
              for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3728
                   ep++)
3729
                if (ep->from_rtx == orig_operand[i])
3730
                  ep->can_eliminate = 0;
3731
            }
3732
 
3733
          /* Companion to the above plus substitution, we can allow
3734
             invariants as the source of a plain move.  */
3735
          is_set_src = false;
3736
          if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3737
            is_set_src = true;
3738
          if (is_set_src && !sets_reg_p)
3739
            note_reg_elim_costly (&SET_SRC (old_set), insn);
3740
          in_plus = false;
3741
          if (plus_src && sets_reg_p
3742
              && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3743
                  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3744
            in_plus = true;
3745
 
3746
          eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3747
                            NULL_RTX,
3748
                            is_set_src || in_plus, true);
3749
          /* Terminate the search in check_eliminable_occurrences at
3750
             this point.  */
3751
          *recog_data.operand_loc[i] = 0;
3752
        }
3753
    }
3754
 
3755
  for (i = 0; i < recog_data.n_dups; i++)
3756
    *recog_data.dup_loc[i]
3757
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3758
 
3759
  /* If any eliminable remain, they aren't eliminable anymore.  */
3760
  check_eliminable_occurrences (old_body);
3761
 
3762
  /* Restore the old body.  */
3763
  for (i = 0; i < recog_data.n_operands; i++)
3764
    *recog_data.operand_loc[i] = orig_operand[i];
3765
  for (i = 0; i < recog_data.n_dups; i++)
3766
    *recog_data.dup_loc[i] = orig_dup[i];
3767
 
3768
  /* Update all elimination pairs to reflect the status after the current
3769
     insn.  The changes we make were determined by the earlier call to
3770
     elimination_effects.  */
3771
 
3772
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3773
    {
3774
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3775
        ep->can_eliminate = 0;
3776
 
3777
      ep->ref_outside_mem = 0;
3778
    }
3779
 
3780
  return;
3781
}
3782
 
3783
/* Loop through all elimination pairs.
3784
   Recalculate the number not at initial offset.
3785
 
3786
   Compute the maximum offset (minimum offset if the stack does not
3787
   grow downward) for each elimination pair.  */
3788
 
3789
static void
3790
update_eliminable_offsets (void)
3791
{
3792
  struct elim_table *ep;
3793
 
3794
  num_not_at_initial_offset = 0;
3795
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3796
    {
3797
      ep->previous_offset = ep->offset;
3798
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3799
        num_not_at_initial_offset++;
3800
    }
3801
}
3802
 
3803
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3804
   replacement we currently believe is valid, mark it as not eliminable if X
3805
   modifies DEST in any way other than by adding a constant integer to it.
3806
 
3807
   If DEST is the frame pointer, we do nothing because we assume that
3808
   all assignments to the hard frame pointer are nonlocal gotos and are being
3809
   done at a time when they are valid and do not disturb anything else.
3810
   Some machines want to eliminate a fake argument pointer with either the
3811
   frame or stack pointer.  Assignments to the hard frame pointer must not
3812
   prevent this elimination.
3813
 
3814
   Called via note_stores from reload before starting its passes to scan
3815
   the insns of the function.  */
3816
 
3817
static void
3818
mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3819
{
3820
  unsigned int i;
3821
 
3822
  /* A SUBREG of a hard register here is just changing its mode.  We should
3823
     not see a SUBREG of an eliminable hard register, but check just in
3824
     case.  */
3825
  if (GET_CODE (dest) == SUBREG)
3826
    dest = SUBREG_REG (dest);
3827
 
3828
  if (dest == hard_frame_pointer_rtx)
3829
    return;
3830
 
3831
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3832
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3833
        && (GET_CODE (x) != SET
3834
            || GET_CODE (SET_SRC (x)) != PLUS
3835
            || XEXP (SET_SRC (x), 0) != dest
3836
            || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3837
      {
3838
        reg_eliminate[i].can_eliminate_previous
3839
          = reg_eliminate[i].can_eliminate = 0;
3840
        num_eliminable--;
3841
      }
3842
}
3843
 
3844
/* Verify that the initial elimination offsets did not change since the
3845
   last call to set_initial_elim_offsets.  This is used to catch cases
3846
   where something illegal happened during reload_as_needed that could
3847
   cause incorrect code to be generated if we did not check for it.  */
3848
 
3849
static bool
3850
verify_initial_elim_offsets (void)
3851
{
3852
  HOST_WIDE_INT t;
3853
 
3854
  if (!num_eliminable)
3855
    return true;
3856
 
3857
#ifdef ELIMINABLE_REGS
3858
  {
3859
   struct elim_table *ep;
3860
 
3861
   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3862
     {
3863
       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3864
       if (t != ep->initial_offset)
3865
         return false;
3866
     }
3867
  }
3868
#else
3869
  INITIAL_FRAME_POINTER_OFFSET (t);
3870
  if (t != reg_eliminate[0].initial_offset)
3871
    return false;
3872
#endif
3873
 
3874
  return true;
3875
}
3876
 
3877
/* Reset all offsets on eliminable registers to their initial values.  */
3878
 
3879
static void
3880
set_initial_elim_offsets (void)
3881
{
3882
  struct elim_table *ep = reg_eliminate;
3883
 
3884
#ifdef ELIMINABLE_REGS
3885
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3886
    {
3887
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3888
      ep->previous_offset = ep->offset = ep->initial_offset;
3889
    }
3890
#else
3891
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3892
  ep->previous_offset = ep->offset = ep->initial_offset;
3893
#endif
3894
 
3895
  num_not_at_initial_offset = 0;
3896
}
3897
 
3898
/* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3899
 
3900
static void
3901
set_initial_eh_label_offset (rtx label)
3902
{
3903
  set_label_offsets (label, NULL_RTX, 1);
3904
}
3905
 
3906
/* Initialize the known label offsets.
3907
   Set a known offset for each forced label to be at the initial offset
3908
   of each elimination.  We do this because we assume that all
3909
   computed jumps occur from a location where each elimination is
3910
   at its initial offset.
3911
   For all other labels, show that we don't know the offsets.  */
3912
 
3913
static void
3914
set_initial_label_offsets (void)
3915
{
3916
  rtx x;
3917
  memset (offsets_known_at, 0, num_labels);
3918
 
3919
  for (x = forced_labels; x; x = XEXP (x, 1))
3920
    if (XEXP (x, 0))
3921
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3922
 
3923
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3924
    if (XEXP (x, 0))
3925
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3926
 
3927
  for_each_eh_label (set_initial_eh_label_offset);
3928
}
3929
 
3930
/* Set all elimination offsets to the known values for the code label given
3931
   by INSN.  */
3932
 
3933
static void
3934
set_offsets_for_label (rtx insn)
3935
{
3936
  unsigned int i;
3937
  int label_nr = CODE_LABEL_NUMBER (insn);
3938
  struct elim_table *ep;
3939
 
3940
  num_not_at_initial_offset = 0;
3941
  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3942
    {
3943
      ep->offset = ep->previous_offset
3944
                 = offsets_at[label_nr - first_label_num][i];
3945
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3946
        num_not_at_initial_offset++;
3947
    }
3948
}
3949
 
3950
/* See if anything that happened changes which eliminations are valid.
3951
   For example, on the SPARC, whether or not the frame pointer can
3952
   be eliminated can depend on what registers have been used.  We need
3953
   not check some conditions again (such as flag_omit_frame_pointer)
3954
   since they can't have changed.  */
3955
 
3956
static void
3957
update_eliminables (HARD_REG_SET *pset)
3958
{
3959
  int previous_frame_pointer_needed = frame_pointer_needed;
3960
  struct elim_table *ep;
3961
 
3962
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3963
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
3964
         && targetm.frame_pointer_required ())
3965
#ifdef ELIMINABLE_REGS
3966
        || ! targetm.can_eliminate (ep->from, ep->to)
3967
#endif
3968
        )
3969
      ep->can_eliminate = 0;
3970
 
3971
  /* Look for the case where we have discovered that we can't replace
3972
     register A with register B and that means that we will now be
3973
     trying to replace register A with register C.  This means we can
3974
     no longer replace register C with register B and we need to disable
3975
     such an elimination, if it exists.  This occurs often with A == ap,
3976
     B == sp, and C == fp.  */
3977
 
3978
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3979
    {
3980
      struct elim_table *op;
3981
      int new_to = -1;
3982
 
3983
      if (! ep->can_eliminate && ep->can_eliminate_previous)
3984
        {
3985
          /* Find the current elimination for ep->from, if there is a
3986
             new one.  */
3987
          for (op = reg_eliminate;
3988
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3989
            if (op->from == ep->from && op->can_eliminate)
3990
              {
3991
                new_to = op->to;
3992
                break;
3993
              }
3994
 
3995
          /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3996
             disable it.  */
3997
          for (op = reg_eliminate;
3998
               op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3999
            if (op->from == new_to && op->to == ep->to)
4000
              op->can_eliminate = 0;
4001
        }
4002
    }
4003
 
4004
  /* See if any registers that we thought we could eliminate the previous
4005
     time are no longer eliminable.  If so, something has changed and we
4006
     must spill the register.  Also, recompute the number of eliminable
4007
     registers and see if the frame pointer is needed; it is if there is
4008
     no elimination of the frame pointer that we can perform.  */
4009
 
4010
  frame_pointer_needed = 1;
4011
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4012
    {
4013
      if (ep->can_eliminate
4014
          && ep->from == FRAME_POINTER_REGNUM
4015
          && ep->to != HARD_FRAME_POINTER_REGNUM
4016
          && (! SUPPORTS_STACK_ALIGNMENT
4017
              || ! crtl->stack_realign_needed))
4018
        frame_pointer_needed = 0;
4019
 
4020
      if (! ep->can_eliminate && ep->can_eliminate_previous)
4021
        {
4022
          ep->can_eliminate_previous = 0;
4023
          SET_HARD_REG_BIT (*pset, ep->from);
4024
          num_eliminable--;
4025
        }
4026
    }
4027
 
4028
  /* If we didn't need a frame pointer last time, but we do now, spill
4029
     the hard frame pointer.  */
4030
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
4031
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
4032
}
4033
 
4034
/* Return true if X is used as the target register of an elimination.  */
4035
 
4036
bool
4037
elimination_target_reg_p (rtx x)
4038
{
4039
  struct elim_table *ep;
4040
 
4041
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4042
    if (ep->to_rtx == x && ep->can_eliminate)
4043
      return true;
4044
 
4045
  return false;
4046
}
4047
 
4048
/* Initialize the table of registers to eliminate.
4049
   Pre-condition: global flag frame_pointer_needed has been set before
4050
   calling this function.  */
4051
 
4052
static void
4053
init_elim_table (void)
4054
{
4055
  struct elim_table *ep;
4056
#ifdef ELIMINABLE_REGS
4057
  const struct elim_table_1 *ep1;
4058
#endif
4059
 
4060
  if (!reg_eliminate)
4061
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4062
 
4063
  num_eliminable = 0;
4064
 
4065
#ifdef ELIMINABLE_REGS
4066
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4067
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4068
    {
4069
      ep->from = ep1->from;
4070
      ep->to = ep1->to;
4071
      ep->can_eliminate = ep->can_eliminate_previous
4072
        = (targetm.can_eliminate (ep->from, ep->to)
4073
           && ! (ep->to == STACK_POINTER_REGNUM
4074
                 && frame_pointer_needed
4075
                 && (! SUPPORTS_STACK_ALIGNMENT
4076
                     || ! stack_realign_fp)));
4077
    }
4078
#else
4079
  reg_eliminate[0].from = reg_eliminate_1[0].from;
4080
  reg_eliminate[0].to = reg_eliminate_1[0].to;
4081
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
4082
    = ! frame_pointer_needed;
4083
#endif
4084
 
4085
  /* Count the number of eliminable registers and build the FROM and TO
4086
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
4087
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4088
     We depend on this.  */
4089
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4090
    {
4091
      num_eliminable += ep->can_eliminate;
4092
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4093
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4094
    }
4095
}
4096
 
4097
/* Find all the pseudo registers that didn't get hard regs
4098
   but do have known equivalent constants or memory slots.
4099
   These include parameters (known equivalent to parameter slots)
4100
   and cse'd or loop-moved constant memory addresses.
4101
 
4102
   Record constant equivalents in reg_equiv_constant
4103
   so they will be substituted by find_reloads.
4104
   Record memory equivalents in reg_mem_equiv so they can
4105
   be substituted eventually by altering the REG-rtx's.  */
4106
 
4107
static void
4108
init_eliminable_invariants (rtx first, bool do_subregs)
4109
{
4110
  int i;
4111
  rtx insn;
4112
 
4113
  grow_reg_equivs ();
4114
  if (do_subregs)
4115
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
4116
  else
4117
    reg_max_ref_width = NULL;
4118
 
4119
  num_eliminable_invariants = 0;
4120
 
4121
  first_label_num = get_first_label_num ();
4122
  num_labels = max_label_num () - first_label_num;
4123
 
4124
  /* Allocate the tables used to store offset information at labels.  */
4125
  offsets_known_at = XNEWVEC (char, num_labels);
4126
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
4127
 
4128
/* Look for REG_EQUIV notes; record what each pseudo is equivalent
4129
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
4130
   find largest such for each pseudo.  FIRST is the head of the insn
4131
   list.  */
4132
 
4133
  for (insn = first; insn; insn = NEXT_INSN (insn))
4134
    {
4135
      rtx set = single_set (insn);
4136
 
4137
      /* We may introduce USEs that we want to remove at the end, so
4138
         we'll mark them with QImode.  Make sure there are no
4139
         previously-marked insns left by say regmove.  */
4140
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4141
          && GET_MODE (insn) != VOIDmode)
4142
        PUT_MODE (insn, VOIDmode);
4143
 
4144
      if (do_subregs && NONDEBUG_INSN_P (insn))
4145
        scan_paradoxical_subregs (PATTERN (insn));
4146
 
4147
      if (set != 0 && REG_P (SET_DEST (set)))
4148
        {
4149
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4150
          rtx x;
4151
 
4152
          if (! note)
4153
            continue;
4154
 
4155
          i = REGNO (SET_DEST (set));
4156
          x = XEXP (note, 0);
4157
 
4158
          if (i <= LAST_VIRTUAL_REGISTER)
4159
            continue;
4160
 
4161
          /* If flag_pic and we have constant, verify it's legitimate.  */
4162
          if (!CONSTANT_P (x)
4163
              || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4164
            {
4165
              /* It can happen that a REG_EQUIV note contains a MEM
4166
                 that is not a legitimate memory operand.  As later
4167
                 stages of reload assume that all addresses found
4168
                 in the reg_equiv_* arrays were originally legitimate,
4169
                 we ignore such REG_EQUIV notes.  */
4170
              if (memory_operand (x, VOIDmode))
4171
                {
4172
                  /* Always unshare the equivalence, so we can
4173
                     substitute into this insn without touching the
4174
                       equivalence.  */
4175
                  reg_equiv_memory_loc (i) = copy_rtx (x);
4176
                }
4177
              else if (function_invariant_p (x))
4178
                {
4179
                  enum machine_mode mode;
4180
 
4181
                  mode = GET_MODE (SET_DEST (set));
4182
                  if (GET_CODE (x) == PLUS)
4183
                    {
4184
                      /* This is PLUS of frame pointer and a constant,
4185
                         and might be shared.  Unshare it.  */
4186
                      reg_equiv_invariant (i) = copy_rtx (x);
4187
                      num_eliminable_invariants++;
4188
                    }
4189
                  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4190
                    {
4191
                      reg_equiv_invariant (i) = x;
4192
                      num_eliminable_invariants++;
4193
                    }
4194
                  else if (targetm.legitimate_constant_p (mode, x))
4195
                    reg_equiv_constant (i) = x;
4196
                  else
4197
                    {
4198
                      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4199
                      if (! reg_equiv_memory_loc (i))
4200
                        reg_equiv_init (i) = NULL_RTX;
4201
                    }
4202
                }
4203
              else
4204
                {
4205
                  reg_equiv_init (i) = NULL_RTX;
4206
                  continue;
4207
                }
4208
            }
4209
          else
4210
            reg_equiv_init (i) = NULL_RTX;
4211
        }
4212
    }
4213
 
4214
  if (dump_file)
4215
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4216
      if (reg_equiv_init (i))
4217
        {
4218
          fprintf (dump_file, "init_insns for %u: ", i);
4219
          print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4220
          fprintf (dump_file, "\n");
4221
        }
4222
}
4223
 
4224
/* Indicate that we no longer have known memory locations or constants.
4225
   Free all data involved in tracking these.  */
4226
 
4227
static void
4228
free_reg_equiv (void)
4229
{
4230
  int i;
4231
 
4232
 
4233
  free (offsets_known_at);
4234
  free (offsets_at);
4235
  offsets_at = 0;
4236
  offsets_known_at = 0;
4237
 
4238
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4239
    if (reg_equiv_alt_mem_list (i))
4240
      free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4241
  VEC_free (reg_equivs_t, gc, reg_equivs);
4242
  reg_equivs = NULL;
4243
 
4244
}
4245
 
4246
/* Kick all pseudos out of hard register REGNO.
4247
 
4248
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4249
   because we found we can't eliminate some register.  In the case, no pseudos
4250
   are allowed to be in the register, even if they are only in a block that
4251
   doesn't require spill registers, unlike the case when we are spilling this
4252
   hard reg to produce another spill register.
4253
 
4254
   Return nonzero if any pseudos needed to be kicked out.  */
4255
 
4256
static void
4257
spill_hard_reg (unsigned int regno, int cant_eliminate)
4258
{
4259
  int i;
4260
 
4261
  if (cant_eliminate)
4262
    {
4263
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4264
      df_set_regs_ever_live (regno, true);
4265
    }
4266
 
4267
  /* Spill every pseudo reg that was allocated to this reg
4268
     or to something that overlaps this reg.  */
4269
 
4270
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4271
    if (reg_renumber[i] >= 0
4272
        && (unsigned int) reg_renumber[i] <= regno
4273
        && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4274
      SET_REGNO_REG_SET (&spilled_pseudos, i);
4275
}
4276
 
4277
/* After find_reload_regs has been run for all insn that need reloads,
4278
   and/or spill_hard_regs was called, this function is used to actually
4279
   spill pseudo registers and try to reallocate them.  It also sets up the
4280
   spill_regs array for use by choose_reload_regs.  */
4281
 
4282
static int
4283
finish_spills (int global)
4284
{
4285
  struct insn_chain *chain;
4286
  int something_changed = 0;
4287
  unsigned i;
4288
  reg_set_iterator rsi;
4289
 
4290
  /* Build the spill_regs array for the function.  */
4291
  /* If there are some registers still to eliminate and one of the spill regs
4292
     wasn't ever used before, additional stack space may have to be
4293
     allocated to store this register.  Thus, we may have changed the offset
4294
     between the stack and frame pointers, so mark that something has changed.
4295
 
4296
     One might think that we need only set VAL to 1 if this is a call-used
4297
     register.  However, the set of registers that must be saved by the
4298
     prologue is not identical to the call-used set.  For example, the
4299
     register used by the call insn for the return PC is a call-used register,
4300
     but must be saved by the prologue.  */
4301
 
4302
  n_spills = 0;
4303
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4304
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
4305
      {
4306
        spill_reg_order[i] = n_spills;
4307
        spill_regs[n_spills++] = i;
4308
        if (num_eliminable && ! df_regs_ever_live_p (i))
4309
          something_changed = 1;
4310
        df_set_regs_ever_live (i, true);
4311
      }
4312
    else
4313
      spill_reg_order[i] = -1;
4314
 
4315
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4316
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
4317
      {
4318
        /* Record the current hard register the pseudo is allocated to
4319
           in pseudo_previous_regs so we avoid reallocating it to the
4320
           same hard reg in a later pass.  */
4321
        gcc_assert (reg_renumber[i] >= 0);
4322
 
4323
        SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4324
        /* Mark it as no longer having a hard register home.  */
4325
        reg_renumber[i] = -1;
4326
        if (ira_conflicts_p)
4327
          /* Inform IRA about the change.  */
4328
          ira_mark_allocation_change (i);
4329
        /* We will need to scan everything again.  */
4330
        something_changed = 1;
4331
      }
4332
 
4333
  /* Retry global register allocation if possible.  */
4334
  if (global && ira_conflicts_p)
4335
    {
4336
      unsigned int n;
4337
 
4338
      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4339
      /* For every insn that needs reloads, set the registers used as spill
4340
         regs in pseudo_forbidden_regs for every pseudo live across the
4341
         insn.  */
4342
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4343
        {
4344
          EXECUTE_IF_SET_IN_REG_SET
4345
            (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4346
            {
4347
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4348
                                chain->used_spill_regs);
4349
            }
4350
          EXECUTE_IF_SET_IN_REG_SET
4351
            (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4352
            {
4353
              IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4354
                                chain->used_spill_regs);
4355
            }
4356
        }
4357
 
4358
      /* Retry allocating the pseudos spilled in IRA and the
4359
         reload.  For each reg, merge the various reg sets that
4360
         indicate which hard regs can't be used, and call
4361
         ira_reassign_pseudos.  */
4362
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4363
        if (reg_old_renumber[i] != reg_renumber[i])
4364
          {
4365
            if (reg_renumber[i] < 0)
4366
              temp_pseudo_reg_arr[n++] = i;
4367
            else
4368
              CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4369
          }
4370
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4371
                                bad_spill_regs_global,
4372
                                pseudo_forbidden_regs, pseudo_previous_regs,
4373
                                &spilled_pseudos))
4374
        something_changed = 1;
4375
    }
4376
  /* Fix up the register information in the insn chain.
4377
     This involves deleting those of the spilled pseudos which did not get
4378
     a new hard register home from the live_{before,after} sets.  */
4379
  for (chain = reload_insn_chain; chain; chain = chain->next)
4380
    {
4381
      HARD_REG_SET used_by_pseudos;
4382
      HARD_REG_SET used_by_pseudos2;
4383
 
4384
      if (! ira_conflicts_p)
4385
        {
4386
          /* Don't do it for IRA because IRA and the reload still can
4387
             assign hard registers to the spilled pseudos on next
4388
             reload iterations.  */
4389
          AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4390
          AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4391
        }
4392
      /* Mark any unallocated hard regs as available for spills.  That
4393
         makes inheritance work somewhat better.  */
4394
      if (chain->need_reload)
4395
        {
4396
          REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4397
          REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4398
          IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4399
 
4400
          compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4401
          compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4402
          /* Value of chain->used_spill_regs from previous iteration
4403
             may be not included in the value calculated here because
4404
             of possible removing caller-saves insns (see function
4405
             delete_caller_save_insns.  */
4406
          COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4407
          AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4408
        }
4409
    }
4410
 
4411
  CLEAR_REG_SET (&changed_allocation_pseudos);
4412
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4413
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4414
    {
4415
      int regno = reg_renumber[i];
4416
      if (reg_old_renumber[i] == regno)
4417
        continue;
4418
 
4419
      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4420
 
4421
      alter_reg (i, reg_old_renumber[i], false);
4422
      reg_old_renumber[i] = regno;
4423
      if (dump_file)
4424
        {
4425
          if (regno == -1)
4426
            fprintf (dump_file, " Register %d now on stack.\n\n", i);
4427
          else
4428
            fprintf (dump_file, " Register %d now in %d.\n\n",
4429
                     i, reg_renumber[i]);
4430
        }
4431
    }
4432
 
4433
  return something_changed;
4434
}
4435
 
4436
/* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4437
 
4438
static void
4439
scan_paradoxical_subregs (rtx x)
4440
{
4441
  int i;
4442
  const char *fmt;
4443
  enum rtx_code code = GET_CODE (x);
4444
 
4445
  switch (code)
4446
    {
4447
    case REG:
4448
    case CONST_INT:
4449
    case CONST:
4450
    case SYMBOL_REF:
4451
    case LABEL_REF:
4452
    case CONST_DOUBLE:
4453
    case CONST_FIXED:
4454
    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
4455
    case CC0:
4456
    case PC:
4457
    case USE:
4458
    case CLOBBER:
4459
      return;
4460
 
4461
    case SUBREG:
4462
      if (REG_P (SUBREG_REG (x))
4463
          && (GET_MODE_SIZE (GET_MODE (x))
4464
              > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4465
        {
4466
          reg_max_ref_width[REGNO (SUBREG_REG (x))]
4467
            = GET_MODE_SIZE (GET_MODE (x));
4468
          mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4469
        }
4470
      return;
4471
 
4472
    default:
4473
      break;
4474
    }
4475
 
4476
  fmt = GET_RTX_FORMAT (code);
4477
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4478
    {
4479
      if (fmt[i] == 'e')
4480
        scan_paradoxical_subregs (XEXP (x, i));
4481
      else if (fmt[i] == 'E')
4482
        {
4483
          int j;
4484
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4485
            scan_paradoxical_subregs (XVECEXP (x, i, j));
4486
        }
4487
    }
4488
}
4489
 
4490
/* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4491
   If *OP_PTR is a paradoxical subreg, try to remove that subreg
4492
   and apply the corresponding narrowing subreg to *OTHER_PTR.
4493
   Return true if the operands were changed, false otherwise.  */
4494
 
4495
static bool
4496
strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4497
{
4498
  rtx op, inner, other, tem;
4499
 
4500
  op = *op_ptr;
4501
  if (!paradoxical_subreg_p (op))
4502
    return false;
4503
  inner = SUBREG_REG (op);
4504
 
4505
  other = *other_ptr;
4506
  tem = gen_lowpart_common (GET_MODE (inner), other);
4507
  if (!tem)
4508
    return false;
4509
 
4510
  /* If the lowpart operation turned a hard register into a subreg,
4511
     rather than simplifying it to another hard register, then the
4512
     mode change cannot be properly represented.  For example, OTHER
4513
     might be valid in its current mode, but not in the new one.  */
4514
  if (GET_CODE (tem) == SUBREG
4515
      && REG_P (other)
4516
      && HARD_REGISTER_P (other))
4517
    return false;
4518
 
4519
  *op_ptr = inner;
4520
  *other_ptr = tem;
4521
  return true;
4522
}
4523
 
4524
/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4525
   examine all of the reload insns between PREV and NEXT exclusive, and
4526
   annotate all that may trap.  */
4527
 
4528
static void
4529
fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4530
{
4531
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4532
  if (note == NULL)
4533
    return;
4534
  if (!insn_could_throw_p (insn))
4535
    remove_note (insn, note);
4536
  copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4537
}
4538
 
4539
/* Reload pseudo-registers into hard regs around each insn as needed.
4540
   Additional register load insns are output before the insn that needs it
4541
   and perhaps store insns after insns that modify the reloaded pseudo reg.
4542
 
4543
   reg_last_reload_reg and reg_reloaded_contents keep track of
4544
   which registers are already available in reload registers.
4545
   We update these for the reloads that we perform,
4546
   as the insns are scanned.  */
4547
 
4548
static void
4549
reload_as_needed (int live_known)
4550
{
4551
  struct insn_chain *chain;
4552
#if defined (AUTO_INC_DEC)
4553
  int i;
4554
#endif
4555
  rtx x, marker;
4556
 
4557
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4558
  memset (spill_reg_store, 0, sizeof spill_reg_store);
4559
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4560
  INIT_REG_SET (&reg_has_output_reload);
4561
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
4562
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4563
 
4564
  set_initial_elim_offsets ();
4565
 
4566
  /* Generate a marker insn that we will move around.  */
4567
  marker = emit_note (NOTE_INSN_DELETED);
4568
  unlink_insn_chain (marker, marker);
4569
 
4570
  for (chain = reload_insn_chain; chain; chain = chain->next)
4571
    {
4572
      rtx prev = 0;
4573
      rtx insn = chain->insn;
4574
      rtx old_next = NEXT_INSN (insn);
4575
#ifdef AUTO_INC_DEC
4576
      rtx old_prev = PREV_INSN (insn);
4577
#endif
4578
 
4579
      /* If we pass a label, copy the offsets from the label information
4580
         into the current offsets of each elimination.  */
4581
      if (LABEL_P (insn))
4582
        set_offsets_for_label (insn);
4583
 
4584
      else if (INSN_P (insn))
4585
        {
4586
          regset_head regs_to_forget;
4587
          INIT_REG_SET (&regs_to_forget);
4588
          note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4589
 
4590
          /* If this is a USE and CLOBBER of a MEM, ensure that any
4591
             references to eliminable registers have been removed.  */
4592
 
4593
          if ((GET_CODE (PATTERN (insn)) == USE
4594
               || GET_CODE (PATTERN (insn)) == CLOBBER)
4595
              && MEM_P (XEXP (PATTERN (insn), 0)))
4596
            XEXP (XEXP (PATTERN (insn), 0), 0)
4597
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4598
                                GET_MODE (XEXP (PATTERN (insn), 0)),
4599
                                NULL_RTX);
4600
 
4601
          /* If we need to do register elimination processing, do so.
4602
             This might delete the insn, in which case we are done.  */
4603
          if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4604
            {
4605
              eliminate_regs_in_insn (insn, 1);
4606
              if (NOTE_P (insn))
4607
                {
4608
                  update_eliminable_offsets ();
4609
                  CLEAR_REG_SET (&regs_to_forget);
4610
                  continue;
4611
                }
4612
            }
4613
 
4614
          /* If need_elim is nonzero but need_reload is zero, one might think
4615
             that we could simply set n_reloads to 0.  However, find_reloads
4616
             could have done some manipulation of the insn (such as swapping
4617
             commutative operands), and these manipulations are lost during
4618
             the first pass for every insn that needs register elimination.
4619
             So the actions of find_reloads must be redone here.  */
4620
 
4621
          if (! chain->need_elim && ! chain->need_reload
4622
              && ! chain->need_operand_change)
4623
            n_reloads = 0;
4624
          /* First find the pseudo regs that must be reloaded for this insn.
4625
             This info is returned in the tables reload_... (see reload.h).
4626
             Also modify the body of INSN by substituting RELOAD
4627
             rtx's for those pseudo regs.  */
4628
          else
4629
            {
4630
              CLEAR_REG_SET (&reg_has_output_reload);
4631
              CLEAR_HARD_REG_SET (reg_is_output_reload);
4632
 
4633
              find_reloads (insn, 1, spill_indirect_levels, live_known,
4634
                            spill_reg_order);
4635
            }
4636
 
4637
          if (n_reloads > 0)
4638
            {
4639
              rtx next = NEXT_INSN (insn);
4640
              rtx p;
4641
 
4642
              /* ??? PREV can get deleted by reload inheritance.
4643
                 Work around this by emitting a marker note.  */
4644
              prev = PREV_INSN (insn);
4645
              reorder_insns_nobb (marker, marker, prev);
4646
 
4647
              /* Now compute which reload regs to reload them into.  Perhaps
4648
                 reusing reload regs from previous insns, or else output
4649
                 load insns to reload them.  Maybe output store insns too.
4650
                 Record the choices of reload reg in reload_reg_rtx.  */
4651
              choose_reload_regs (chain);
4652
 
4653
              /* Generate the insns to reload operands into or out of
4654
                 their reload regs.  */
4655
              emit_reload_insns (chain);
4656
 
4657
              /* Substitute the chosen reload regs from reload_reg_rtx
4658
                 into the insn's body (or perhaps into the bodies of other
4659
                 load and store insn that we just made for reloading
4660
                 and that we moved the structure into).  */
4661
              subst_reloads (insn);
4662
 
4663
              prev = PREV_INSN (marker);
4664
              unlink_insn_chain (marker, marker);
4665
 
4666
              /* Adjust the exception region notes for loads and stores.  */
4667
              if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4668
                fixup_eh_region_note (insn, prev, next);
4669
 
4670
              /* Adjust the location of REG_ARGS_SIZE.  */
4671
              p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4672
              if (p)
4673
                {
4674
                  remove_note (insn, p);
4675
                  fixup_args_size_notes (prev, PREV_INSN (next),
4676
                                         INTVAL (XEXP (p, 0)));
4677
                }
4678
 
4679
              /* If this was an ASM, make sure that all the reload insns
4680
                 we have generated are valid.  If not, give an error
4681
                 and delete them.  */
4682
              if (asm_noperands (PATTERN (insn)) >= 0)
4683
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4684
                  if (p != insn && INSN_P (p)
4685
                      && GET_CODE (PATTERN (p)) != USE
4686
                      && (recog_memoized (p) < 0
4687
                          || (extract_insn (p), ! constrain_operands (1))))
4688
                    {
4689
                      error_for_asm (insn,
4690
                                     "%<asm%> operand requires "
4691
                                     "impossible reload");
4692
                      delete_insn (p);
4693
                    }
4694
            }
4695
 
4696
          if (num_eliminable && chain->need_elim)
4697
            update_eliminable_offsets ();
4698
 
4699
          /* Any previously reloaded spilled pseudo reg, stored in this insn,
4700
             is no longer validly lying around to save a future reload.
4701
             Note that this does not detect pseudos that were reloaded
4702
             for this insn in order to be stored in
4703
             (obeying register constraints).  That is correct; such reload
4704
             registers ARE still valid.  */
4705
          forget_marked_reloads (&regs_to_forget);
4706
          CLEAR_REG_SET (&regs_to_forget);
4707
 
4708
          /* There may have been CLOBBER insns placed after INSN.  So scan
4709
             between INSN and NEXT and use them to forget old reloads.  */
4710
          for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4711
            if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4712
              note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4713
 
4714
#ifdef AUTO_INC_DEC
4715
          /* Likewise for regs altered by auto-increment in this insn.
4716
             REG_INC notes have been changed by reloading:
4717
             find_reloads_address_1 records substitutions for them,
4718
             which have been performed by subst_reloads above.  */
4719
          for (i = n_reloads - 1; i >= 0; i--)
4720
            {
4721
              rtx in_reg = rld[i].in_reg;
4722
              if (in_reg)
4723
                {
4724
                  enum rtx_code code = GET_CODE (in_reg);
4725
                  /* PRE_INC / PRE_DEC will have the reload register ending up
4726
                     with the same value as the stack slot, but that doesn't
4727
                     hold true for POST_INC / POST_DEC.  Either we have to
4728
                     convert the memory access to a true POST_INC / POST_DEC,
4729
                     or we can't use the reload register for inheritance.  */
4730
                  if ((code == POST_INC || code == POST_DEC)
4731
                      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4732
                                            REGNO (rld[i].reg_rtx))
4733
                      /* Make sure it is the inc/dec pseudo, and not
4734
                         some other (e.g. output operand) pseudo.  */
4735
                      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4736
                          == REGNO (XEXP (in_reg, 0))))
4737
 
4738
                    {
4739
                      rtx reload_reg = rld[i].reg_rtx;
4740
                      enum machine_mode mode = GET_MODE (reload_reg);
4741
                      int n = 0;
4742
                      rtx p;
4743
 
4744
                      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4745
                        {
4746
                          /* We really want to ignore REG_INC notes here, so
4747
                             use PATTERN (p) as argument to reg_set_p .  */
4748
                          if (reg_set_p (reload_reg, PATTERN (p)))
4749
                            break;
4750
                          n = count_occurrences (PATTERN (p), reload_reg, 0);
4751
                          if (! n)
4752
                            continue;
4753
                          if (n == 1)
4754
                            {
4755
                              rtx replace_reg
4756
                                = gen_rtx_fmt_e (code, mode, reload_reg);
4757
 
4758
                              validate_replace_rtx_group (reload_reg,
4759
                                                          replace_reg, p);
4760
                              n = verify_changes (0);
4761
 
4762
                              /* We must also verify that the constraints
4763
                                 are met after the replacement.  Make sure
4764
                                 extract_insn is only called for an insn
4765
                                 where the replacements were found to be
4766
                                 valid so far. */
4767
                              if (n)
4768
                                {
4769
                                  extract_insn (p);
4770
                                  n = constrain_operands (1);
4771
                                }
4772
 
4773
                              /* If the constraints were not met, then
4774
                                 undo the replacement, else confirm it.  */
4775
                              if (!n)
4776
                                cancel_changes (0);
4777
                              else
4778
                                confirm_change_group ();
4779
                            }
4780
                          break;
4781
                        }
4782
                      if (n == 1)
4783
                        {
4784
                          add_reg_note (p, REG_INC, reload_reg);
4785
                          /* Mark this as having an output reload so that the
4786
                             REG_INC processing code below won't invalidate
4787
                             the reload for inheritance.  */
4788
                          SET_HARD_REG_BIT (reg_is_output_reload,
4789
                                            REGNO (reload_reg));
4790
                          SET_REGNO_REG_SET (&reg_has_output_reload,
4791
                                             REGNO (XEXP (in_reg, 0)));
4792
                        }
4793
                      else
4794
                        forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4795
                                              NULL);
4796
                    }
4797
                  else if ((code == PRE_INC || code == PRE_DEC)
4798
                           && TEST_HARD_REG_BIT (reg_reloaded_valid,
4799
                                                 REGNO (rld[i].reg_rtx))
4800
                           /* Make sure it is the inc/dec pseudo, and not
4801
                              some other (e.g. output operand) pseudo.  */
4802
                           && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4803
                               == REGNO (XEXP (in_reg, 0))))
4804
                    {
4805
                      SET_HARD_REG_BIT (reg_is_output_reload,
4806
                                        REGNO (rld[i].reg_rtx));
4807
                      SET_REGNO_REG_SET (&reg_has_output_reload,
4808
                                         REGNO (XEXP (in_reg, 0)));
4809
                    }
4810
                  else if (code == PRE_INC || code == PRE_DEC
4811
                           || code == POST_INC || code == POST_DEC)
4812
                    {
4813
                      int in_regno = REGNO (XEXP (in_reg, 0));
4814
 
4815
                      if (reg_last_reload_reg[in_regno] != NULL_RTX)
4816
                        {
4817
                          int in_hard_regno;
4818
                          bool forget_p = true;
4819
 
4820
                          in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4821
                          if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4822
                                                 in_hard_regno))
4823
                            {
4824
                              for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4825
                                   x != old_next;
4826
                                   x = NEXT_INSN (x))
4827
                                if (x == reg_reloaded_insn[in_hard_regno])
4828
                                  {
4829
                                    forget_p = false;
4830
                                    break;
4831
                                  }
4832
                            }
4833
                          /* If for some reasons, we didn't set up
4834
                             reg_last_reload_reg in this insn,
4835
                             invalidate inheritance from previous
4836
                             insns for the incremented/decremented
4837
                             register.  Such registers will be not in
4838
                             reg_has_output_reload.  Invalidate it
4839
                             also if the corresponding element in
4840
                             reg_reloaded_insn is also
4841
                             invalidated.  */
4842
                          if (forget_p)
4843
                            forget_old_reloads_1 (XEXP (in_reg, 0),
4844
                                                  NULL_RTX, NULL);
4845
                        }
4846
                    }
4847
                }
4848
            }
4849
          /* If a pseudo that got a hard register is auto-incremented,
4850
             we must purge records of copying it into pseudos without
4851
             hard registers.  */
4852
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4853
            if (REG_NOTE_KIND (x) == REG_INC)
4854
              {
4855
                /* See if this pseudo reg was reloaded in this insn.
4856
                   If so, its last-reload info is still valid
4857
                   because it is based on this insn's reload.  */
4858
                for (i = 0; i < n_reloads; i++)
4859
                  if (rld[i].out == XEXP (x, 0))
4860
                    break;
4861
 
4862
                if (i == n_reloads)
4863
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4864
              }
4865
#endif
4866
        }
4867
      /* A reload reg's contents are unknown after a label.  */
4868
      if (LABEL_P (insn))
4869
        CLEAR_HARD_REG_SET (reg_reloaded_valid);
4870
 
4871
      /* Don't assume a reload reg is still good after a call insn
4872
         if it is a call-used reg, or if it contains a value that will
4873
         be partially clobbered by the call.  */
4874
      else if (CALL_P (insn))
4875
        {
4876
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4877
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4878
 
4879
          /* If this is a call to a setjmp-type function, we must not
4880
             reuse any reload reg contents across the call; that will
4881
             just be clobbered by other uses of the register in later
4882
             code, before the longjmp.  */
4883
          if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4884
            CLEAR_HARD_REG_SET (reg_reloaded_valid);
4885
        }
4886
    }
4887
 
4888
  /* Clean up.  */
4889
  free (reg_last_reload_reg);
4890
  CLEAR_REG_SET (&reg_has_output_reload);
4891
}
4892
 
4893
/* Discard all record of any value reloaded from X,
4894
   or reloaded in X from someplace else;
4895
   unless X is an output reload reg of the current insn.
4896
 
4897
   X may be a hard reg (the reload reg)
4898
   or it may be a pseudo reg that was reloaded from.
4899
 
4900
   When DATA is non-NULL just mark the registers in regset
4901
   to be forgotten later.  */
4902
 
4903
static void
4904
forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4905
                      void *data)
4906
{
4907
  unsigned int regno;
4908
  unsigned int nr;
4909
  regset regs = (regset) data;
4910
 
4911
  /* note_stores does give us subregs of hard regs,
4912
     subreg_regno_offset requires a hard reg.  */
4913
  while (GET_CODE (x) == SUBREG)
4914
    {
4915
      /* We ignore the subreg offset when calculating the regno,
4916
         because we are using the entire underlying hard register
4917
         below.  */
4918
      x = SUBREG_REG (x);
4919
    }
4920
 
4921
  if (!REG_P (x))
4922
    return;
4923
 
4924
  regno = REGNO (x);
4925
 
4926
  if (regno >= FIRST_PSEUDO_REGISTER)
4927
    nr = 1;
4928
  else
4929
    {
4930
      unsigned int i;
4931
 
4932
      nr = hard_regno_nregs[regno][GET_MODE (x)];
4933
      /* Storing into a spilled-reg invalidates its contents.
4934
         This can happen if a block-local pseudo is allocated to that reg
4935
         and it wasn't spilled because this block's total need is 0.
4936
         Then some insn might have an optional reload and use this reg.  */
4937
      if (!regs)
4938
        for (i = 0; i < nr; i++)
4939
          /* But don't do this if the reg actually serves as an output
4940
             reload reg in the current instruction.  */
4941
          if (n_reloads == 0
4942
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4943
            {
4944
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4945
              spill_reg_store[regno + i] = 0;
4946
            }
4947
    }
4948
 
4949
  if (regs)
4950
    while (nr-- > 0)
4951
      SET_REGNO_REG_SET (regs, regno + nr);
4952
  else
4953
    {
4954
      /* Since value of X has changed,
4955
         forget any value previously copied from it.  */
4956
 
4957
      while (nr-- > 0)
4958
        /* But don't forget a copy if this is the output reload
4959
           that establishes the copy's validity.  */
4960
        if (n_reloads == 0
4961
            || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4962
          reg_last_reload_reg[regno + nr] = 0;
4963
     }
4964
}
4965
 
4966
/* Forget the reloads marked in regset by previous function.  */
4967
static void
4968
forget_marked_reloads (regset regs)
4969
{
4970
  unsigned int reg;
4971
  reg_set_iterator rsi;
4972
  EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4973
    {
4974
      if (reg < FIRST_PSEUDO_REGISTER
4975
          /* But don't do this if the reg actually serves as an output
4976
             reload reg in the current instruction.  */
4977
          && (n_reloads == 0
4978
              || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4979
          {
4980
            CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4981
            spill_reg_store[reg] = 0;
4982
          }
4983
      if (n_reloads == 0
4984
          || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4985
        reg_last_reload_reg[reg] = 0;
4986
    }
4987
}
4988
 
4989
/* The following HARD_REG_SETs indicate when each hard register is
4990
   used for a reload of various parts of the current insn.  */
4991
 
4992
/* If reg is unavailable for all reloads.  */
4993
static HARD_REG_SET reload_reg_unavailable;
4994
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4995
static HARD_REG_SET reload_reg_used;
4996
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4997
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4998
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4999
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5000
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
5001
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5002
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
5003
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5004
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
5005
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5006
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
5007
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5008
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
5009
static HARD_REG_SET reload_reg_used_in_op_addr;
5010
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
5011
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
5012
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
5013
static HARD_REG_SET reload_reg_used_in_insn;
5014
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
5015
static HARD_REG_SET reload_reg_used_in_other_addr;
5016
 
5017
/* If reg is in use as a reload reg for any sort of reload.  */
5018
static HARD_REG_SET reload_reg_used_at_all;
5019
 
5020
/* If reg is use as an inherited reload.  We just mark the first register
5021
   in the group.  */
5022
static HARD_REG_SET reload_reg_used_for_inherit;
5023
 
5024
/* Records which hard regs are used in any way, either as explicit use or
5025
   by being allocated to a pseudo during any point of the current insn.  */
5026
static HARD_REG_SET reg_used_in_insn;
5027
 
5028
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5029
   TYPE. MODE is used to indicate how many consecutive regs are
5030
   actually used.  */
5031
 
5032
static void
5033
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5034
                        enum machine_mode mode)
5035
{
5036
  switch (type)
5037
    {
5038
    case RELOAD_OTHER:
5039
      add_to_hard_reg_set (&reload_reg_used, mode, regno);
5040
      break;
5041
 
5042
    case RELOAD_FOR_INPUT_ADDRESS:
5043
      add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5044
      break;
5045
 
5046
    case RELOAD_FOR_INPADDR_ADDRESS:
5047
      add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5048
      break;
5049
 
5050
    case RELOAD_FOR_OUTPUT_ADDRESS:
5051
      add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5052
      break;
5053
 
5054
    case RELOAD_FOR_OUTADDR_ADDRESS:
5055
      add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5056
      break;
5057
 
5058
    case RELOAD_FOR_OPERAND_ADDRESS:
5059
      add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5060
      break;
5061
 
5062
    case RELOAD_FOR_OPADDR_ADDR:
5063
      add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5064
      break;
5065
 
5066
    case RELOAD_FOR_OTHER_ADDRESS:
5067
      add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5068
      break;
5069
 
5070
    case RELOAD_FOR_INPUT:
5071
      add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5072
      break;
5073
 
5074
    case RELOAD_FOR_OUTPUT:
5075
      add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5076
      break;
5077
 
5078
    case RELOAD_FOR_INSN:
5079
      add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5080
      break;
5081
    }
5082
 
5083
  add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5084
}
5085
 
5086
/* Similarly, but show REGNO is no longer in use for a reload.  */
5087
 
5088
static void
5089
clear_reload_reg_in_use (unsigned int regno, int opnum,
5090
                         enum reload_type type, enum machine_mode mode)
5091
{
5092
  unsigned int nregs = hard_regno_nregs[regno][mode];
5093
  unsigned int start_regno, end_regno, r;
5094
  int i;
5095
  /* A complication is that for some reload types, inheritance might
5096
     allow multiple reloads of the same types to share a reload register.
5097
     We set check_opnum if we have to check only reloads with the same
5098
     operand number, and check_any if we have to check all reloads.  */
5099
  int check_opnum = 0;
5100
  int check_any = 0;
5101
  HARD_REG_SET *used_in_set;
5102
 
5103
  switch (type)
5104
    {
5105
    case RELOAD_OTHER:
5106
      used_in_set = &reload_reg_used;
5107
      break;
5108
 
5109
    case RELOAD_FOR_INPUT_ADDRESS:
5110
      used_in_set = &reload_reg_used_in_input_addr[opnum];
5111
      break;
5112
 
5113
    case RELOAD_FOR_INPADDR_ADDRESS:
5114
      check_opnum = 1;
5115
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5116
      break;
5117
 
5118
    case RELOAD_FOR_OUTPUT_ADDRESS:
5119
      used_in_set = &reload_reg_used_in_output_addr[opnum];
5120
      break;
5121
 
5122
    case RELOAD_FOR_OUTADDR_ADDRESS:
5123
      check_opnum = 1;
5124
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5125
      break;
5126
 
5127
    case RELOAD_FOR_OPERAND_ADDRESS:
5128
      used_in_set = &reload_reg_used_in_op_addr;
5129
      break;
5130
 
5131
    case RELOAD_FOR_OPADDR_ADDR:
5132
      check_any = 1;
5133
      used_in_set = &reload_reg_used_in_op_addr_reload;
5134
      break;
5135
 
5136
    case RELOAD_FOR_OTHER_ADDRESS:
5137
      used_in_set = &reload_reg_used_in_other_addr;
5138
      check_any = 1;
5139
      break;
5140
 
5141
    case RELOAD_FOR_INPUT:
5142
      used_in_set = &reload_reg_used_in_input[opnum];
5143
      break;
5144
 
5145
    case RELOAD_FOR_OUTPUT:
5146
      used_in_set = &reload_reg_used_in_output[opnum];
5147
      break;
5148
 
5149
    case RELOAD_FOR_INSN:
5150
      used_in_set = &reload_reg_used_in_insn;
5151
      break;
5152
    default:
5153
      gcc_unreachable ();
5154
    }
5155
  /* We resolve conflicts with remaining reloads of the same type by
5156
     excluding the intervals of reload registers by them from the
5157
     interval of freed reload registers.  Since we only keep track of
5158
     one set of interval bounds, we might have to exclude somewhat
5159
     more than what would be necessary if we used a HARD_REG_SET here.
5160
     But this should only happen very infrequently, so there should
5161
     be no reason to worry about it.  */
5162
 
5163
  start_regno = regno;
5164
  end_regno = regno + nregs;
5165
  if (check_opnum || check_any)
5166
    {
5167
      for (i = n_reloads - 1; i >= 0; i--)
5168
        {
5169
          if (rld[i].when_needed == type
5170
              && (check_any || rld[i].opnum == opnum)
5171
              && rld[i].reg_rtx)
5172
            {
5173
              unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5174
              unsigned int conflict_end
5175
                = end_hard_regno (rld[i].mode, conflict_start);
5176
 
5177
              /* If there is an overlap with the first to-be-freed register,
5178
                 adjust the interval start.  */
5179
              if (conflict_start <= start_regno && conflict_end > start_regno)
5180
                start_regno = conflict_end;
5181
              /* Otherwise, if there is a conflict with one of the other
5182
                 to-be-freed registers, adjust the interval end.  */
5183
              if (conflict_start > start_regno && conflict_start < end_regno)
5184
                end_regno = conflict_start;
5185
            }
5186
        }
5187
    }
5188
 
5189
  for (r = start_regno; r < end_regno; r++)
5190
    CLEAR_HARD_REG_BIT (*used_in_set, r);
5191
}
5192
 
5193
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
5194
   specified by OPNUM and TYPE.  */
5195
 
5196
static int
5197
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5198
{
5199
  int i;
5200
 
5201
  /* In use for a RELOAD_OTHER means it's not available for anything.  */
5202
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5203
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5204
    return 0;
5205
 
5206
  switch (type)
5207
    {
5208
    case RELOAD_OTHER:
5209
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
5210
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5211
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5212
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5213
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5214
        return 0;
5215
 
5216
      for (i = 0; i < reload_n_operands; i++)
5217
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5218
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5219
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5220
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5221
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5222
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5223
          return 0;
5224
 
5225
      return 1;
5226
 
5227
    case RELOAD_FOR_INPUT:
5228
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5229
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5230
        return 0;
5231
 
5232
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5233
        return 0;
5234
 
5235
      /* If it is used for some other input, can't use it.  */
5236
      for (i = 0; i < reload_n_operands; i++)
5237
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5238
          return 0;
5239
 
5240
      /* If it is used in a later operand's address, can't use it.  */
5241
      for (i = opnum + 1; i < reload_n_operands; i++)
5242
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5243
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5244
          return 0;
5245
 
5246
      return 1;
5247
 
5248
    case RELOAD_FOR_INPUT_ADDRESS:
5249
      /* Can't use a register if it is used for an input address for this
5250
         operand or used as an input in an earlier one.  */
5251
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5252
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5253
        return 0;
5254
 
5255
      for (i = 0; i < opnum; i++)
5256
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5257
          return 0;
5258
 
5259
      return 1;
5260
 
5261
    case RELOAD_FOR_INPADDR_ADDRESS:
5262
      /* Can't use a register if it is used for an input address
5263
         for this operand or used as an input in an earlier
5264
         one.  */
5265
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5266
        return 0;
5267
 
5268
      for (i = 0; i < opnum; i++)
5269
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5270
          return 0;
5271
 
5272
      return 1;
5273
 
5274
    case RELOAD_FOR_OUTPUT_ADDRESS:
5275
      /* Can't use a register if it is used for an output address for this
5276
         operand or used as an output in this or a later operand.  Note
5277
         that multiple output operands are emitted in reverse order, so
5278
         the conflicting ones are those with lower indices.  */
5279
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5280
        return 0;
5281
 
5282
      for (i = 0; i <= opnum; i++)
5283
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5284
          return 0;
5285
 
5286
      return 1;
5287
 
5288
    case RELOAD_FOR_OUTADDR_ADDRESS:
5289
      /* Can't use a register if it is used for an output address
5290
         for this operand or used as an output in this or a
5291
         later operand.  Note that multiple output operands are
5292
         emitted in reverse order, so the conflicting ones are
5293
         those with lower indices.  */
5294
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5295
        return 0;
5296
 
5297
      for (i = 0; i <= opnum; i++)
5298
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5299
          return 0;
5300
 
5301
      return 1;
5302
 
5303
    case RELOAD_FOR_OPERAND_ADDRESS:
5304
      for (i = 0; i < reload_n_operands; i++)
5305
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5306
          return 0;
5307
 
5308
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5309
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5310
 
5311
    case RELOAD_FOR_OPADDR_ADDR:
5312
      for (i = 0; i < reload_n_operands; i++)
5313
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5314
          return 0;
5315
 
5316
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5317
 
5318
    case RELOAD_FOR_OUTPUT:
5319
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5320
         outputs, or an operand address for this or an earlier output.
5321
         Note that multiple output operands are emitted in reverse order,
5322
         so the conflicting ones are those with higher indices.  */
5323
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5324
        return 0;
5325
 
5326
      for (i = 0; i < reload_n_operands; i++)
5327
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5328
          return 0;
5329
 
5330
      for (i = opnum; i < reload_n_operands; i++)
5331
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5332
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5333
          return 0;
5334
 
5335
      return 1;
5336
 
5337
    case RELOAD_FOR_INSN:
5338
      for (i = 0; i < reload_n_operands; i++)
5339
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5340
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5341
          return 0;
5342
 
5343
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5344
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5345
 
5346
    case RELOAD_FOR_OTHER_ADDRESS:
5347
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5348
 
5349
    default:
5350
      gcc_unreachable ();
5351
    }
5352
}
5353
 
5354
/* Return 1 if the value in reload reg REGNO, as used by the reload with
5355
   the number RELOADNUM, is still available in REGNO at the end of the insn.
5356
 
5357
   We can assume that the reload reg was already tested for availability
5358
   at the time it is needed, and we should not check this again,
5359
   in case the reg has already been marked in use.  */
5360
 
5361
static int
5362
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5363
{
5364
  int opnum = rld[reloadnum].opnum;
5365
  enum reload_type type = rld[reloadnum].when_needed;
5366
  int i;
5367
 
5368
  /* See if there is a reload with the same type for this operand, using
5369
     the same register. This case is not handled by the code below.  */
5370
  for (i = reloadnum + 1; i < n_reloads; i++)
5371
    {
5372
      rtx reg;
5373
      int nregs;
5374
 
5375
      if (rld[i].opnum != opnum || rld[i].when_needed != type)
5376
        continue;
5377
      reg = rld[i].reg_rtx;
5378
      if (reg == NULL_RTX)
5379
        continue;
5380
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
5381
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
5382
        return 0;
5383
    }
5384
 
5385
  switch (type)
5386
    {
5387
    case RELOAD_OTHER:
5388
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5389
         its value must reach the end.  */
5390
      return 1;
5391
 
5392
      /* If this use is for part of the insn,
5393
         its value reaches if no subsequent part uses the same register.
5394
         Just like the above function, don't try to do this with lots
5395
         of fallthroughs.  */
5396
 
5397
    case RELOAD_FOR_OTHER_ADDRESS:
5398
      /* Here we check for everything else, since these don't conflict
5399
         with anything else and everything comes later.  */
5400
 
5401
      for (i = 0; i < reload_n_operands; i++)
5402
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5403
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5404
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5405
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5406
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5407
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5408
          return 0;
5409
 
5410
      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5411
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5412
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5413
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5414
 
5415
    case RELOAD_FOR_INPUT_ADDRESS:
5416
    case RELOAD_FOR_INPADDR_ADDRESS:
5417
      /* Similar, except that we check only for this and subsequent inputs
5418
         and the address of only subsequent inputs and we do not need
5419
         to check for RELOAD_OTHER objects since they are known not to
5420
         conflict.  */
5421
 
5422
      for (i = opnum; i < reload_n_operands; i++)
5423
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5424
          return 0;
5425
 
5426
      for (i = opnum + 1; i < reload_n_operands; i++)
5427
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5428
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5429
          return 0;
5430
 
5431
      for (i = 0; i < reload_n_operands; i++)
5432
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5433
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5434
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5435
          return 0;
5436
 
5437
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5438
        return 0;
5439
 
5440
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5441
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5442
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5443
 
5444
    case RELOAD_FOR_INPUT:
5445
      /* Similar to input address, except we start at the next operand for
5446
         both input and input address and we do not check for
5447
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5448
         would conflict.  */
5449
 
5450
      for (i = opnum + 1; i < reload_n_operands; i++)
5451
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5452
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5453
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5454
          return 0;
5455
 
5456
      /* ... fall through ...  */
5457
 
5458
    case RELOAD_FOR_OPERAND_ADDRESS:
5459
      /* Check outputs and their addresses.  */
5460
 
5461
      for (i = 0; i < reload_n_operands; i++)
5462
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5463
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5464
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5465
          return 0;
5466
 
5467
      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5468
 
5469
    case RELOAD_FOR_OPADDR_ADDR:
5470
      for (i = 0; i < reload_n_operands; i++)
5471
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5472
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5473
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5474
          return 0;
5475
 
5476
      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5477
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5478
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5479
 
5480
    case RELOAD_FOR_INSN:
5481
      /* These conflict with other outputs with RELOAD_OTHER.  So
5482
         we need only check for output addresses.  */
5483
 
5484
      opnum = reload_n_operands;
5485
 
5486
      /* ... fall through ...  */
5487
 
5488
    case RELOAD_FOR_OUTPUT:
5489
    case RELOAD_FOR_OUTPUT_ADDRESS:
5490
    case RELOAD_FOR_OUTADDR_ADDRESS:
5491
      /* We already know these can't conflict with a later output.  So the
5492
         only thing to check are later output addresses.
5493
         Note that multiple output operands are emitted in reverse order,
5494
         so the conflicting ones are those with lower indices.  */
5495
      for (i = 0; i < opnum; i++)
5496
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5497
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5498
          return 0;
5499
 
5500
      return 1;
5501
 
5502
    default:
5503
      gcc_unreachable ();
5504
    }
5505
}
5506
 
5507
/* Like reload_reg_reaches_end_p, but check that the condition holds for
5508
   every register in REG.  */
5509
 
5510
static bool
5511
reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5512
{
5513
  unsigned int i;
5514
 
5515
  for (i = REGNO (reg); i < END_REGNO (reg); i++)
5516
    if (!reload_reg_reaches_end_p (i, reloadnum))
5517
      return false;
5518
  return true;
5519
}
5520
 
5521
 
5522
/*  Returns whether R1 and R2 are uniquely chained: the value of one
5523
    is used by the other, and that value is not used by any other
5524
    reload for this insn.  This is used to partially undo the decision
5525
    made in find_reloads when in the case of multiple
5526
    RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5527
    RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5528
    reloads.  This code tries to avoid the conflict created by that
5529
    change.  It might be cleaner to explicitly keep track of which
5530
    RELOAD_FOR_OPADDR_ADDR reload is associated with which
5531
    RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5532
    this after the fact. */
5533
static bool
5534
reloads_unique_chain_p (int r1, int r2)
5535
{
5536
  int i;
5537
 
5538
  /* We only check input reloads.  */
5539
  if (! rld[r1].in || ! rld[r2].in)
5540
    return false;
5541
 
5542
  /* Avoid anything with output reloads.  */
5543
  if (rld[r1].out || rld[r2].out)
5544
    return false;
5545
 
5546
  /* "chained" means one reload is a component of the other reload,
5547
     not the same as the other reload.  */
5548
  if (rld[r1].opnum != rld[r2].opnum
5549
      || rtx_equal_p (rld[r1].in, rld[r2].in)
5550
      || rld[r1].optional || rld[r2].optional
5551
      || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5552
            || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5553
    return false;
5554
 
5555
  for (i = 0; i < n_reloads; i ++)
5556
    /* Look for input reloads that aren't our two */
5557
    if (i != r1 && i != r2 && rld[i].in)
5558
      {
5559
        /* If our reload is mentioned at all, it isn't a simple chain.  */
5560
        if (reg_mentioned_p (rld[r1].in, rld[i].in))
5561
          return false;
5562
      }
5563
  return true;
5564
}
5565
 
5566
/* The recursive function change all occurrences of WHAT in *WHERE
5567
   to REPL.  */
5568
static void
5569
substitute (rtx *where, const_rtx what, rtx repl)
5570
{
5571
  const char *fmt;
5572
  int i;
5573
  enum rtx_code code;
5574
 
5575
  if (*where == 0)
5576
    return;
5577
 
5578
  if (*where == what || rtx_equal_p (*where, what))
5579
    {
5580
      /* Record the location of the changed rtx.  */
5581
      VEC_safe_push (rtx_p, heap, substitute_stack, where);
5582
      *where = repl;
5583
      return;
5584
    }
5585
 
5586
  code = GET_CODE (*where);
5587
  fmt = GET_RTX_FORMAT (code);
5588
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5589
    {
5590
      if (fmt[i] == 'E')
5591
        {
5592
          int j;
5593
 
5594
          for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5595
            substitute (&XVECEXP (*where, i, j), what, repl);
5596
        }
5597
      else if (fmt[i] == 'e')
5598
        substitute (&XEXP (*where, i), what, repl);
5599
    }
5600
}
5601
 
5602
/* The function returns TRUE if chain of reload R1 and R2 (in any
5603
   order) can be evaluated without usage of intermediate register for
5604
   the reload containing another reload.  It is important to see
5605
   gen_reload to understand what the function is trying to do.  As an
5606
   example, let us have reload chain
5607
 
5608
      r2: const
5609
      r1: <something> + const
5610
 
5611
   and reload R2 got reload reg HR.  The function returns true if
5612
   there is a correct insn HR = HR + <something>.  Otherwise,
5613
   gen_reload will use intermediate register (and this is the reload
5614
   reg for R1) to reload <something>.
5615
 
5616
   We need this function to find a conflict for chain reloads.  In our
5617
   example, if HR = HR + <something> is incorrect insn, then we cannot
5618
   use HR as a reload register for R2.  If we do use it then we get a
5619
   wrong code:
5620
 
5621
      HR = const
5622
      HR = <something>
5623
      HR = HR + HR
5624
 
5625
*/
5626
static bool
5627
gen_reload_chain_without_interm_reg_p (int r1, int r2)
5628
{
5629
  /* Assume other cases in gen_reload are not possible for
5630
     chain reloads or do need an intermediate hard registers.  */
5631
  bool result = true;
5632
  int regno, n, code;
5633
  rtx out, in, insn;
5634
  rtx last = get_last_insn ();
5635
 
5636
  /* Make r2 a component of r1.  */
5637
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5638
    {
5639
      n = r1;
5640
      r1 = r2;
5641
      r2 = n;
5642
    }
5643
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5644
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5645
  gcc_assert (regno >= 0);
5646
  out = gen_rtx_REG (rld[r1].mode, regno);
5647
  in = rld[r1].in;
5648
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5649
 
5650
  /* If IN is a paradoxical SUBREG, remove it and try to put the
5651
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
5652
  strip_paradoxical_subreg (&in, &out);
5653
 
5654
  if (GET_CODE (in) == PLUS
5655
      && (REG_P (XEXP (in, 0))
5656
          || GET_CODE (XEXP (in, 0)) == SUBREG
5657
          || MEM_P (XEXP (in, 0)))
5658
      && (REG_P (XEXP (in, 1))
5659
          || GET_CODE (XEXP (in, 1)) == SUBREG
5660
          || CONSTANT_P (XEXP (in, 1))
5661
          || MEM_P (XEXP (in, 1))))
5662
    {
5663
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5664
      code = recog_memoized (insn);
5665
      result = false;
5666
 
5667
      if (code >= 0)
5668
        {
5669
          extract_insn (insn);
5670
          /* We want constrain operands to treat this insn strictly in
5671
             its validity determination, i.e., the way it would after
5672
             reload has completed.  */
5673
          result = constrain_operands (1);
5674
        }
5675
 
5676
      delete_insns_since (last);
5677
    }
5678
 
5679
  /* Restore the original value at each changed address within R1.  */
5680
  while (!VEC_empty (rtx_p, substitute_stack))
5681
    {
5682
      rtx *where = VEC_pop (rtx_p, substitute_stack);
5683
      *where = rld[r2].in;
5684
    }
5685
 
5686
  return result;
5687
}
5688
 
5689
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5690
   Return 0 otherwise.
5691
 
5692
   This function uses the same algorithm as reload_reg_free_p above.  */
5693
 
5694
static int
5695
reloads_conflict (int r1, int r2)
5696
{
5697
  enum reload_type r1_type = rld[r1].when_needed;
5698
  enum reload_type r2_type = rld[r2].when_needed;
5699
  int r1_opnum = rld[r1].opnum;
5700
  int r2_opnum = rld[r2].opnum;
5701
 
5702
  /* RELOAD_OTHER conflicts with everything.  */
5703
  if (r2_type == RELOAD_OTHER)
5704
    return 1;
5705
 
5706
  /* Otherwise, check conflicts differently for each type.  */
5707
 
5708
  switch (r1_type)
5709
    {
5710
    case RELOAD_FOR_INPUT:
5711
      return (r2_type == RELOAD_FOR_INSN
5712
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5713
              || r2_type == RELOAD_FOR_OPADDR_ADDR
5714
              || r2_type == RELOAD_FOR_INPUT
5715
              || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5716
                   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5717
                  && r2_opnum > r1_opnum));
5718
 
5719
    case RELOAD_FOR_INPUT_ADDRESS:
5720
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5721
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5722
 
5723
    case RELOAD_FOR_INPADDR_ADDRESS:
5724
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5725
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5726
 
5727
    case RELOAD_FOR_OUTPUT_ADDRESS:
5728
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5729
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5730
 
5731
    case RELOAD_FOR_OUTADDR_ADDRESS:
5732
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5733
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5734
 
5735
    case RELOAD_FOR_OPERAND_ADDRESS:
5736
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5737
              || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5738
                  && (!reloads_unique_chain_p (r1, r2)
5739
                      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5740
 
5741
    case RELOAD_FOR_OPADDR_ADDR:
5742
      return (r2_type == RELOAD_FOR_INPUT
5743
              || r2_type == RELOAD_FOR_OPADDR_ADDR);
5744
 
5745
    case RELOAD_FOR_OUTPUT:
5746
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5747
              || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5748
                   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5749
                  && r2_opnum >= r1_opnum));
5750
 
5751
    case RELOAD_FOR_INSN:
5752
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5753
              || r2_type == RELOAD_FOR_INSN
5754
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5755
 
5756
    case RELOAD_FOR_OTHER_ADDRESS:
5757
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5758
 
5759
    case RELOAD_OTHER:
5760
      return 1;
5761
 
5762
    default:
5763
      gcc_unreachable ();
5764
    }
5765
}
5766
 
5767
/* Indexed by reload number, 1 if incoming value
5768
   inherited from previous insns.  */
5769
static char reload_inherited[MAX_RELOADS];
5770
 
5771
/* For an inherited reload, this is the insn the reload was inherited from,
5772
   if we know it.  Otherwise, this is 0.  */
5773
static rtx reload_inheritance_insn[MAX_RELOADS];
5774
 
5775
/* If nonzero, this is a place to get the value of the reload,
5776
   rather than using reload_in.  */
5777
static rtx reload_override_in[MAX_RELOADS];
5778
 
5779
/* For each reload, the hard register number of the register used,
5780
   or -1 if we did not need a register for this reload.  */
5781
static int reload_spill_index[MAX_RELOADS];
5782
 
5783
/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
5784
static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5785
 
5786
/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
5787
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5788
 
5789
/* Subroutine of free_for_value_p, used to check a single register.
5790
   START_REGNO is the starting regno of the full reload register
5791
   (possibly comprising multiple hard registers) that we are considering.  */
5792
 
5793
static int
5794
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5795
                             enum reload_type type, rtx value, rtx out,
5796
                             int reloadnum, int ignore_address_reloads)
5797
{
5798
  int time1;
5799
  /* Set if we see an input reload that must not share its reload register
5800
     with any new earlyclobber, but might otherwise share the reload
5801
     register with an output or input-output reload.  */
5802
  int check_earlyclobber = 0;
5803
  int i;
5804
  int copy = 0;
5805
 
5806
  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5807
    return 0;
5808
 
5809
  if (out == const0_rtx)
5810
    {
5811
      copy = 1;
5812
      out = NULL_RTX;
5813
    }
5814
 
5815
  /* We use some pseudo 'time' value to check if the lifetimes of the
5816
     new register use would overlap with the one of a previous reload
5817
     that is not read-only or uses a different value.
5818
     The 'time' used doesn't have to be linear in any shape or form, just
5819
     monotonic.
5820
     Some reload types use different 'buckets' for each operand.
5821
     So there are MAX_RECOG_OPERANDS different time values for each
5822
     such reload type.
5823
     We compute TIME1 as the time when the register for the prospective
5824
     new reload ceases to be live, and TIME2 for each existing
5825
     reload as the time when that the reload register of that reload
5826
     becomes live.
5827
     Where there is little to be gained by exact lifetime calculations,
5828
     we just make conservative assumptions, i.e. a longer lifetime;
5829
     this is done in the 'default:' cases.  */
5830
  switch (type)
5831
    {
5832
    case RELOAD_FOR_OTHER_ADDRESS:
5833
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5834
      time1 = copy ? 0 : 1;
5835
      break;
5836
    case RELOAD_OTHER:
5837
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5838
      break;
5839
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5840
         RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5841
         respectively, to the time values for these, we get distinct time
5842
         values.  To get distinct time values for each operand, we have to
5843
         multiply opnum by at least three.  We round that up to four because
5844
         multiply by four is often cheaper.  */
5845
    case RELOAD_FOR_INPADDR_ADDRESS:
5846
      time1 = opnum * 4 + 2;
5847
      break;
5848
    case RELOAD_FOR_INPUT_ADDRESS:
5849
      time1 = opnum * 4 + 3;
5850
      break;
5851
    case RELOAD_FOR_INPUT:
5852
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5853
         executes (inclusive).  */
5854
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5855
      break;
5856
    case RELOAD_FOR_OPADDR_ADDR:
5857
      /* opnum * 4 + 4
5858
         <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5859
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
5860
      break;
5861
    case RELOAD_FOR_OPERAND_ADDRESS:
5862
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5863
         is executed.  */
5864
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5865
      break;
5866
    case RELOAD_FOR_OUTADDR_ADDRESS:
5867
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5868
      break;
5869
    case RELOAD_FOR_OUTPUT_ADDRESS:
5870
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5871
      break;
5872
    default:
5873
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
5874
    }
5875
 
5876
  for (i = 0; i < n_reloads; i++)
5877
    {
5878
      rtx reg = rld[i].reg_rtx;
5879
      if (reg && REG_P (reg)
5880
          && ((unsigned) regno - true_regnum (reg)
5881
              <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5882
          && i != reloadnum)
5883
        {
5884
          rtx other_input = rld[i].in;
5885
 
5886
          /* If the other reload loads the same input value, that
5887
             will not cause a conflict only if it's loading it into
5888
             the same register.  */
5889
          if (true_regnum (reg) != start_regno)
5890
            other_input = NULL_RTX;
5891
          if (! other_input || ! rtx_equal_p (other_input, value)
5892
              || rld[i].out || out)
5893
            {
5894
              int time2;
5895
              switch (rld[i].when_needed)
5896
                {
5897
                case RELOAD_FOR_OTHER_ADDRESS:
5898
                  time2 = 0;
5899
                  break;
5900
                case RELOAD_FOR_INPADDR_ADDRESS:
5901
                  /* find_reloads makes sure that a
5902
                     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5903
                     by at most one - the first -
5904
                     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5905
                     address reload is inherited, the address address reload
5906
                     goes away, so we can ignore this conflict.  */
5907
                  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5908
                      && ignore_address_reloads
5909
                      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5910
                         Then the address address is still needed to store
5911
                         back the new address.  */
5912
                      && ! rld[reloadnum].out)
5913
                    continue;
5914
                  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5915
                     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5916
                     reloads go away.  */
5917
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5918
                      && ignore_address_reloads
5919
                      /* Unless we are reloading an auto_inc expression.  */
5920
                      && ! rld[reloadnum].out)
5921
                    continue;
5922
                  time2 = rld[i].opnum * 4 + 2;
5923
                  break;
5924
                case RELOAD_FOR_INPUT_ADDRESS:
5925
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5926
                      && ignore_address_reloads
5927
                      && ! rld[reloadnum].out)
5928
                    continue;
5929
                  time2 = rld[i].opnum * 4 + 3;
5930
                  break;
5931
                case RELOAD_FOR_INPUT:
5932
                  time2 = rld[i].opnum * 4 + 4;
5933
                  check_earlyclobber = 1;
5934
                  break;
5935
                  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5936
                     == MAX_RECOG_OPERAND * 4  */
5937
                case RELOAD_FOR_OPADDR_ADDR:
5938
                  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5939
                      && ignore_address_reloads
5940
                      && ! rld[reloadnum].out)
5941
                    continue;
5942
                  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5943
                  break;
5944
                case RELOAD_FOR_OPERAND_ADDRESS:
5945
                  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5946
                  check_earlyclobber = 1;
5947
                  break;
5948
                case RELOAD_FOR_INSN:
5949
                  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5950
                  break;
5951
                case RELOAD_FOR_OUTPUT:
5952
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5953
                     instruction is executed.  */
5954
                  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5955
                  break;
5956
                  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5957
                     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5958
                     value.  */
5959
                case RELOAD_FOR_OUTADDR_ADDRESS:
5960
                  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5961
                      && ignore_address_reloads
5962
                      && ! rld[reloadnum].out)
5963
                    continue;
5964
                  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5965
                  break;
5966
                case RELOAD_FOR_OUTPUT_ADDRESS:
5967
                  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5968
                  break;
5969
                case RELOAD_OTHER:
5970
                  /* If there is no conflict in the input part, handle this
5971
                     like an output reload.  */
5972
                  if (! rld[i].in || rtx_equal_p (other_input, value))
5973
                    {
5974
                      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5975
                      /* Earlyclobbered outputs must conflict with inputs.  */
5976
                      if (earlyclobber_operand_p (rld[i].out))
5977
                        time2 = MAX_RECOG_OPERANDS * 4 + 3;
5978
 
5979
                      break;
5980
                    }
5981
                  time2 = 1;
5982
                  /* RELOAD_OTHER might be live beyond instruction execution,
5983
                     but this is not obvious when we set time2 = 1.  So check
5984
                     here if there might be a problem with the new reload
5985
                     clobbering the register used by the RELOAD_OTHER.  */
5986
                  if (out)
5987
                    return 0;
5988
                  break;
5989
                default:
5990
                  return 0;
5991
                }
5992
              if ((time1 >= time2
5993
                   && (! rld[i].in || rld[i].out
5994
                       || ! rtx_equal_p (other_input, value)))
5995
                  || (out && rld[reloadnum].out_reg
5996
                      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5997
                return 0;
5998
            }
5999
        }
6000
    }
6001
 
6002
  /* Earlyclobbered outputs must conflict with inputs.  */
6003
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
6004
    return 0;
6005
 
6006
  return 1;
6007
}
6008
 
6009
/* Return 1 if the value in reload reg REGNO, as used by a reload
6010
   needed for the part of the insn specified by OPNUM and TYPE,
6011
   may be used to load VALUE into it.
6012
 
6013
   MODE is the mode in which the register is used, this is needed to
6014
   determine how many hard regs to test.
6015
 
6016
   Other read-only reloads with the same value do not conflict
6017
   unless OUT is nonzero and these other reloads have to live while
6018
   output reloads live.
6019
   If OUT is CONST0_RTX, this is a special case: it means that the
6020
   test should not be for using register REGNO as reload register, but
6021
   for copying from register REGNO into the reload register.
6022
 
6023
   RELOADNUM is the number of the reload we want to load this value for;
6024
   a reload does not conflict with itself.
6025
 
6026
   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6027
   reloads that load an address for the very reload we are considering.
6028
 
6029
   The caller has to make sure that there is no conflict with the return
6030
   register.  */
6031
 
6032
static int
6033
free_for_value_p (int regno, enum machine_mode mode, int opnum,
6034
                  enum reload_type type, rtx value, rtx out, int reloadnum,
6035
                  int ignore_address_reloads)
6036
{
6037
  int nregs = hard_regno_nregs[regno][mode];
6038
  while (nregs-- > 0)
6039
    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6040
                                       value, out, reloadnum,
6041
                                       ignore_address_reloads))
6042
      return 0;
6043
  return 1;
6044
}
6045
 
6046
/* Return nonzero if the rtx X is invariant over the current function.  */
6047
/* ??? Actually, the places where we use this expect exactly what is
6048
   tested here, and not everything that is function invariant.  In
6049
   particular, the frame pointer and arg pointer are special cased;
6050
   pic_offset_table_rtx is not, and we must not spill these things to
6051
   memory.  */
6052
 
6053
int
6054
function_invariant_p (const_rtx x)
6055
{
6056
  if (CONSTANT_P (x))
6057
    return 1;
6058
  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6059
    return 1;
6060
  if (GET_CODE (x) == PLUS
6061
      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6062
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
6063
    return 1;
6064
  return 0;
6065
}
6066
 
6067
/* Determine whether the reload reg X overlaps any rtx'es used for
6068
   overriding inheritance.  Return nonzero if so.  */
6069
 
6070
static int
6071
conflicts_with_override (rtx x)
6072
{
6073
  int i;
6074
  for (i = 0; i < n_reloads; i++)
6075
    if (reload_override_in[i]
6076
        && reg_overlap_mentioned_p (x, reload_override_in[i]))
6077
      return 1;
6078
  return 0;
6079
}
6080
 
6081
/* Give an error message saying we failed to find a reload for INSN,
6082
   and clear out reload R.  */
6083
static void
6084
failed_reload (rtx insn, int r)
6085
{
6086
  if (asm_noperands (PATTERN (insn)) < 0)
6087
    /* It's the compiler's fault.  */
6088
    fatal_insn ("could not find a spill register", insn);
6089
 
6090
  /* It's the user's fault; the operand's mode and constraint
6091
     don't match.  Disable this reload so we don't crash in final.  */
6092
  error_for_asm (insn,
6093
                 "%<asm%> operand constraint incompatible with operand size");
6094
  rld[r].in = 0;
6095
  rld[r].out = 0;
6096
  rld[r].reg_rtx = 0;
6097
  rld[r].optional = 1;
6098
  rld[r].secondary_p = 1;
6099
}
6100
 
6101
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6102
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6103
   successful.  */
6104
static int
6105
set_reload_reg (int i, int r)
6106
{
6107
  /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6108
     parameter.  */
6109
  int regno ATTRIBUTE_UNUSED;
6110
  rtx reg = spill_reg_rtx[i];
6111
 
6112
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6113
    spill_reg_rtx[i] = reg
6114
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6115
 
6116
  regno = true_regnum (reg);
6117
 
6118
  /* Detect when the reload reg can't hold the reload mode.
6119
     This used to be one `if', but Sequent compiler can't handle that.  */
6120
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6121
    {
6122
      enum machine_mode test_mode = VOIDmode;
6123
      if (rld[r].in)
6124
        test_mode = GET_MODE (rld[r].in);
6125
      /* If rld[r].in has VOIDmode, it means we will load it
6126
         in whatever mode the reload reg has: to wit, rld[r].mode.
6127
         We have already tested that for validity.  */
6128
      /* Aside from that, we need to test that the expressions
6129
         to reload from or into have modes which are valid for this
6130
         reload register.  Otherwise the reload insns would be invalid.  */
6131
      if (! (rld[r].in != 0 && test_mode != VOIDmode
6132
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6133
        if (! (rld[r].out != 0
6134
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6135
          {
6136
            /* The reg is OK.  */
6137
            last_spill_reg = i;
6138
 
6139
            /* Mark as in use for this insn the reload regs we use
6140
               for this.  */
6141
            mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6142
                                    rld[r].when_needed, rld[r].mode);
6143
 
6144
            rld[r].reg_rtx = reg;
6145
            reload_spill_index[r] = spill_regs[i];
6146
            return 1;
6147
          }
6148
    }
6149
  return 0;
6150
}
6151
 
6152
/* Find a spill register to use as a reload register for reload R.
6153
   LAST_RELOAD is nonzero if this is the last reload for the insn being
6154
   processed.
6155
 
6156
   Set rld[R].reg_rtx to the register allocated.
6157
 
6158
   We return 1 if successful, or 0 if we couldn't find a spill reg and
6159
   we didn't change anything.  */
6160
 
6161
static int
6162
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6163
                     int last_reload)
6164
{
6165
  int i, pass, count;
6166
 
6167
  /* If we put this reload ahead, thinking it is a group,
6168
     then insist on finding a group.  Otherwise we can grab a
6169
     reg that some other reload needs.
6170
     (That can happen when we have a 68000 DATA_OR_FP_REG
6171
     which is a group of data regs or one fp reg.)
6172
     We need not be so restrictive if there are no more reloads
6173
     for this insn.
6174
 
6175
     ??? Really it would be nicer to have smarter handling
6176
     for that kind of reg class, where a problem like this is normal.
6177
     Perhaps those classes should be avoided for reloading
6178
     by use of more alternatives.  */
6179
 
6180
  int force_group = rld[r].nregs > 1 && ! last_reload;
6181
 
6182
  /* If we want a single register and haven't yet found one,
6183
     take any reg in the right class and not in use.
6184
     If we want a consecutive group, here is where we look for it.
6185
 
6186
     We use three passes so we can first look for reload regs to
6187
     reuse, which are already in use for other reloads in this insn,
6188
     and only then use additional registers which are not "bad", then
6189
     finally any register.
6190
 
6191
     I think that maximizing reuse is needed to make sure we don't
6192
     run out of reload regs.  Suppose we have three reloads, and
6193
     reloads A and B can share regs.  These need two regs.
6194
     Suppose A and B are given different regs.
6195
     That leaves none for C.  */
6196
  for (pass = 0; pass < 3; pass++)
6197
    {
6198
      /* I is the index in spill_regs.
6199
         We advance it round-robin between insns to use all spill regs
6200
         equally, so that inherited reloads have a chance
6201
         of leapfrogging each other.  */
6202
 
6203
      i = last_spill_reg;
6204
 
6205
      for (count = 0; count < n_spills; count++)
6206
        {
6207
          int rclass = (int) rld[r].rclass;
6208
          int regnum;
6209
 
6210
          i++;
6211
          if (i >= n_spills)
6212
            i -= n_spills;
6213
          regnum = spill_regs[i];
6214
 
6215
          if ((reload_reg_free_p (regnum, rld[r].opnum,
6216
                                  rld[r].when_needed)
6217
               || (rld[r].in
6218
                   /* We check reload_reg_used to make sure we
6219
                      don't clobber the return register.  */
6220
                   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6221
                   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6222
                                        rld[r].when_needed, rld[r].in,
6223
                                        rld[r].out, r, 1)))
6224
              && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6225
              && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
6226
              /* Look first for regs to share, then for unshared.  But
6227
                 don't share regs used for inherited reloads; they are
6228
                 the ones we want to preserve.  */
6229
              && (pass
6230
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6231
                                         regnum)
6232
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6233
                                              regnum))))
6234
            {
6235
              int nr = hard_regno_nregs[regnum][rld[r].mode];
6236
 
6237
              /* During the second pass we want to avoid reload registers
6238
                 which are "bad" for this reload.  */
6239
              if (pass == 1
6240
                  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6241
                continue;
6242
 
6243
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6244
                 (on 68000) got us two FP regs.  If NR is 1,
6245
                 we would reject both of them.  */
6246
              if (force_group)
6247
                nr = rld[r].nregs;
6248
              /* If we need only one reg, we have already won.  */
6249
              if (nr == 1)
6250
                {
6251
                  /* But reject a single reg if we demand a group.  */
6252
                  if (force_group)
6253
                    continue;
6254
                  break;
6255
                }
6256
              /* Otherwise check that as many consecutive regs as we need
6257
                 are available here.  */
6258
              while (nr > 1)
6259
                {
6260
                  int regno = regnum + nr - 1;
6261
                  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6262
                        && spill_reg_order[regno] >= 0
6263
                        && reload_reg_free_p (regno, rld[r].opnum,
6264
                                              rld[r].when_needed)))
6265
                    break;
6266
                  nr--;
6267
                }
6268
              if (nr == 1)
6269
                break;
6270
            }
6271
        }
6272
 
6273
      /* If we found something on the current pass, omit later passes.  */
6274
      if (count < n_spills)
6275
        break;
6276
    }
6277
 
6278
  /* We should have found a spill register by now.  */
6279
  if (count >= n_spills)
6280
    return 0;
6281
 
6282
  /* I is the index in SPILL_REG_RTX of the reload register we are to
6283
     allocate.  Get an rtx for it and find its register number.  */
6284
 
6285
  return set_reload_reg (i, r);
6286
}
6287
 
6288
/* Initialize all the tables needed to allocate reload registers.
6289
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6290
   is the array we use to restore the reg_rtx field for every reload.  */
6291
 
6292
static void
6293
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6294
{
6295
  int i;
6296
 
6297
  for (i = 0; i < n_reloads; i++)
6298
    rld[i].reg_rtx = save_reload_reg_rtx[i];
6299
 
6300
  memset (reload_inherited, 0, MAX_RELOADS);
6301
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6302
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6303
 
6304
  CLEAR_HARD_REG_SET (reload_reg_used);
6305
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6306
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6307
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6308
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6309
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6310
 
6311
  CLEAR_HARD_REG_SET (reg_used_in_insn);
6312
  {
6313
    HARD_REG_SET tmp;
6314
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6315
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6316
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6317
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6318
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6319
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6320
  }
6321
 
6322
  for (i = 0; i < reload_n_operands; i++)
6323
    {
6324
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6325
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6326
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6327
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6328
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6329
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6330
    }
6331
 
6332
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6333
 
6334
  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6335
 
6336
  for (i = 0; i < n_reloads; i++)
6337
    /* If we have already decided to use a certain register,
6338
       don't use it in another way.  */
6339
    if (rld[i].reg_rtx)
6340
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6341
                              rld[i].when_needed, rld[i].mode);
6342
}
6343
 
6344
/* Assign hard reg targets for the pseudo-registers we must reload
6345
   into hard regs for this insn.
6346
   Also output the instructions to copy them in and out of the hard regs.
6347
 
6348
   For machines with register classes, we are responsible for
6349
   finding a reload reg in the proper class.  */
6350
 
6351
static void
6352
choose_reload_regs (struct insn_chain *chain)
6353
{
6354
  rtx insn = chain->insn;
6355
  int i, j;
6356
  unsigned int max_group_size = 1;
6357
  enum reg_class group_class = NO_REGS;
6358
  int pass, win, inheritance;
6359
 
6360
  rtx save_reload_reg_rtx[MAX_RELOADS];
6361
 
6362
  /* In order to be certain of getting the registers we need,
6363
     we must sort the reloads into order of increasing register class.
6364
     Then our grabbing of reload registers will parallel the process
6365
     that provided the reload registers.
6366
 
6367
     Also note whether any of the reloads wants a consecutive group of regs.
6368
     If so, record the maximum size of the group desired and what
6369
     register class contains all the groups needed by this insn.  */
6370
 
6371
  for (j = 0; j < n_reloads; j++)
6372
    {
6373
      reload_order[j] = j;
6374
      if (rld[j].reg_rtx != NULL_RTX)
6375
        {
6376
          gcc_assert (REG_P (rld[j].reg_rtx)
6377
                      && HARD_REGISTER_P (rld[j].reg_rtx));
6378
          reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6379
        }
6380
      else
6381
        reload_spill_index[j] = -1;
6382
 
6383
      if (rld[j].nregs > 1)
6384
        {
6385
          max_group_size = MAX (rld[j].nregs, max_group_size);
6386
          group_class
6387
            = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6388
        }
6389
 
6390
      save_reload_reg_rtx[j] = rld[j].reg_rtx;
6391
    }
6392
 
6393
  if (n_reloads > 1)
6394
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6395
 
6396
  /* If -O, try first with inheritance, then turning it off.
6397
     If not -O, don't do inheritance.
6398
     Using inheritance when not optimizing leads to paradoxes
6399
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6400
     because one side of the comparison might be inherited.  */
6401
  win = 0;
6402
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6403
    {
6404
      choose_reload_regs_init (chain, save_reload_reg_rtx);
6405
 
6406
      /* Process the reloads in order of preference just found.
6407
         Beyond this point, subregs can be found in reload_reg_rtx.
6408
 
6409
         This used to look for an existing reloaded home for all of the
6410
         reloads, and only then perform any new reloads.  But that could lose
6411
         if the reloads were done out of reg-class order because a later
6412
         reload with a looser constraint might have an old home in a register
6413
         needed by an earlier reload with a tighter constraint.
6414
 
6415
         To solve this, we make two passes over the reloads, in the order
6416
         described above.  In the first pass we try to inherit a reload
6417
         from a previous insn.  If there is a later reload that needs a
6418
         class that is a proper subset of the class being processed, we must
6419
         also allocate a spill register during the first pass.
6420
 
6421
         Then make a second pass over the reloads to allocate any reloads
6422
         that haven't been given registers yet.  */
6423
 
6424
      for (j = 0; j < n_reloads; j++)
6425
        {
6426
          int r = reload_order[j];
6427
          rtx search_equiv = NULL_RTX;
6428
 
6429
          /* Ignore reloads that got marked inoperative.  */
6430
          if (rld[r].out == 0 && rld[r].in == 0
6431
              && ! rld[r].secondary_p)
6432
            continue;
6433
 
6434
          /* If find_reloads chose to use reload_in or reload_out as a reload
6435
             register, we don't need to chose one.  Otherwise, try even if it
6436
             found one since we might save an insn if we find the value lying
6437
             around.
6438
             Try also when reload_in is a pseudo without a hard reg.  */
6439
          if (rld[r].in != 0 && rld[r].reg_rtx != 0
6440
              && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6441
                  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6442
                      && !MEM_P (rld[r].in)
6443
                      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6444
            continue;
6445
 
6446
#if 0 /* No longer needed for correct operation.
6447
         It might give better code, or might not; worth an experiment?  */
6448
          /* If this is an optional reload, we can't inherit from earlier insns
6449
             until we are sure that any non-optional reloads have been allocated.
6450
             The following code takes advantage of the fact that optional reloads
6451
             are at the end of reload_order.  */
6452
          if (rld[r].optional != 0)
6453
            for (i = 0; i < j; i++)
6454
              if ((rld[reload_order[i]].out != 0
6455
                   || rld[reload_order[i]].in != 0
6456
                   || rld[reload_order[i]].secondary_p)
6457
                  && ! rld[reload_order[i]].optional
6458
                  && rld[reload_order[i]].reg_rtx == 0)
6459
                allocate_reload_reg (chain, reload_order[i], 0);
6460
#endif
6461
 
6462
          /* First see if this pseudo is already available as reloaded
6463
             for a previous insn.  We cannot try to inherit for reloads
6464
             that are smaller than the maximum number of registers needed
6465
             for groups unless the register we would allocate cannot be used
6466
             for the groups.
6467
 
6468
             We could check here to see if this is a secondary reload for
6469
             an object that is already in a register of the desired class.
6470
             This would avoid the need for the secondary reload register.
6471
             But this is complex because we can't easily determine what
6472
             objects might want to be loaded via this reload.  So let a
6473
             register be allocated here.  In `emit_reload_insns' we suppress
6474
             one of the loads in the case described above.  */
6475
 
6476
          if (inheritance)
6477
            {
6478
              int byte = 0;
6479
              int regno = -1;
6480
              enum machine_mode mode = VOIDmode;
6481
 
6482
              if (rld[r].in == 0)
6483
                ;
6484
              else if (REG_P (rld[r].in))
6485
                {
6486
                  regno = REGNO (rld[r].in);
6487
                  mode = GET_MODE (rld[r].in);
6488
                }
6489
              else if (REG_P (rld[r].in_reg))
6490
                {
6491
                  regno = REGNO (rld[r].in_reg);
6492
                  mode = GET_MODE (rld[r].in_reg);
6493
                }
6494
              else if (GET_CODE (rld[r].in_reg) == SUBREG
6495
                       && REG_P (SUBREG_REG (rld[r].in_reg)))
6496
                {
6497
                  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6498
                  if (regno < FIRST_PSEUDO_REGISTER)
6499
                    regno = subreg_regno (rld[r].in_reg);
6500
                  else
6501
                    byte = SUBREG_BYTE (rld[r].in_reg);
6502
                  mode = GET_MODE (rld[r].in_reg);
6503
                }
6504
#ifdef AUTO_INC_DEC
6505
              else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6506
                       && REG_P (XEXP (rld[r].in_reg, 0)))
6507
                {
6508
                  regno = REGNO (XEXP (rld[r].in_reg, 0));
6509
                  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6510
                  rld[r].out = rld[r].in;
6511
                }
6512
#endif
6513
#if 0
6514
              /* This won't work, since REGNO can be a pseudo reg number.
6515
                 Also, it takes much more hair to keep track of all the things
6516
                 that can invalidate an inherited reload of part of a pseudoreg.  */
6517
              else if (GET_CODE (rld[r].in) == SUBREG
6518
                       && REG_P (SUBREG_REG (rld[r].in)))
6519
                regno = subreg_regno (rld[r].in);
6520
#endif
6521
 
6522
              if (regno >= 0
6523
                  && reg_last_reload_reg[regno] != 0
6524
                  && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6525
                      >= GET_MODE_SIZE (mode) + byte)
6526
#ifdef CANNOT_CHANGE_MODE_CLASS
6527
                  /* Verify that the register it's in can be used in
6528
                     mode MODE.  */
6529
                  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6530
                                                GET_MODE (reg_last_reload_reg[regno]),
6531
                                                mode)
6532
#endif
6533
                  )
6534
                {
6535
                  enum reg_class rclass = rld[r].rclass, last_class;
6536
                  rtx last_reg = reg_last_reload_reg[regno];
6537
 
6538
                  i = REGNO (last_reg);
6539
                  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6540
                  last_class = REGNO_REG_CLASS (i);
6541
 
6542
                  if (reg_reloaded_contents[i] == regno
6543
                      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6544
                      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6545
                      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6546
                          /* Even if we can't use this register as a reload
6547
                             register, we might use it for reload_override_in,
6548
                             if copying it to the desired class is cheap
6549
                             enough.  */
6550
                          || ((register_move_cost (mode, last_class, rclass)
6551
                               < memory_move_cost (mode, rclass, true))
6552
                              && (secondary_reload_class (1, rclass, mode,
6553
                                                          last_reg)
6554
                                  == NO_REGS)
6555
#ifdef SECONDARY_MEMORY_NEEDED
6556
                              && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6557
                                                            mode)
6558
#endif
6559
                              ))
6560
 
6561
                      && (rld[r].nregs == max_group_size
6562
                          || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6563
                                                  i))
6564
                      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6565
                                           rld[r].when_needed, rld[r].in,
6566
                                           const0_rtx, r, 1))
6567
                    {
6568
                      /* If a group is needed, verify that all the subsequent
6569
                         registers still have their values intact.  */
6570
                      int nr = hard_regno_nregs[i][rld[r].mode];
6571
                      int k;
6572
 
6573
                      for (k = 1; k < nr; k++)
6574
                        if (reg_reloaded_contents[i + k] != regno
6575
                            || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6576
                          break;
6577
 
6578
                      if (k == nr)
6579
                        {
6580
                          int i1;
6581
                          int bad_for_class;
6582
 
6583
                          last_reg = (GET_MODE (last_reg) == mode
6584
                                      ? last_reg : gen_rtx_REG (mode, i));
6585
 
6586
                          bad_for_class = 0;
6587
                          for (k = 0; k < nr; k++)
6588
                            bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6589
                                                                  i+k);
6590
 
6591
                          /* We found a register that contains the
6592
                             value we need.  If this register is the
6593
                             same as an `earlyclobber' operand of the
6594
                             current insn, just mark it as a place to
6595
                             reload from since we can't use it as the
6596
                             reload register itself.  */
6597
 
6598
                          for (i1 = 0; i1 < n_earlyclobbers; i1++)
6599
                            if (reg_overlap_mentioned_for_reload_p
6600
                                (reg_last_reload_reg[regno],
6601
                                 reload_earlyclobbers[i1]))
6602
                              break;
6603
 
6604
                          if (i1 != n_earlyclobbers
6605
                              || ! (free_for_value_p (i, rld[r].mode,
6606
                                                      rld[r].opnum,
6607
                                                      rld[r].when_needed, rld[r].in,
6608
                                                      rld[r].out, r, 1))
6609
                              /* Don't use it if we'd clobber a pseudo reg.  */
6610
                              || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6611
                                  && rld[r].out
6612
                                  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6613
                              /* Don't clobber the frame pointer.  */
6614
                              || (i == HARD_FRAME_POINTER_REGNUM
6615
                                  && frame_pointer_needed
6616
                                  && rld[r].out)
6617
                              /* Don't really use the inherited spill reg
6618
                                 if we need it wider than we've got it.  */
6619
                              || (GET_MODE_SIZE (rld[r].mode)
6620
                                  > GET_MODE_SIZE (mode))
6621
                              || bad_for_class
6622
 
6623
                              /* If find_reloads chose reload_out as reload
6624
                                 register, stay with it - that leaves the
6625
                                 inherited register for subsequent reloads.  */
6626
                              || (rld[r].out && rld[r].reg_rtx
6627
                                  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6628
                            {
6629
                              if (! rld[r].optional)
6630
                                {
6631
                                  reload_override_in[r] = last_reg;
6632
                                  reload_inheritance_insn[r]
6633
                                    = reg_reloaded_insn[i];
6634
                                }
6635
                            }
6636
                          else
6637
                            {
6638
                              int k;
6639
                              /* We can use this as a reload reg.  */
6640
                              /* Mark the register as in use for this part of
6641
                                 the insn.  */
6642
                              mark_reload_reg_in_use (i,
6643
                                                      rld[r].opnum,
6644
                                                      rld[r].when_needed,
6645
                                                      rld[r].mode);
6646
                              rld[r].reg_rtx = last_reg;
6647
                              reload_inherited[r] = 1;
6648
                              reload_inheritance_insn[r]
6649
                                = reg_reloaded_insn[i];
6650
                              reload_spill_index[r] = i;
6651
                              for (k = 0; k < nr; k++)
6652
                                SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6653
                                                  i + k);
6654
                            }
6655
                        }
6656
                    }
6657
                }
6658
            }
6659
 
6660
          /* Here's another way to see if the value is already lying around.  */
6661
          if (inheritance
6662
              && rld[r].in != 0
6663
              && ! reload_inherited[r]
6664
              && rld[r].out == 0
6665
              && (CONSTANT_P (rld[r].in)
6666
                  || GET_CODE (rld[r].in) == PLUS
6667
                  || REG_P (rld[r].in)
6668
                  || MEM_P (rld[r].in))
6669
              && (rld[r].nregs == max_group_size
6670
                  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6671
            search_equiv = rld[r].in;
6672
 
6673
          if (search_equiv)
6674
            {
6675
              rtx equiv
6676
                = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6677
                                  -1, NULL, 0, rld[r].mode);
6678
              int regno = 0;
6679
 
6680
              if (equiv != 0)
6681
                {
6682
                  if (REG_P (equiv))
6683
                    regno = REGNO (equiv);
6684
                  else
6685
                    {
6686
                      /* This must be a SUBREG of a hard register.
6687
                         Make a new REG since this might be used in an
6688
                         address and not all machines support SUBREGs
6689
                         there.  */
6690
                      gcc_assert (GET_CODE (equiv) == SUBREG);
6691
                      regno = subreg_regno (equiv);
6692
                      equiv = gen_rtx_REG (rld[r].mode, regno);
6693
                      /* If we choose EQUIV as the reload register, but the
6694
                         loop below decides to cancel the inheritance, we'll
6695
                         end up reloading EQUIV in rld[r].mode, not the mode
6696
                         it had originally.  That isn't safe when EQUIV isn't
6697
                         available as a spill register since its value might
6698
                         still be live at this point.  */
6699
                      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6700
                        if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6701
                          equiv = 0;
6702
                    }
6703
                }
6704
 
6705
              /* If we found a spill reg, reject it unless it is free
6706
                 and of the desired class.  */
6707
              if (equiv != 0)
6708
                {
6709
                  int regs_used = 0;
6710
                  int bad_for_class = 0;
6711
                  int max_regno = regno + rld[r].nregs;
6712
 
6713
                  for (i = regno; i < max_regno; i++)
6714
                    {
6715
                      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6716
                                                      i);
6717
                      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6718
                                                           i);
6719
                    }
6720
 
6721
                  if ((regs_used
6722
                       && ! free_for_value_p (regno, rld[r].mode,
6723
                                              rld[r].opnum, rld[r].when_needed,
6724
                                              rld[r].in, rld[r].out, r, 1))
6725
                      || bad_for_class)
6726
                    equiv = 0;
6727
                }
6728
 
6729
              if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6730
                equiv = 0;
6731
 
6732
              /* We found a register that contains the value we need.
6733
                 If this register is the same as an `earlyclobber' operand
6734
                 of the current insn, just mark it as a place to reload from
6735
                 since we can't use it as the reload register itself.  */
6736
 
6737
              if (equiv != 0)
6738
                for (i = 0; i < n_earlyclobbers; i++)
6739
                  if (reg_overlap_mentioned_for_reload_p (equiv,
6740
                                                          reload_earlyclobbers[i]))
6741
                    {
6742
                      if (! rld[r].optional)
6743
                        reload_override_in[r] = equiv;
6744
                      equiv = 0;
6745
                      break;
6746
                    }
6747
 
6748
              /* If the equiv register we have found is explicitly clobbered
6749
                 in the current insn, it depends on the reload type if we
6750
                 can use it, use it for reload_override_in, or not at all.
6751
                 In particular, we then can't use EQUIV for a
6752
                 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6753
 
6754
              if (equiv != 0)
6755
                {
6756
                  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6757
                    switch (rld[r].when_needed)
6758
                      {
6759
                      case RELOAD_FOR_OTHER_ADDRESS:
6760
                      case RELOAD_FOR_INPADDR_ADDRESS:
6761
                      case RELOAD_FOR_INPUT_ADDRESS:
6762
                      case RELOAD_FOR_OPADDR_ADDR:
6763
                        break;
6764
                      case RELOAD_OTHER:
6765
                      case RELOAD_FOR_INPUT:
6766
                      case RELOAD_FOR_OPERAND_ADDRESS:
6767
                        if (! rld[r].optional)
6768
                          reload_override_in[r] = equiv;
6769
                        /* Fall through.  */
6770
                      default:
6771
                        equiv = 0;
6772
                        break;
6773
                      }
6774
                  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6775
                    switch (rld[r].when_needed)
6776
                      {
6777
                      case RELOAD_FOR_OTHER_ADDRESS:
6778
                      case RELOAD_FOR_INPADDR_ADDRESS:
6779
                      case RELOAD_FOR_INPUT_ADDRESS:
6780
                      case RELOAD_FOR_OPADDR_ADDR:
6781
                      case RELOAD_FOR_OPERAND_ADDRESS:
6782
                      case RELOAD_FOR_INPUT:
6783
                        break;
6784
                      case RELOAD_OTHER:
6785
                        if (! rld[r].optional)
6786
                          reload_override_in[r] = equiv;
6787
                        /* Fall through.  */
6788
                      default:
6789
                        equiv = 0;
6790
                        break;
6791
                      }
6792
                }
6793
 
6794
              /* If we found an equivalent reg, say no code need be generated
6795
                 to load it, and use it as our reload reg.  */
6796
              if (equiv != 0
6797
                  && (regno != HARD_FRAME_POINTER_REGNUM
6798
                      || !frame_pointer_needed))
6799
                {
6800
                  int nr = hard_regno_nregs[regno][rld[r].mode];
6801
                  int k;
6802
                  rld[r].reg_rtx = equiv;
6803
                  reload_spill_index[r] = regno;
6804
                  reload_inherited[r] = 1;
6805
 
6806
                  /* If reg_reloaded_valid is not set for this register,
6807
                     there might be a stale spill_reg_store lying around.
6808
                     We must clear it, since otherwise emit_reload_insns
6809
                     might delete the store.  */
6810
                  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6811
                    spill_reg_store[regno] = NULL_RTX;
6812
                  /* If any of the hard registers in EQUIV are spill
6813
                     registers, mark them as in use for this insn.  */
6814
                  for (k = 0; k < nr; k++)
6815
                    {
6816
                      i = spill_reg_order[regno + k];
6817
                      if (i >= 0)
6818
                        {
6819
                          mark_reload_reg_in_use (regno, rld[r].opnum,
6820
                                                  rld[r].when_needed,
6821
                                                  rld[r].mode);
6822
                          SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6823
                                            regno + k);
6824
                        }
6825
                    }
6826
                }
6827
            }
6828
 
6829
          /* If we found a register to use already, or if this is an optional
6830
             reload, we are done.  */
6831
          if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6832
            continue;
6833
 
6834
#if 0
6835
          /* No longer needed for correct operation.  Might or might
6836
             not give better code on the average.  Want to experiment?  */
6837
 
6838
          /* See if there is a later reload that has a class different from our
6839
             class that intersects our class or that requires less register
6840
             than our reload.  If so, we must allocate a register to this
6841
             reload now, since that reload might inherit a previous reload
6842
             and take the only available register in our class.  Don't do this
6843
             for optional reloads since they will force all previous reloads
6844
             to be allocated.  Also don't do this for reloads that have been
6845
             turned off.  */
6846
 
6847
          for (i = j + 1; i < n_reloads; i++)
6848
            {
6849
              int s = reload_order[i];
6850
 
6851
              if ((rld[s].in == 0 && rld[s].out == 0
6852
                   && ! rld[s].secondary_p)
6853
                  || rld[s].optional)
6854
                continue;
6855
 
6856
              if ((rld[s].rclass != rld[r].rclass
6857
                   && reg_classes_intersect_p (rld[r].rclass,
6858
                                               rld[s].rclass))
6859
                  || rld[s].nregs < rld[r].nregs)
6860
                break;
6861
            }
6862
 
6863
          if (i == n_reloads)
6864
            continue;
6865
 
6866
          allocate_reload_reg (chain, r, j == n_reloads - 1);
6867
#endif
6868
        }
6869
 
6870
      /* Now allocate reload registers for anything non-optional that
6871
         didn't get one yet.  */
6872
      for (j = 0; j < n_reloads; j++)
6873
        {
6874
          int r = reload_order[j];
6875
 
6876
          /* Ignore reloads that got marked inoperative.  */
6877
          if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6878
            continue;
6879
 
6880
          /* Skip reloads that already have a register allocated or are
6881
             optional.  */
6882
          if (rld[r].reg_rtx != 0 || rld[r].optional)
6883
            continue;
6884
 
6885
          if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6886
            break;
6887
        }
6888
 
6889
      /* If that loop got all the way, we have won.  */
6890
      if (j == n_reloads)
6891
        {
6892
          win = 1;
6893
          break;
6894
        }
6895
 
6896
      /* Loop around and try without any inheritance.  */
6897
    }
6898
 
6899
  if (! win)
6900
    {
6901
      /* First undo everything done by the failed attempt
6902
         to allocate with inheritance.  */
6903
      choose_reload_regs_init (chain, save_reload_reg_rtx);
6904
 
6905
      /* Some sanity tests to verify that the reloads found in the first
6906
         pass are identical to the ones we have now.  */
6907
      gcc_assert (chain->n_reloads == n_reloads);
6908
 
6909
      for (i = 0; i < n_reloads; i++)
6910
        {
6911
          if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6912
            continue;
6913
          gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6914
          for (j = 0; j < n_spills; j++)
6915
            if (spill_regs[j] == chain->rld[i].regno)
6916
              if (! set_reload_reg (j, i))
6917
                failed_reload (chain->insn, i);
6918
        }
6919
    }
6920
 
6921
  /* If we thought we could inherit a reload, because it seemed that
6922
     nothing else wanted the same reload register earlier in the insn,
6923
     verify that assumption, now that all reloads have been assigned.
6924
     Likewise for reloads where reload_override_in has been set.  */
6925
 
6926
  /* If doing expensive optimizations, do one preliminary pass that doesn't
6927
     cancel any inheritance, but removes reloads that have been needed only
6928
     for reloads that we know can be inherited.  */
6929
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6930
    {
6931
      for (j = 0; j < n_reloads; j++)
6932
        {
6933
          int r = reload_order[j];
6934
          rtx check_reg;
6935
          if (reload_inherited[r] && rld[r].reg_rtx)
6936
            check_reg = rld[r].reg_rtx;
6937
          else if (reload_override_in[r]
6938
                   && (REG_P (reload_override_in[r])
6939
                       || GET_CODE (reload_override_in[r]) == SUBREG))
6940
            check_reg = reload_override_in[r];
6941
          else
6942
            continue;
6943
          if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6944
                                  rld[r].opnum, rld[r].when_needed, rld[r].in,
6945
                                  (reload_inherited[r]
6946
                                   ? rld[r].out : const0_rtx),
6947
                                  r, 1))
6948
            {
6949
              if (pass)
6950
                continue;
6951
              reload_inherited[r] = 0;
6952
              reload_override_in[r] = 0;
6953
            }
6954
          /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6955
             reload_override_in, then we do not need its related
6956
             RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6957
             likewise for other reload types.
6958
             We handle this by removing a reload when its only replacement
6959
             is mentioned in reload_in of the reload we are going to inherit.
6960
             A special case are auto_inc expressions; even if the input is
6961
             inherited, we still need the address for the output.  We can
6962
             recognize them because they have RELOAD_OUT set to RELOAD_IN.
6963
             If we succeeded removing some reload and we are doing a preliminary
6964
             pass just to remove such reloads, make another pass, since the
6965
             removal of one reload might allow us to inherit another one.  */
6966
          else if (rld[r].in
6967
                   && rld[r].out != rld[r].in
6968
                   && remove_address_replacements (rld[r].in) && pass)
6969
            pass = 2;
6970
        }
6971
    }
6972
 
6973
  /* Now that reload_override_in is known valid,
6974
     actually override reload_in.  */
6975
  for (j = 0; j < n_reloads; j++)
6976
    if (reload_override_in[j])
6977
      rld[j].in = reload_override_in[j];
6978
 
6979
  /* If this reload won't be done because it has been canceled or is
6980
     optional and not inherited, clear reload_reg_rtx so other
6981
     routines (such as subst_reloads) don't get confused.  */
6982
  for (j = 0; j < n_reloads; j++)
6983
    if (rld[j].reg_rtx != 0
6984
        && ((rld[j].optional && ! reload_inherited[j])
6985
            || (rld[j].in == 0 && rld[j].out == 0
6986
                && ! rld[j].secondary_p)))
6987
      {
6988
        int regno = true_regnum (rld[j].reg_rtx);
6989
 
6990
        if (spill_reg_order[regno] >= 0)
6991
          clear_reload_reg_in_use (regno, rld[j].opnum,
6992
                                   rld[j].when_needed, rld[j].mode);
6993
        rld[j].reg_rtx = 0;
6994
        reload_spill_index[j] = -1;
6995
      }
6996
 
6997
  /* Record which pseudos and which spill regs have output reloads.  */
6998
  for (j = 0; j < n_reloads; j++)
6999
    {
7000
      int r = reload_order[j];
7001
 
7002
      i = reload_spill_index[r];
7003
 
7004
      /* I is nonneg if this reload uses a register.
7005
         If rld[r].reg_rtx is 0, this is an optional reload
7006
         that we opted to ignore.  */
7007
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7008
          && rld[r].reg_rtx != 0)
7009
        {
7010
          int nregno = REGNO (rld[r].out_reg);
7011
          int nr = 1;
7012
 
7013
          if (nregno < FIRST_PSEUDO_REGISTER)
7014
            nr = hard_regno_nregs[nregno][rld[r].mode];
7015
 
7016
          while (--nr >= 0)
7017
            SET_REGNO_REG_SET (&reg_has_output_reload,
7018
                               nregno + nr);
7019
 
7020
          if (i >= 0)
7021
            add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7022
 
7023
          gcc_assert (rld[r].when_needed == RELOAD_OTHER
7024
                      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7025
                      || rld[r].when_needed == RELOAD_FOR_INSN);
7026
        }
7027
    }
7028
}
7029
 
7030
/* Deallocate the reload register for reload R.  This is called from
7031
   remove_address_replacements.  */
7032
 
7033
void
7034
deallocate_reload_reg (int r)
7035
{
7036
  int regno;
7037
 
7038
  if (! rld[r].reg_rtx)
7039
    return;
7040
  regno = true_regnum (rld[r].reg_rtx);
7041
  rld[r].reg_rtx = 0;
7042
  if (spill_reg_order[regno] >= 0)
7043
    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7044
                             rld[r].mode);
7045
  reload_spill_index[r] = -1;
7046
}
7047
 
7048
/* These arrays are filled by emit_reload_insns and its subroutines.  */
7049
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
7050
static rtx other_input_address_reload_insns = 0;
7051
static rtx other_input_reload_insns = 0;
7052
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
7053
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7054
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
7055
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
7056
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7057
static rtx operand_reload_insns = 0;
7058
static rtx other_operand_reload_insns = 0;
7059
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
7060
 
7061
/* Values to be put in spill_reg_store are put here first.  Instructions
7062
   must only be placed here if the associated reload register reaches
7063
   the end of the instruction's reload sequence.  */
7064
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7065
static HARD_REG_SET reg_reloaded_died;
7066
 
7067
/* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7068
   of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7069
   is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7070
   adjusted register, and return true.  Otherwise, return false.  */
7071
static bool
7072
reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7073
                            enum reg_class new_class,
7074
                            enum machine_mode new_mode)
7075
 
7076
{
7077
  rtx reg;
7078
 
7079
  for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7080
    {
7081
      unsigned regno = REGNO (reg);
7082
 
7083
      if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7084
        continue;
7085
      if (GET_MODE (reg) != new_mode)
7086
        {
7087
          if (!HARD_REGNO_MODE_OK (regno, new_mode))
7088
            continue;
7089
          if (hard_regno_nregs[regno][new_mode]
7090
              > hard_regno_nregs[regno][GET_MODE (reg)])
7091
            continue;
7092
          reg = reload_adjust_reg_for_mode (reg, new_mode);
7093
        }
7094
      *reload_reg = reg;
7095
      return true;
7096
    }
7097
  return false;
7098
}
7099
 
7100
/* Check if *RELOAD_REG is suitable as a scratch register for the reload
7101
   pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7102
   nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7103
   adjusted register, and return true.  Otherwise, return false.  */
7104
static bool
7105
reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7106
                             enum insn_code icode)
7107
 
7108
{
7109
  enum reg_class new_class = scratch_reload_class (icode);
7110
  enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7111
 
7112
  return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7113
                                     new_class, new_mode);
7114
}
7115
 
7116
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
7117
   has the number J.  OLD contains the value to be used as input.  */
7118
 
7119
static void
7120
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7121
                         rtx old, int j)
7122
{
7123
  rtx insn = chain->insn;
7124
  rtx reloadreg;
7125
  rtx oldequiv_reg = 0;
7126
  rtx oldequiv = 0;
7127
  int special = 0;
7128
  enum machine_mode mode;
7129
  rtx *where;
7130
 
7131
  /* delete_output_reload is only invoked properly if old contains
7132
     the original pseudo register.  Since this is replaced with a
7133
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7134
     find the pseudo in RELOAD_IN_REG.  */
7135
  if (reload_override_in[j]
7136
      && REG_P (rl->in_reg))
7137
    {
7138
      oldequiv = old;
7139
      old = rl->in_reg;
7140
    }
7141
  if (oldequiv == 0)
7142
    oldequiv = old;
7143
  else if (REG_P (oldequiv))
7144
    oldequiv_reg = oldequiv;
7145
  else if (GET_CODE (oldequiv) == SUBREG)
7146
    oldequiv_reg = SUBREG_REG (oldequiv);
7147
 
7148
  reloadreg = reload_reg_rtx_for_input[j];
7149
  mode = GET_MODE (reloadreg);
7150
 
7151
  /* If we are reloading from a register that was recently stored in
7152
     with an output-reload, see if we can prove there was
7153
     actually no need to store the old value in it.  */
7154
 
7155
  if (optimize && REG_P (oldequiv)
7156
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7157
      && spill_reg_store[REGNO (oldequiv)]
7158
      && REG_P (old)
7159
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7160
          || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7161
                          rl->out_reg)))
7162
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7163
 
7164
  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7165
     OLDEQUIV.  */
7166
 
7167
  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7168
    oldequiv = SUBREG_REG (oldequiv);
7169
  if (GET_MODE (oldequiv) != VOIDmode
7170
      && mode != GET_MODE (oldequiv))
7171
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7172
 
7173
  /* Switch to the right place to emit the reload insns.  */
7174
  switch (rl->when_needed)
7175
    {
7176
    case RELOAD_OTHER:
7177
      where = &other_input_reload_insns;
7178
      break;
7179
    case RELOAD_FOR_INPUT:
7180
      where = &input_reload_insns[rl->opnum];
7181
      break;
7182
    case RELOAD_FOR_INPUT_ADDRESS:
7183
      where = &input_address_reload_insns[rl->opnum];
7184
      break;
7185
    case RELOAD_FOR_INPADDR_ADDRESS:
7186
      where = &inpaddr_address_reload_insns[rl->opnum];
7187
      break;
7188
    case RELOAD_FOR_OUTPUT_ADDRESS:
7189
      where = &output_address_reload_insns[rl->opnum];
7190
      break;
7191
    case RELOAD_FOR_OUTADDR_ADDRESS:
7192
      where = &outaddr_address_reload_insns[rl->opnum];
7193
      break;
7194
    case RELOAD_FOR_OPERAND_ADDRESS:
7195
      where = &operand_reload_insns;
7196
      break;
7197
    case RELOAD_FOR_OPADDR_ADDR:
7198
      where = &other_operand_reload_insns;
7199
      break;
7200
    case RELOAD_FOR_OTHER_ADDRESS:
7201
      where = &other_input_address_reload_insns;
7202
      break;
7203
    default:
7204
      gcc_unreachable ();
7205
    }
7206
 
7207
  push_to_sequence (*where);
7208
 
7209
  /* Auto-increment addresses must be reloaded in a special way.  */
7210
  if (rl->out && ! rl->out_reg)
7211
    {
7212
      /* We are not going to bother supporting the case where a
7213
         incremented register can't be copied directly from
7214
         OLDEQUIV since this seems highly unlikely.  */
7215
      gcc_assert (rl->secondary_in_reload < 0);
7216
 
7217
      if (reload_inherited[j])
7218
        oldequiv = reloadreg;
7219
 
7220
      old = XEXP (rl->in_reg, 0);
7221
 
7222
      /* Prevent normal processing of this reload.  */
7223
      special = 1;
7224
      /* Output a special code sequence for this case.  */
7225
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7226
    }
7227
 
7228
  /* If we are reloading a pseudo-register that was set by the previous
7229
     insn, see if we can get rid of that pseudo-register entirely
7230
     by redirecting the previous insn into our reload register.  */
7231
 
7232
  else if (optimize && REG_P (old)
7233
           && REGNO (old) >= FIRST_PSEUDO_REGISTER
7234
           && dead_or_set_p (insn, old)
7235
           /* This is unsafe if some other reload
7236
              uses the same reg first.  */
7237
           && ! conflicts_with_override (reloadreg)
7238
           && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7239
                                rl->when_needed, old, rl->out, j, 0))
7240
    {
7241
      rtx temp = PREV_INSN (insn);
7242
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7243
        temp = PREV_INSN (temp);
7244
      if (temp
7245
          && NONJUMP_INSN_P (temp)
7246
          && GET_CODE (PATTERN (temp)) == SET
7247
          && SET_DEST (PATTERN (temp)) == old
7248
          /* Make sure we can access insn_operand_constraint.  */
7249
          && asm_noperands (PATTERN (temp)) < 0
7250
          /* This is unsafe if operand occurs more than once in current
7251
             insn.  Perhaps some occurrences aren't reloaded.  */
7252
          && count_occurrences (PATTERN (insn), old, 0) == 1)
7253
        {
7254
          rtx old = SET_DEST (PATTERN (temp));
7255
          /* Store into the reload register instead of the pseudo.  */
7256
          SET_DEST (PATTERN (temp)) = reloadreg;
7257
 
7258
          /* Verify that resulting insn is valid.  */
7259
          extract_insn (temp);
7260
          if (constrain_operands (1))
7261
            {
7262
              /* If the previous insn is an output reload, the source is
7263
                 a reload register, and its spill_reg_store entry will
7264
                 contain the previous destination.  This is now
7265
                 invalid.  */
7266
              if (REG_P (SET_SRC (PATTERN (temp)))
7267
                  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7268
                {
7269
                  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7270
                  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7271
                }
7272
 
7273
              /* If these are the only uses of the pseudo reg,
7274
                 pretend for GDB it lives in the reload reg we used.  */
7275
              if (REG_N_DEATHS (REGNO (old)) == 1
7276
                  && REG_N_SETS (REGNO (old)) == 1)
7277
                {
7278
                  reg_renumber[REGNO (old)] = REGNO (reloadreg);
7279
                  if (ira_conflicts_p)
7280
                    /* Inform IRA about the change.  */
7281
                    ira_mark_allocation_change (REGNO (old));
7282
                  alter_reg (REGNO (old), -1, false);
7283
                }
7284
              special = 1;
7285
 
7286
              /* Adjust any debug insns between temp and insn.  */
7287
              while ((temp = NEXT_INSN (temp)) != insn)
7288
                if (DEBUG_INSN_P (temp))
7289
                  replace_rtx (PATTERN (temp), old, reloadreg);
7290
                else
7291
                  gcc_assert (NOTE_P (temp));
7292
            }
7293
          else
7294
            {
7295
              SET_DEST (PATTERN (temp)) = old;
7296
            }
7297
        }
7298
    }
7299
 
7300
  /* We can't do that, so output an insn to load RELOADREG.  */
7301
 
7302
  /* If we have a secondary reload, pick up the secondary register
7303
     and icode, if any.  If OLDEQUIV and OLD are different or
7304
     if this is an in-out reload, recompute whether or not we
7305
     still need a secondary register and what the icode should
7306
     be.  If we still need a secondary register and the class or
7307
     icode is different, go back to reloading from OLD if using
7308
     OLDEQUIV means that we got the wrong type of register.  We
7309
     cannot have different class or icode due to an in-out reload
7310
     because we don't make such reloads when both the input and
7311
     output need secondary reload registers.  */
7312
 
7313
  if (! special && rl->secondary_in_reload >= 0)
7314
    {
7315
      rtx second_reload_reg = 0;
7316
      rtx third_reload_reg = 0;
7317
      int secondary_reload = rl->secondary_in_reload;
7318
      rtx real_oldequiv = oldequiv;
7319
      rtx real_old = old;
7320
      rtx tmp;
7321
      enum insn_code icode;
7322
      enum insn_code tertiary_icode = CODE_FOR_nothing;
7323
 
7324
      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7325
         and similarly for OLD.
7326
         See comments in get_secondary_reload in reload.c.  */
7327
      /* If it is a pseudo that cannot be replaced with its
7328
         equivalent MEM, we must fall back to reload_in, which
7329
         will have all the necessary substitutions registered.
7330
         Likewise for a pseudo that can't be replaced with its
7331
         equivalent constant.
7332
 
7333
         Take extra care for subregs of such pseudos.  Note that
7334
         we cannot use reg_equiv_mem in this case because it is
7335
         not in the right mode.  */
7336
 
7337
      tmp = oldequiv;
7338
      if (GET_CODE (tmp) == SUBREG)
7339
        tmp = SUBREG_REG (tmp);
7340
      if (REG_P (tmp)
7341
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7342
          && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7343
              || reg_equiv_constant (REGNO (tmp)) != 0))
7344
        {
7345
          if (! reg_equiv_mem (REGNO (tmp))
7346
              || num_not_at_initial_offset
7347
              || GET_CODE (oldequiv) == SUBREG)
7348
            real_oldequiv = rl->in;
7349
          else
7350
            real_oldequiv = reg_equiv_mem (REGNO (tmp));
7351
        }
7352
 
7353
      tmp = old;
7354
      if (GET_CODE (tmp) == SUBREG)
7355
        tmp = SUBREG_REG (tmp);
7356
      if (REG_P (tmp)
7357
          && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7358
          && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7359
              || reg_equiv_constant (REGNO (tmp)) != 0))
7360
        {
7361
          if (! reg_equiv_mem (REGNO (tmp))
7362
              || num_not_at_initial_offset
7363
              || GET_CODE (old) == SUBREG)
7364
            real_old = rl->in;
7365
          else
7366
            real_old = reg_equiv_mem (REGNO (tmp));
7367
        }
7368
 
7369
      second_reload_reg = rld[secondary_reload].reg_rtx;
7370
      if (rld[secondary_reload].secondary_in_reload >= 0)
7371
        {
7372
          int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7373
 
7374
          third_reload_reg = rld[tertiary_reload].reg_rtx;
7375
          tertiary_icode = rld[secondary_reload].secondary_in_icode;
7376
          /* We'd have to add more code for quartary reloads.  */
7377
          gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7378
        }
7379
      icode = rl->secondary_in_icode;
7380
 
7381
      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7382
          || (rl->in != 0 && rl->out != 0))
7383
        {
7384
          secondary_reload_info sri, sri2;
7385
          enum reg_class new_class, new_t_class;
7386
 
7387
          sri.icode = CODE_FOR_nothing;
7388
          sri.prev_sri = NULL;
7389
          new_class
7390
            = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7391
                                                         rl->rclass, mode,
7392
                                                         &sri);
7393
 
7394
          if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7395
            second_reload_reg = 0;
7396
          else if (new_class == NO_REGS)
7397
            {
7398
              if (reload_adjust_reg_for_icode (&second_reload_reg,
7399
                                               third_reload_reg,
7400
                                               (enum insn_code) sri.icode))
7401
                {
7402
                  icode = (enum insn_code) sri.icode;
7403
                  third_reload_reg = 0;
7404
                }
7405
              else
7406
                {
7407
                  oldequiv = old;
7408
                  real_oldequiv = real_old;
7409
                }
7410
            }
7411
          else if (sri.icode != CODE_FOR_nothing)
7412
            /* We currently lack a way to express this in reloads.  */
7413
            gcc_unreachable ();
7414
          else
7415
            {
7416
              sri2.icode = CODE_FOR_nothing;
7417
              sri2.prev_sri = &sri;
7418
              new_t_class
7419
                = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7420
                                                             new_class, mode,
7421
                                                             &sri);
7422
              if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7423
                {
7424
                  if (reload_adjust_reg_for_temp (&second_reload_reg,
7425
                                                  third_reload_reg,
7426
                                                  new_class, mode))
7427
                    {
7428
                      third_reload_reg = 0;
7429
                      tertiary_icode = (enum insn_code) sri2.icode;
7430
                    }
7431
                  else
7432
                    {
7433
                      oldequiv = old;
7434
                      real_oldequiv = real_old;
7435
                    }
7436
                }
7437
              else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7438
                {
7439
                  rtx intermediate = second_reload_reg;
7440
 
7441
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7442
                                                  new_class, mode)
7443
                      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7444
                                                      ((enum insn_code)
7445
                                                       sri2.icode)))
7446
                    {
7447
                      second_reload_reg = intermediate;
7448
                      tertiary_icode = (enum insn_code) sri2.icode;
7449
                    }
7450
                  else
7451
                    {
7452
                      oldequiv = old;
7453
                      real_oldequiv = real_old;
7454
                    }
7455
                }
7456
              else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7457
                {
7458
                  rtx intermediate = second_reload_reg;
7459
 
7460
                  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7461
                                                  new_class, mode)
7462
                      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7463
                                                      new_t_class, mode))
7464
                    {
7465
                      second_reload_reg = intermediate;
7466
                      tertiary_icode = (enum insn_code) sri2.icode;
7467
                    }
7468
                  else
7469
                    {
7470
                      oldequiv = old;
7471
                      real_oldequiv = real_old;
7472
                    }
7473
                }
7474
              else
7475
                {
7476
                  /* This could be handled more intelligently too.  */
7477
                  oldequiv = old;
7478
                  real_oldequiv = real_old;
7479
                }
7480
            }
7481
        }
7482
 
7483
      /* If we still need a secondary reload register, check
7484
         to see if it is being used as a scratch or intermediate
7485
         register and generate code appropriately.  If we need
7486
         a scratch register, use REAL_OLDEQUIV since the form of
7487
         the insn may depend on the actual address if it is
7488
         a MEM.  */
7489
 
7490
      if (second_reload_reg)
7491
        {
7492
          if (icode != CODE_FOR_nothing)
7493
            {
7494
              /* We'd have to add extra code to handle this case.  */
7495
              gcc_assert (!third_reload_reg);
7496
 
7497
              emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7498
                                          second_reload_reg));
7499
              special = 1;
7500
            }
7501
          else
7502
            {
7503
              /* See if we need a scratch register to load the
7504
                 intermediate register (a tertiary reload).  */
7505
              if (tertiary_icode != CODE_FOR_nothing)
7506
                {
7507
                  emit_insn ((GEN_FCN (tertiary_icode)
7508
                              (second_reload_reg, real_oldequiv,
7509
                               third_reload_reg)));
7510
                }
7511
              else if (third_reload_reg)
7512
                {
7513
                  gen_reload (third_reload_reg, real_oldequiv,
7514
                              rl->opnum,
7515
                              rl->when_needed);
7516
                  gen_reload (second_reload_reg, third_reload_reg,
7517
                              rl->opnum,
7518
                              rl->when_needed);
7519
                }
7520
              else
7521
                gen_reload (second_reload_reg, real_oldequiv,
7522
                            rl->opnum,
7523
                            rl->when_needed);
7524
 
7525
              oldequiv = second_reload_reg;
7526
            }
7527
        }
7528
    }
7529
 
7530
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7531
    {
7532
      rtx real_oldequiv = oldequiv;
7533
 
7534
      if ((REG_P (oldequiv)
7535
           && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7536
           && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7537
               || reg_equiv_constant (REGNO (oldequiv)) != 0))
7538
          || (GET_CODE (oldequiv) == SUBREG
7539
              && REG_P (SUBREG_REG (oldequiv))
7540
              && (REGNO (SUBREG_REG (oldequiv))
7541
                  >= FIRST_PSEUDO_REGISTER)
7542
              && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7543
                  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7544
          || (CONSTANT_P (oldequiv)
7545
              && (targetm.preferred_reload_class (oldequiv,
7546
                                                  REGNO_REG_CLASS (REGNO (reloadreg)))
7547
                  == NO_REGS)))
7548
        real_oldequiv = rl->in;
7549
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
7550
                  rl->when_needed);
7551
    }
7552
 
7553
  if (cfun->can_throw_non_call_exceptions)
7554
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7555
 
7556
  /* End this sequence.  */
7557
  *where = get_insns ();
7558
  end_sequence ();
7559
 
7560
  /* Update reload_override_in so that delete_address_reloads_1
7561
     can see the actual register usage.  */
7562
  if (oldequiv_reg)
7563
    reload_override_in[j] = oldequiv;
7564
}
7565
 
7566
/* Generate insns to for the output reload RL, which is for the insn described
7567
   by CHAIN and has the number J.  */
7568
static void
7569
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7570
                          int j)
7571
{
7572
  rtx reloadreg;
7573
  rtx insn = chain->insn;
7574
  int special = 0;
7575
  rtx old = rl->out;
7576
  enum machine_mode mode;
7577
  rtx p;
7578
  rtx rl_reg_rtx;
7579
 
7580
  if (rl->when_needed == RELOAD_OTHER)
7581
    start_sequence ();
7582
  else
7583
    push_to_sequence (output_reload_insns[rl->opnum]);
7584
 
7585
  rl_reg_rtx = reload_reg_rtx_for_output[j];
7586
  mode = GET_MODE (rl_reg_rtx);
7587
 
7588
  reloadreg = rl_reg_rtx;
7589
 
7590
  /* If we need two reload regs, set RELOADREG to the intermediate
7591
     one, since it will be stored into OLD.  We might need a secondary
7592
     register only for an input reload, so check again here.  */
7593
 
7594
  if (rl->secondary_out_reload >= 0)
7595
    {
7596
      rtx real_old = old;
7597
      int secondary_reload = rl->secondary_out_reload;
7598
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7599
 
7600
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7601
          && reg_equiv_mem (REGNO (old)) != 0)
7602
        real_old = reg_equiv_mem (REGNO (old));
7603
 
7604
      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7605
        {
7606
          rtx second_reloadreg = reloadreg;
7607
          reloadreg = rld[secondary_reload].reg_rtx;
7608
 
7609
          /* See if RELOADREG is to be used as a scratch register
7610
             or as an intermediate register.  */
7611
          if (rl->secondary_out_icode != CODE_FOR_nothing)
7612
            {
7613
              /* We'd have to add extra code to handle this case.  */
7614
              gcc_assert (tertiary_reload < 0);
7615
 
7616
              emit_insn ((GEN_FCN (rl->secondary_out_icode)
7617
                          (real_old, second_reloadreg, reloadreg)));
7618
              special = 1;
7619
            }
7620
          else
7621
            {
7622
              /* See if we need both a scratch and intermediate reload
7623
                 register.  */
7624
 
7625
              enum insn_code tertiary_icode
7626
                = rld[secondary_reload].secondary_out_icode;
7627
 
7628
              /* We'd have to add more code for quartary reloads.  */
7629
              gcc_assert (tertiary_reload < 0
7630
                          || rld[tertiary_reload].secondary_out_reload < 0);
7631
 
7632
              if (GET_MODE (reloadreg) != mode)
7633
                reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7634
 
7635
              if (tertiary_icode != CODE_FOR_nothing)
7636
                {
7637
                  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7638
 
7639
                  /* Copy primary reload reg to secondary reload reg.
7640
                     (Note that these have been swapped above, then
7641
                     secondary reload reg to OLD using our insn.)  */
7642
 
7643
                  /* If REAL_OLD is a paradoxical SUBREG, remove it
7644
                     and try to put the opposite SUBREG on
7645
                     RELOADREG.  */
7646
                  strip_paradoxical_subreg (&real_old, &reloadreg);
7647
 
7648
                  gen_reload (reloadreg, second_reloadreg,
7649
                              rl->opnum, rl->when_needed);
7650
                  emit_insn ((GEN_FCN (tertiary_icode)
7651
                              (real_old, reloadreg, third_reloadreg)));
7652
                  special = 1;
7653
                }
7654
 
7655
              else
7656
                {
7657
                  /* Copy between the reload regs here and then to
7658
                     OUT later.  */
7659
 
7660
                  gen_reload (reloadreg, second_reloadreg,
7661
                              rl->opnum, rl->when_needed);
7662
                  if (tertiary_reload >= 0)
7663
                    {
7664
                      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7665
 
7666
                      gen_reload (third_reloadreg, reloadreg,
7667
                                  rl->opnum, rl->when_needed);
7668
                      reloadreg = third_reloadreg;
7669
                    }
7670
                }
7671
            }
7672
        }
7673
    }
7674
 
7675
  /* Output the last reload insn.  */
7676
  if (! special)
7677
    {
7678
      rtx set;
7679
 
7680
      /* Don't output the last reload if OLD is not the dest of
7681
         INSN and is in the src and is clobbered by INSN.  */
7682
      if (! flag_expensive_optimizations
7683
          || !REG_P (old)
7684
          || !(set = single_set (insn))
7685
          || rtx_equal_p (old, SET_DEST (set))
7686
          || !reg_mentioned_p (old, SET_SRC (set))
7687
          || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7688
               && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7689
        gen_reload (old, reloadreg, rl->opnum,
7690
                    rl->when_needed);
7691
    }
7692
 
7693
  /* Look at all insns we emitted, just to be safe.  */
7694
  for (p = get_insns (); p; p = NEXT_INSN (p))
7695
    if (INSN_P (p))
7696
      {
7697
        rtx pat = PATTERN (p);
7698
 
7699
        /* If this output reload doesn't come from a spill reg,
7700
           clear any memory of reloaded copies of the pseudo reg.
7701
           If this output reload comes from a spill reg,
7702
           reg_has_output_reload will make this do nothing.  */
7703
        note_stores (pat, forget_old_reloads_1, NULL);
7704
 
7705
        if (reg_mentioned_p (rl_reg_rtx, pat))
7706
          {
7707
            rtx set = single_set (insn);
7708
            if (reload_spill_index[j] < 0
7709
                && set
7710
                && SET_SRC (set) == rl_reg_rtx)
7711
              {
7712
                int src = REGNO (SET_SRC (set));
7713
 
7714
                reload_spill_index[j] = src;
7715
                SET_HARD_REG_BIT (reg_is_output_reload, src);
7716
                if (find_regno_note (insn, REG_DEAD, src))
7717
                  SET_HARD_REG_BIT (reg_reloaded_died, src);
7718
              }
7719
            if (HARD_REGISTER_P (rl_reg_rtx))
7720
              {
7721
                int s = rl->secondary_out_reload;
7722
                set = single_set (p);
7723
                /* If this reload copies only to the secondary reload
7724
                   register, the secondary reload does the actual
7725
                   store.  */
7726
                if (s >= 0 && set == NULL_RTX)
7727
                  /* We can't tell what function the secondary reload
7728
                     has and where the actual store to the pseudo is
7729
                     made; leave new_spill_reg_store alone.  */
7730
                  ;
7731
                else if (s >= 0
7732
                         && SET_SRC (set) == rl_reg_rtx
7733
                         && SET_DEST (set) == rld[s].reg_rtx)
7734
                  {
7735
                    /* Usually the next instruction will be the
7736
                       secondary reload insn;  if we can confirm
7737
                       that it is, setting new_spill_reg_store to
7738
                       that insn will allow an extra optimization.  */
7739
                    rtx s_reg = rld[s].reg_rtx;
7740
                    rtx next = NEXT_INSN (p);
7741
                    rld[s].out = rl->out;
7742
                    rld[s].out_reg = rl->out_reg;
7743
                    set = single_set (next);
7744
                    if (set && SET_SRC (set) == s_reg
7745
                        && reload_reg_rtx_reaches_end_p (s_reg, s))
7746
                      {
7747
                        SET_HARD_REG_BIT (reg_is_output_reload,
7748
                                          REGNO (s_reg));
7749
                        new_spill_reg_store[REGNO (s_reg)] = next;
7750
                      }
7751
                  }
7752
                else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7753
                  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7754
              }
7755
          }
7756
      }
7757
 
7758
  if (rl->when_needed == RELOAD_OTHER)
7759
    {
7760
      emit_insn (other_output_reload_insns[rl->opnum]);
7761
      other_output_reload_insns[rl->opnum] = get_insns ();
7762
    }
7763
  else
7764
    output_reload_insns[rl->opnum] = get_insns ();
7765
 
7766
  if (cfun->can_throw_non_call_exceptions)
7767
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7768
 
7769
  end_sequence ();
7770
}
7771
 
7772
/* Do input reloading for reload RL, which is for the insn described by CHAIN
7773
   and has the number J.  */
7774
static void
7775
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7776
{
7777
  rtx insn = chain->insn;
7778
  rtx old = (rl->in && MEM_P (rl->in)
7779
             ? rl->in_reg : rl->in);
7780
  rtx reg_rtx = rl->reg_rtx;
7781
 
7782
  if (old && reg_rtx)
7783
    {
7784
      enum machine_mode mode;
7785
 
7786
      /* Determine the mode to reload in.
7787
         This is very tricky because we have three to choose from.
7788
         There is the mode the insn operand wants (rl->inmode).
7789
         There is the mode of the reload register RELOADREG.
7790
         There is the intrinsic mode of the operand, which we could find
7791
         by stripping some SUBREGs.
7792
         It turns out that RELOADREG's mode is irrelevant:
7793
         we can change that arbitrarily.
7794
 
7795
         Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7796
         then the reload reg may not support QImode moves, so use SImode.
7797
         If foo is in memory due to spilling a pseudo reg, this is safe,
7798
         because the QImode value is in the least significant part of a
7799
         slot big enough for a SImode.  If foo is some other sort of
7800
         memory reference, then it is impossible to reload this case,
7801
         so previous passes had better make sure this never happens.
7802
 
7803
         Then consider a one-word union which has SImode and one of its
7804
         members is a float, being fetched as (SUBREG:SF union:SI).
7805
         We must fetch that as SFmode because we could be loading into
7806
         a float-only register.  In this case OLD's mode is correct.
7807
 
7808
         Consider an immediate integer: it has VOIDmode.  Here we need
7809
         to get a mode from something else.
7810
 
7811
         In some cases, there is a fourth mode, the operand's
7812
         containing mode.  If the insn specifies a containing mode for
7813
         this operand, it overrides all others.
7814
 
7815
         I am not sure whether the algorithm here is always right,
7816
         but it does the right things in those cases.  */
7817
 
7818
      mode = GET_MODE (old);
7819
      if (mode == VOIDmode)
7820
        mode = rl->inmode;
7821
 
7822
      /* We cannot use gen_lowpart_common since it can do the wrong thing
7823
         when REG_RTX has a multi-word mode.  Note that REG_RTX must
7824
         always be a REG here.  */
7825
      if (GET_MODE (reg_rtx) != mode)
7826
        reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7827
    }
7828
  reload_reg_rtx_for_input[j] = reg_rtx;
7829
 
7830
  if (old != 0
7831
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
7832
         AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7833
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7834
      && ! rtx_equal_p (reg_rtx, old)
7835
      && reg_rtx != 0)
7836
    emit_input_reload_insns (chain, rld + j, old, j);
7837
 
7838
  /* When inheriting a wider reload, we have a MEM in rl->in,
7839
     e.g. inheriting a SImode output reload for
7840
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7841
  if (optimize && reload_inherited[j] && rl->in
7842
      && MEM_P (rl->in)
7843
      && MEM_P (rl->in_reg)
7844
      && reload_spill_index[j] >= 0
7845
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7846
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7847
 
7848
  /* If we are reloading a register that was recently stored in with an
7849
     output-reload, see if we can prove there was
7850
     actually no need to store the old value in it.  */
7851
 
7852
  if (optimize
7853
      && (reload_inherited[j] || reload_override_in[j])
7854
      && reg_rtx
7855
      && REG_P (reg_rtx)
7856
      && spill_reg_store[REGNO (reg_rtx)] != 0
7857
#if 0
7858
      /* There doesn't seem to be any reason to restrict this to pseudos
7859
         and doing so loses in the case where we are copying from a
7860
         register of the wrong class.  */
7861
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7862
#endif
7863
      /* The insn might have already some references to stackslots
7864
         replaced by MEMs, while reload_out_reg still names the
7865
         original pseudo.  */
7866
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7867
          || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7868
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7869
}
7870
 
7871
/* Do output reloading for reload RL, which is for the insn described by
7872
   CHAIN and has the number J.
7873
   ??? At some point we need to support handling output reloads of
7874
   JUMP_INSNs or insns that set cc0.  */
7875
static void
7876
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7877
{
7878
  rtx note, old;
7879
  rtx insn = chain->insn;
7880
  /* If this is an output reload that stores something that is
7881
     not loaded in this same reload, see if we can eliminate a previous
7882
     store.  */
7883
  rtx pseudo = rl->out_reg;
7884
  rtx reg_rtx = rl->reg_rtx;
7885
 
7886
  if (rl->out && reg_rtx)
7887
    {
7888
      enum machine_mode mode;
7889
 
7890
      /* Determine the mode to reload in.
7891
         See comments above (for input reloading).  */
7892
      mode = GET_MODE (rl->out);
7893
      if (mode == VOIDmode)
7894
        {
7895
          /* VOIDmode should never happen for an output.  */
7896
          if (asm_noperands (PATTERN (insn)) < 0)
7897
            /* It's the compiler's fault.  */
7898
            fatal_insn ("VOIDmode on an output", insn);
7899
          error_for_asm (insn, "output operand is constant in %<asm%>");
7900
          /* Prevent crash--use something we know is valid.  */
7901
          mode = word_mode;
7902
          rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7903
        }
7904
      if (GET_MODE (reg_rtx) != mode)
7905
        reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7906
    }
7907
  reload_reg_rtx_for_output[j] = reg_rtx;
7908
 
7909
  if (pseudo
7910
      && optimize
7911
      && REG_P (pseudo)
7912
      && ! rtx_equal_p (rl->in_reg, pseudo)
7913
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7914
      && reg_last_reload_reg[REGNO (pseudo)])
7915
    {
7916
      int pseudo_no = REGNO (pseudo);
7917
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7918
 
7919
      /* We don't need to test full validity of last_regno for
7920
         inherit here; we only want to know if the store actually
7921
         matches the pseudo.  */
7922
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7923
          && reg_reloaded_contents[last_regno] == pseudo_no
7924
          && spill_reg_store[last_regno]
7925
          && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7926
        delete_output_reload (insn, j, last_regno, reg_rtx);
7927
    }
7928
 
7929
  old = rl->out_reg;
7930
  if (old == 0
7931
      || reg_rtx == 0
7932
      || rtx_equal_p (old, reg_rtx))
7933
    return;
7934
 
7935
  /* An output operand that dies right away does need a reload,
7936
     but need not be copied from it.  Show the new location in the
7937
     REG_UNUSED note.  */
7938
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7939
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7940
    {
7941
      XEXP (note, 0) = reg_rtx;
7942
      return;
7943
    }
7944
  /* Likewise for a SUBREG of an operand that dies.  */
7945
  else if (GET_CODE (old) == SUBREG
7946
           && REG_P (SUBREG_REG (old))
7947
           && 0 != (note = find_reg_note (insn, REG_UNUSED,
7948
                                          SUBREG_REG (old))))
7949
    {
7950
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7951
      return;
7952
    }
7953
  else if (GET_CODE (old) == SCRATCH)
7954
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
7955
       but we don't want to make an output reload.  */
7956
    return;
7957
 
7958
  /* If is a JUMP_INSN, we can't support output reloads yet.  */
7959
  gcc_assert (NONJUMP_INSN_P (insn));
7960
 
7961
  emit_output_reload_insns (chain, rld + j, j);
7962
}
7963
 
7964
/* A reload copies values of MODE from register SRC to register DEST.
7965
   Return true if it can be treated for inheritance purposes like a
7966
   group of reloads, each one reloading a single hard register.  The
7967
   caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7968
   occupy the same number of hard registers.  */
7969
 
7970
static bool
7971
inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7972
                     int src ATTRIBUTE_UNUSED,
7973
                     enum machine_mode mode ATTRIBUTE_UNUSED)
7974
{
7975
#ifdef CANNOT_CHANGE_MODE_CLASS
7976
  return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7977
          && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7978
#else
7979
  return true;
7980
#endif
7981
}
7982
 
7983
/* Output insns to reload values in and out of the chosen reload regs.  */
7984
 
7985
static void
7986
emit_reload_insns (struct insn_chain *chain)
7987
{
7988
  rtx insn = chain->insn;
7989
 
7990
  int j;
7991
 
7992
  CLEAR_HARD_REG_SET (reg_reloaded_died);
7993
 
7994
  for (j = 0; j < reload_n_operands; j++)
7995
    input_reload_insns[j] = input_address_reload_insns[j]
7996
      = inpaddr_address_reload_insns[j]
7997
      = output_reload_insns[j] = output_address_reload_insns[j]
7998
      = outaddr_address_reload_insns[j]
7999
      = other_output_reload_insns[j] = 0;
8000
  other_input_address_reload_insns = 0;
8001
  other_input_reload_insns = 0;
8002
  operand_reload_insns = 0;
8003
  other_operand_reload_insns = 0;
8004
 
8005
  /* Dump reloads into the dump file.  */
8006
  if (dump_file)
8007
    {
8008
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8009
      debug_reload_to_stream (dump_file);
8010
    }
8011
 
8012
  for (j = 0; j < n_reloads; j++)
8013
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8014
      {
8015
        unsigned int i;
8016
 
8017
        for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8018
          new_spill_reg_store[i] = 0;
8019
      }
8020
 
8021
  /* Now output the instructions to copy the data into and out of the
8022
     reload registers.  Do these in the order that the reloads were reported,
8023
     since reloads of base and index registers precede reloads of operands
8024
     and the operands may need the base and index registers reloaded.  */
8025
 
8026
  for (j = 0; j < n_reloads; j++)
8027
    {
8028
      do_input_reload (chain, rld + j, j);
8029
      do_output_reload (chain, rld + j, j);
8030
    }
8031
 
8032
  /* Now write all the insns we made for reloads in the order expected by
8033
     the allocation functions.  Prior to the insn being reloaded, we write
8034
     the following reloads:
8035
 
8036
     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8037
 
8038
     RELOAD_OTHER reloads.
8039
 
8040
     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8041
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8042
     RELOAD_FOR_INPUT reload for the operand.
8043
 
8044
     RELOAD_FOR_OPADDR_ADDRS reloads.
8045
 
8046
     RELOAD_FOR_OPERAND_ADDRESS reloads.
8047
 
8048
     After the insn being reloaded, we write the following:
8049
 
8050
     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8051
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8052
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8053
     reloads for the operand.  The RELOAD_OTHER output reloads are
8054
     output in descending order by reload number.  */
8055
 
8056
  emit_insn_before (other_input_address_reload_insns, insn);
8057
  emit_insn_before (other_input_reload_insns, insn);
8058
 
8059
  for (j = 0; j < reload_n_operands; j++)
8060
    {
8061
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
8062
      emit_insn_before (input_address_reload_insns[j], insn);
8063
      emit_insn_before (input_reload_insns[j], insn);
8064
    }
8065
 
8066
  emit_insn_before (other_operand_reload_insns, insn);
8067
  emit_insn_before (operand_reload_insns, insn);
8068
 
8069
  for (j = 0; j < reload_n_operands; j++)
8070
    {
8071
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8072
      x = emit_insn_after (output_address_reload_insns[j], x);
8073
      x = emit_insn_after (output_reload_insns[j], x);
8074
      emit_insn_after (other_output_reload_insns[j], x);
8075
    }
8076
 
8077
  /* For all the spill regs newly reloaded in this instruction,
8078
     record what they were reloaded from, so subsequent instructions
8079
     can inherit the reloads.
8080
 
8081
     Update spill_reg_store for the reloads of this insn.
8082
     Copy the elements that were updated in the loop above.  */
8083
 
8084
  for (j = 0; j < n_reloads; j++)
8085
    {
8086
      int r = reload_order[j];
8087
      int i = reload_spill_index[r];
8088
 
8089
      /* If this is a non-inherited input reload from a pseudo, we must
8090
         clear any memory of a previous store to the same pseudo.  Only do
8091
         something if there will not be an output reload for the pseudo
8092
         being reloaded.  */
8093
      if (rld[r].in_reg != 0
8094
          && ! (reload_inherited[r] || reload_override_in[r]))
8095
        {
8096
          rtx reg = rld[r].in_reg;
8097
 
8098
          if (GET_CODE (reg) == SUBREG)
8099
            reg = SUBREG_REG (reg);
8100
 
8101
          if (REG_P (reg)
8102
              && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8103
              && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8104
            {
8105
              int nregno = REGNO (reg);
8106
 
8107
              if (reg_last_reload_reg[nregno])
8108
                {
8109
                  int last_regno = REGNO (reg_last_reload_reg[nregno]);
8110
 
8111
                  if (reg_reloaded_contents[last_regno] == nregno)
8112
                    spill_reg_store[last_regno] = 0;
8113
                }
8114
            }
8115
        }
8116
 
8117
      /* I is nonneg if this reload used a register.
8118
         If rld[r].reg_rtx is 0, this is an optional reload
8119
         that we opted to ignore.  */
8120
 
8121
      if (i >= 0 && rld[r].reg_rtx != 0)
8122
        {
8123
          int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8124
          int k;
8125
 
8126
          /* For a multi register reload, we need to check if all or part
8127
             of the value lives to the end.  */
8128
          for (k = 0; k < nr; k++)
8129
            if (reload_reg_reaches_end_p (i + k, r))
8130
              CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8131
 
8132
          /* Maybe the spill reg contains a copy of reload_out.  */
8133
          if (rld[r].out != 0
8134
              && (REG_P (rld[r].out)
8135
                  || (rld[r].out_reg
8136
                      ? REG_P (rld[r].out_reg)
8137
                      /* The reload value is an auto-modification of
8138
                         some kind.  For PRE_INC, POST_INC, PRE_DEC
8139
                         and POST_DEC, we record an equivalence
8140
                         between the reload register and the operand
8141
                         on the optimistic assumption that we can make
8142
                         the equivalence hold.  reload_as_needed must
8143
                         then either make it hold or invalidate the
8144
                         equivalence.
8145
 
8146
                         PRE_MODIFY and POST_MODIFY addresses are reloaded
8147
                         somewhat differently, and allowing them here leads
8148
                         to problems.  */
8149
                      : (GET_CODE (rld[r].out) != POST_MODIFY
8150
                         && GET_CODE (rld[r].out) != PRE_MODIFY))))
8151
            {
8152
              rtx reg;
8153
 
8154
              reg = reload_reg_rtx_for_output[r];
8155
              if (reload_reg_rtx_reaches_end_p (reg, r))
8156
                {
8157
                  enum machine_mode mode = GET_MODE (reg);
8158
                  int regno = REGNO (reg);
8159
                  int nregs = hard_regno_nregs[regno][mode];
8160
                  rtx out = (REG_P (rld[r].out)
8161
                             ? rld[r].out
8162
                             : rld[r].out_reg
8163
                             ? rld[r].out_reg
8164
/* AUTO_INC */               : XEXP (rld[r].in_reg, 0));
8165
                  int out_regno = REGNO (out);
8166
                  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8167
                                   : hard_regno_nregs[out_regno][mode]);
8168
                  bool piecemeal;
8169
 
8170
                  spill_reg_store[regno] = new_spill_reg_store[regno];
8171
                  spill_reg_stored_to[regno] = out;
8172
                  reg_last_reload_reg[out_regno] = reg;
8173
 
8174
                  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8175
                               && nregs == out_nregs
8176
                               && inherit_piecemeal_p (out_regno, regno, mode));
8177
 
8178
                  /* If OUT_REGNO is a hard register, it may occupy more than
8179
                     one register.  If it does, say what is in the
8180
                     rest of the registers assuming that both registers
8181
                     agree on how many words the object takes.  If not,
8182
                     invalidate the subsequent registers.  */
8183
 
8184
                  if (HARD_REGISTER_NUM_P (out_regno))
8185
                    for (k = 1; k < out_nregs; k++)
8186
                      reg_last_reload_reg[out_regno + k]
8187
                        = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8188
 
8189
                  /* Now do the inverse operation.  */
8190
                  for (k = 0; k < nregs; k++)
8191
                    {
8192
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8193
                      reg_reloaded_contents[regno + k]
8194
                        = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8195
                           ? out_regno
8196
                           : out_regno + k);
8197
                      reg_reloaded_insn[regno + k] = insn;
8198
                      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8199
                      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8200
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8201
                                          regno + k);
8202
                      else
8203
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8204
                                            regno + k);
8205
                    }
8206
                }
8207
            }
8208
          /* Maybe the spill reg contains a copy of reload_in.  Only do
8209
             something if there will not be an output reload for
8210
             the register being reloaded.  */
8211
          else if (rld[r].out_reg == 0
8212
                   && rld[r].in != 0
8213
                   && ((REG_P (rld[r].in)
8214
                        && !HARD_REGISTER_P (rld[r].in)
8215
                        && !REGNO_REG_SET_P (&reg_has_output_reload,
8216
                                             REGNO (rld[r].in)))
8217
                       || (REG_P (rld[r].in_reg)
8218
                           && !REGNO_REG_SET_P (&reg_has_output_reload,
8219
                                                REGNO (rld[r].in_reg))))
8220
                   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8221
            {
8222
              rtx reg;
8223
 
8224
              reg = reload_reg_rtx_for_input[r];
8225
              if (reload_reg_rtx_reaches_end_p (reg, r))
8226
                {
8227
                  enum machine_mode mode;
8228
                  int regno;
8229
                  int nregs;
8230
                  int in_regno;
8231
                  int in_nregs;
8232
                  rtx in;
8233
                  bool piecemeal;
8234
 
8235
                  mode = GET_MODE (reg);
8236
                  regno = REGNO (reg);
8237
                  nregs = hard_regno_nregs[regno][mode];
8238
                  if (REG_P (rld[r].in)
8239
                      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8240
                    in = rld[r].in;
8241
                  else if (REG_P (rld[r].in_reg))
8242
                    in = rld[r].in_reg;
8243
                  else
8244
                    in = XEXP (rld[r].in_reg, 0);
8245
                  in_regno = REGNO (in);
8246
 
8247
                  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8248
                              : hard_regno_nregs[in_regno][mode]);
8249
 
8250
                  reg_last_reload_reg[in_regno] = reg;
8251
 
8252
                  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8253
                               && nregs == in_nregs
8254
                               && inherit_piecemeal_p (regno, in_regno, mode));
8255
 
8256
                  if (HARD_REGISTER_NUM_P (in_regno))
8257
                    for (k = 1; k < in_nregs; k++)
8258
                      reg_last_reload_reg[in_regno + k]
8259
                        = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8260
 
8261
                  /* Unless we inherited this reload, show we haven't
8262
                     recently done a store.
8263
                     Previous stores of inherited auto_inc expressions
8264
                     also have to be discarded.  */
8265
                  if (! reload_inherited[r]
8266
                      || (rld[r].out && ! rld[r].out_reg))
8267
                    spill_reg_store[regno] = 0;
8268
 
8269
                  for (k = 0; k < nregs; k++)
8270
                    {
8271
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8272
                      reg_reloaded_contents[regno + k]
8273
                        = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8274
                           ? in_regno
8275
                           : in_regno + k);
8276
                      reg_reloaded_insn[regno + k] = insn;
8277
                      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8278
                      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8279
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8280
                                          regno + k);
8281
                      else
8282
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8283
                                            regno + k);
8284
                    }
8285
                }
8286
            }
8287
        }
8288
 
8289
      /* The following if-statement was #if 0'd in 1.34 (or before...).
8290
         It's reenabled in 1.35 because supposedly nothing else
8291
         deals with this problem.  */
8292
 
8293
      /* If a register gets output-reloaded from a non-spill register,
8294
         that invalidates any previous reloaded copy of it.
8295
         But forget_old_reloads_1 won't get to see it, because
8296
         it thinks only about the original insn.  So invalidate it here.
8297
         Also do the same thing for RELOAD_OTHER constraints where the
8298
         output is discarded.  */
8299
      if (i < 0
8300
          && ((rld[r].out != 0
8301
               && (REG_P (rld[r].out)
8302
                   || (MEM_P (rld[r].out)
8303
                       && REG_P (rld[r].out_reg))))
8304
              || (rld[r].out == 0 && rld[r].out_reg
8305
                  && REG_P (rld[r].out_reg))))
8306
        {
8307
          rtx out = ((rld[r].out && REG_P (rld[r].out))
8308
                     ? rld[r].out : rld[r].out_reg);
8309
          int out_regno = REGNO (out);
8310
          enum machine_mode mode = GET_MODE (out);
8311
 
8312
          /* REG_RTX is now set or clobbered by the main instruction.
8313
             As the comment above explains, forget_old_reloads_1 only
8314
             sees the original instruction, and there is no guarantee
8315
             that the original instruction also clobbered REG_RTX.
8316
             For example, if find_reloads sees that the input side of
8317
             a matched operand pair dies in this instruction, it may
8318
             use the input register as the reload register.
8319
 
8320
             Calling forget_old_reloads_1 is a waste of effort if
8321
             REG_RTX is also the output register.
8322
 
8323
             If we know that REG_RTX holds the value of a pseudo
8324
             register, the code after the call will record that fact.  */
8325
          if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8326
            forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8327
 
8328
          if (!HARD_REGISTER_NUM_P (out_regno))
8329
            {
8330
              rtx src_reg, store_insn = NULL_RTX;
8331
 
8332
              reg_last_reload_reg[out_regno] = 0;
8333
 
8334
              /* If we can find a hard register that is stored, record
8335
                 the storing insn so that we may delete this insn with
8336
                 delete_output_reload.  */
8337
              src_reg = reload_reg_rtx_for_output[r];
8338
 
8339
              if (src_reg)
8340
                {
8341
                  if (reload_reg_rtx_reaches_end_p (src_reg, r))
8342
                    store_insn = new_spill_reg_store[REGNO (src_reg)];
8343
                  else
8344
                    src_reg = NULL_RTX;
8345
                }
8346
              else
8347
                {
8348
                  /* If this is an optional reload, try to find the
8349
                     source reg from an input reload.  */
8350
                  rtx set = single_set (insn);
8351
                  if (set && SET_DEST (set) == rld[r].out)
8352
                    {
8353
                      int k;
8354
 
8355
                      src_reg = SET_SRC (set);
8356
                      store_insn = insn;
8357
                      for (k = 0; k < n_reloads; k++)
8358
                        {
8359
                          if (rld[k].in == src_reg)
8360
                            {
8361
                              src_reg = reload_reg_rtx_for_input[k];
8362
                              break;
8363
                            }
8364
                        }
8365
                    }
8366
                }
8367
              if (src_reg && REG_P (src_reg)
8368
                  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8369
                {
8370
                  int src_regno, src_nregs, k;
8371
                  rtx note;
8372
 
8373
                  gcc_assert (GET_MODE (src_reg) == mode);
8374
                  src_regno = REGNO (src_reg);
8375
                  src_nregs = hard_regno_nregs[src_regno][mode];
8376
                  /* The place where to find a death note varies with
8377
                     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
8378
                     necessarily checked exactly in the code that moves
8379
                     notes, so just check both locations.  */
8380
                  note = find_regno_note (insn, REG_DEAD, src_regno);
8381
                  if (! note && store_insn)
8382
                    note = find_regno_note (store_insn, REG_DEAD, src_regno);
8383
                  for (k = 0; k < src_nregs; k++)
8384
                    {
8385
                      spill_reg_store[src_regno + k] = store_insn;
8386
                      spill_reg_stored_to[src_regno + k] = out;
8387
                      reg_reloaded_contents[src_regno + k] = out_regno;
8388
                      reg_reloaded_insn[src_regno + k] = store_insn;
8389
                      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8390
                      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8391
                      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8392
                                                          mode))
8393
                        SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8394
                                          src_regno + k);
8395
                      else
8396
                        CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8397
                                            src_regno + k);
8398
                      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8399
                      if (note)
8400
                        SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8401
                      else
8402
                        CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8403
                    }
8404
                  reg_last_reload_reg[out_regno] = src_reg;
8405
                  /* We have to set reg_has_output_reload here, or else
8406
                     forget_old_reloads_1 will clear reg_last_reload_reg
8407
                     right away.  */
8408
                  SET_REGNO_REG_SET (&reg_has_output_reload,
8409
                                     out_regno);
8410
                }
8411
            }
8412
          else
8413
            {
8414
              int k, out_nregs = hard_regno_nregs[out_regno][mode];
8415
 
8416
              for (k = 0; k < out_nregs; k++)
8417
                reg_last_reload_reg[out_regno + k] = 0;
8418
            }
8419
        }
8420
    }
8421
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8422
}
8423
 
8424
/* Go through the motions to emit INSN and test if it is strictly valid.
8425
   Return the emitted insn if valid, else return NULL.  */
8426
 
8427
static rtx
8428
emit_insn_if_valid_for_reload (rtx insn)
8429
{
8430
  rtx last = get_last_insn ();
8431
  int code;
8432
 
8433
  insn = emit_insn (insn);
8434
  code = recog_memoized (insn);
8435
 
8436
  if (code >= 0)
8437
    {
8438
      extract_insn (insn);
8439
      /* We want constrain operands to treat this insn strictly in its
8440
         validity determination, i.e., the way it would after reload has
8441
         completed.  */
8442
      if (constrain_operands (1))
8443
        return insn;
8444
    }
8445
 
8446
  delete_insns_since (last);
8447
  return NULL;
8448
}
8449
 
8450
/* Emit code to perform a reload from IN (which may be a reload register) to
8451
   OUT (which may also be a reload register).  IN or OUT is from operand
8452
   OPNUM with reload type TYPE.
8453
 
8454
   Returns first insn emitted.  */
8455
 
8456
static rtx
8457
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8458
{
8459
  rtx last = get_last_insn ();
8460
  rtx tem;
8461
 
8462
  /* If IN is a paradoxical SUBREG, remove it and try to put the
8463
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
8464
  if (!strip_paradoxical_subreg (&in, &out))
8465
    strip_paradoxical_subreg (&out, &in);
8466
 
8467
  /* How to do this reload can get quite tricky.  Normally, we are being
8468
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8469
     register that didn't get a hard register.  In that case we can just
8470
     call emit_move_insn.
8471
 
8472
     We can also be asked to reload a PLUS that adds a register or a MEM to
8473
     another register, constant or MEM.  This can occur during frame pointer
8474
     elimination and while reloading addresses.  This case is handled by
8475
     trying to emit a single insn to perform the add.  If it is not valid,
8476
     we use a two insn sequence.
8477
 
8478
     Or we can be asked to reload an unary operand that was a fragment of
8479
     an addressing mode, into a register.  If it isn't recognized as-is,
8480
     we try making the unop operand and the reload-register the same:
8481
     (set reg:X (unop:X expr:Y))
8482
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8483
 
8484
     Finally, we could be called to handle an 'o' constraint by putting
8485
     an address into a register.  In that case, we first try to do this
8486
     with a named pattern of "reload_load_address".  If no such pattern
8487
     exists, we just emit a SET insn and hope for the best (it will normally
8488
     be valid on machines that use 'o').
8489
 
8490
     This entire process is made complex because reload will never
8491
     process the insns we generate here and so we must ensure that
8492
     they will fit their constraints and also by the fact that parts of
8493
     IN might be being reloaded separately and replaced with spill registers.
8494
     Because of this, we are, in some sense, just guessing the right approach
8495
     here.  The one listed above seems to work.
8496
 
8497
     ??? At some point, this whole thing needs to be rethought.  */
8498
 
8499
  if (GET_CODE (in) == PLUS
8500
      && (REG_P (XEXP (in, 0))
8501
          || GET_CODE (XEXP (in, 0)) == SUBREG
8502
          || MEM_P (XEXP (in, 0)))
8503
      && (REG_P (XEXP (in, 1))
8504
          || GET_CODE (XEXP (in, 1)) == SUBREG
8505
          || CONSTANT_P (XEXP (in, 1))
8506
          || MEM_P (XEXP (in, 1))))
8507
    {
8508
      /* We need to compute the sum of a register or a MEM and another
8509
         register, constant, or MEM, and put it into the reload
8510
         register.  The best possible way of doing this is if the machine
8511
         has a three-operand ADD insn that accepts the required operands.
8512
 
8513
         The simplest approach is to try to generate such an insn and see if it
8514
         is recognized and matches its constraints.  If so, it can be used.
8515
 
8516
         It might be better not to actually emit the insn unless it is valid,
8517
         but we need to pass the insn as an operand to `recog' and
8518
         `extract_insn' and it is simpler to emit and then delete the insn if
8519
         not valid than to dummy things up.  */
8520
 
8521
      rtx op0, op1, tem, insn;
8522
      enum insn_code code;
8523
 
8524
      op0 = find_replacement (&XEXP (in, 0));
8525
      op1 = find_replacement (&XEXP (in, 1));
8526
 
8527
      /* Since constraint checking is strict, commutativity won't be
8528
         checked, so we need to do that here to avoid spurious failure
8529
         if the add instruction is two-address and the second operand
8530
         of the add is the same as the reload reg, which is frequently
8531
         the case.  If the insn would be A = B + A, rearrange it so
8532
         it will be A = A + B as constrain_operands expects.  */
8533
 
8534
      if (REG_P (XEXP (in, 1))
8535
          && REGNO (out) == REGNO (XEXP (in, 1)))
8536
        tem = op0, op0 = op1, op1 = tem;
8537
 
8538
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8539
        in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8540
 
8541
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8542
      if (insn)
8543
        return insn;
8544
 
8545
      /* If that failed, we must use a conservative two-insn sequence.
8546
 
8547
         Use a move to copy one operand into the reload register.  Prefer
8548
         to reload a constant, MEM or pseudo since the move patterns can
8549
         handle an arbitrary operand.  If OP1 is not a constant, MEM or
8550
         pseudo and OP1 is not a valid operand for an add instruction, then
8551
         reload OP1.
8552
 
8553
         After reloading one of the operands into the reload register, add
8554
         the reload register to the output register.
8555
 
8556
         If there is another way to do this for a specific machine, a
8557
         DEFINE_PEEPHOLE should be specified that recognizes the sequence
8558
         we emit below.  */
8559
 
8560
      code = optab_handler (add_optab, GET_MODE (out));
8561
 
8562
      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8563
          || (REG_P (op1)
8564
              && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8565
          || (code != CODE_FOR_nothing
8566
              && !insn_operand_matches (code, 2, op1)))
8567
        tem = op0, op0 = op1, op1 = tem;
8568
 
8569
      gen_reload (out, op0, opnum, type);
8570
 
8571
      /* If OP0 and OP1 are the same, we can use OUT for OP1.
8572
         This fixes a problem on the 32K where the stack pointer cannot
8573
         be used as an operand of an add insn.  */
8574
 
8575
      if (rtx_equal_p (op0, op1))
8576
        op1 = out;
8577
 
8578
      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8579
      if (insn)
8580
        {
8581
          /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
8582
          set_dst_reg_note (insn, REG_EQUIV, in, out);
8583
          return insn;
8584
        }
8585
 
8586
      /* If that failed, copy the address register to the reload register.
8587
         Then add the constant to the reload register.  */
8588
 
8589
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
8590
      gen_reload (out, op1, opnum, type);
8591
      insn = emit_insn (gen_add2_insn (out, op0));
8592
      set_dst_reg_note (insn, REG_EQUIV, in, out);
8593
    }
8594
 
8595
#ifdef SECONDARY_MEMORY_NEEDED
8596
  /* If we need a memory location to do the move, do it that way.  */
8597
  else if ((REG_P (in)
8598
            || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8599
           && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8600
           && (REG_P (out)
8601
               || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8602
           && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8603
           && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8604
                                       REGNO_REG_CLASS (reg_or_subregno (out)),
8605
                                       GET_MODE (out)))
8606
    {
8607
      /* Get the memory to use and rewrite both registers to its mode.  */
8608
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8609
 
8610
      if (GET_MODE (loc) != GET_MODE (out))
8611
        out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8612
 
8613
      if (GET_MODE (loc) != GET_MODE (in))
8614
        in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8615
 
8616
      gen_reload (loc, in, opnum, type);
8617
      gen_reload (out, loc, opnum, type);
8618
    }
8619
#endif
8620
  else if (REG_P (out) && UNARY_P (in))
8621
    {
8622
      rtx insn;
8623
      rtx op1;
8624
      rtx out_moded;
8625
      rtx set;
8626
 
8627
      op1 = find_replacement (&XEXP (in, 0));
8628
      if (op1 != XEXP (in, 0))
8629
        in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8630
 
8631
      /* First, try a plain SET.  */
8632
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8633
      if (set)
8634
        return set;
8635
 
8636
      /* If that failed, move the inner operand to the reload
8637
         register, and try the same unop with the inner expression
8638
         replaced with the reload register.  */
8639
 
8640
      if (GET_MODE (op1) != GET_MODE (out))
8641
        out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8642
      else
8643
        out_moded = out;
8644
 
8645
      gen_reload (out_moded, op1, opnum, type);
8646
 
8647
      insn
8648
        = gen_rtx_SET (VOIDmode, out,
8649
                       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8650
                                      out_moded));
8651
      insn = emit_insn_if_valid_for_reload (insn);
8652
      if (insn)
8653
        {
8654
          set_unique_reg_note (insn, REG_EQUIV, in);
8655
          return insn;
8656
        }
8657
 
8658
      fatal_insn ("failure trying to reload:", set);
8659
    }
8660
  /* If IN is a simple operand, use gen_move_insn.  */
8661
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8662
    {
8663
      tem = emit_insn (gen_move_insn (out, in));
8664
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
8665
      mark_jump_label (in, tem, 0);
8666
    }
8667
 
8668
#ifdef HAVE_reload_load_address
8669
  else if (HAVE_reload_load_address)
8670
    emit_insn (gen_reload_load_address (out, in));
8671
#endif
8672
 
8673
  /* Otherwise, just write (set OUT IN) and hope for the best.  */
8674
  else
8675
    emit_insn (gen_rtx_SET (VOIDmode, out, in));
8676
 
8677
  /* Return the first insn emitted.
8678
     We can not just return get_last_insn, because there may have
8679
     been multiple instructions emitted.  Also note that gen_move_insn may
8680
     emit more than one insn itself, so we can not assume that there is one
8681
     insn emitted per emit_insn_before call.  */
8682
 
8683
  return last ? NEXT_INSN (last) : get_insns ();
8684
}
8685
 
8686
/* Delete a previously made output-reload whose result we now believe
8687
   is not needed.  First we double-check.
8688
 
8689
   INSN is the insn now being processed.
8690
   LAST_RELOAD_REG is the hard register number for which we want to delete
8691
   the last output reload.
8692
   J is the reload-number that originally used REG.  The caller has made
8693
   certain that reload J doesn't use REG any longer for input.
8694
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */
8695
 
8696
static void
8697
delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8698
{
8699
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
8700
  rtx reg = spill_reg_stored_to[last_reload_reg];
8701
  int k;
8702
  int n_occurrences;
8703
  int n_inherited = 0;
8704
  rtx i1;
8705
  rtx substed;
8706
  unsigned regno;
8707
  int nregs;
8708
 
8709
  /* It is possible that this reload has been only used to set another reload
8710
     we eliminated earlier and thus deleted this instruction too.  */
8711
  if (INSN_DELETED_P (output_reload_insn))
8712
    return;
8713
 
8714
  /* Get the raw pseudo-register referred to.  */
8715
 
8716
  while (GET_CODE (reg) == SUBREG)
8717
    reg = SUBREG_REG (reg);
8718
  substed = reg_equiv_memory_loc (REGNO (reg));
8719
 
8720
  /* This is unsafe if the operand occurs more often in the current
8721
     insn than it is inherited.  */
8722
  for (k = n_reloads - 1; k >= 0; k--)
8723
    {
8724
      rtx reg2 = rld[k].in;
8725
      if (! reg2)
8726
        continue;
8727
      if (MEM_P (reg2) || reload_override_in[k])
8728
        reg2 = rld[k].in_reg;
8729
#ifdef AUTO_INC_DEC
8730
      if (rld[k].out && ! rld[k].out_reg)
8731
        reg2 = XEXP (rld[k].in_reg, 0);
8732
#endif
8733
      while (GET_CODE (reg2) == SUBREG)
8734
        reg2 = SUBREG_REG (reg2);
8735
      if (rtx_equal_p (reg2, reg))
8736
        {
8737
          if (reload_inherited[k] || reload_override_in[k] || k == j)
8738
            n_inherited++;
8739
          else
8740
            return;
8741
        }
8742
    }
8743
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8744
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8745
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8746
                                        reg, 0);
8747
  if (substed)
8748
    n_occurrences += count_occurrences (PATTERN (insn),
8749
                                        eliminate_regs (substed, VOIDmode,
8750
                                                        NULL_RTX), 0);
8751
  for (i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
8752
    {
8753
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8754
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8755
    }
8756
  if (n_occurrences > n_inherited)
8757
    return;
8758
 
8759
  regno = REGNO (reg);
8760
  if (regno >= FIRST_PSEUDO_REGISTER)
8761
    nregs = 1;
8762
  else
8763
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];
8764
 
8765
  /* If the pseudo-reg we are reloading is no longer referenced
8766
     anywhere between the store into it and here,
8767
     and we're within the same basic block, then the value can only
8768
     pass through the reload reg and end up here.
8769
     Otherwise, give up--return.  */
8770
  for (i1 = NEXT_INSN (output_reload_insn);
8771
       i1 != insn; i1 = NEXT_INSN (i1))
8772
    {
8773
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
8774
        return;
8775
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8776
          && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8777
        {
8778
          /* If this is USE in front of INSN, we only have to check that
8779
             there are no more references than accounted for by inheritance.  */
8780
          while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8781
            {
8782
              n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8783
              i1 = NEXT_INSN (i1);
8784
            }
8785
          if (n_occurrences <= n_inherited && i1 == insn)
8786
            break;
8787
          return;
8788
        }
8789
    }
8790
 
8791
  /* We will be deleting the insn.  Remove the spill reg information.  */
8792
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8793
    {
8794
      spill_reg_store[last_reload_reg + k] = 0;
8795
      spill_reg_stored_to[last_reload_reg + k] = 0;
8796
    }
8797
 
8798
  /* The caller has already checked that REG dies or is set in INSN.
8799
     It has also checked that we are optimizing, and thus some
8800
     inaccuracies in the debugging information are acceptable.
8801
     So we could just delete output_reload_insn.  But in some cases
8802
     we can improve the debugging information without sacrificing
8803
     optimization - maybe even improving the code: See if the pseudo
8804
     reg has been completely replaced with reload regs.  If so, delete
8805
     the store insn and forget we had a stack slot for the pseudo.  */
8806
  if (rld[j].out != rld[j].in
8807
      && REG_N_DEATHS (REGNO (reg)) == 1
8808
      && REG_N_SETS (REGNO (reg)) == 1
8809
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8810
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8811
    {
8812
      rtx i2;
8813
 
8814
      /* We know that it was used only between here and the beginning of
8815
         the current basic block.  (We also know that the last use before
8816
         INSN was the output reload we are thinking of deleting, but never
8817
         mind that.)  Search that range; see if any ref remains.  */
8818
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8819
        {
8820
          rtx set = single_set (i2);
8821
 
8822
          /* Uses which just store in the pseudo don't count,
8823
             since if they are the only uses, they are dead.  */
8824
          if (set != 0 && SET_DEST (set) == reg)
8825
            continue;
8826
          if (LABEL_P (i2)
8827
              || JUMP_P (i2))
8828
            break;
8829
          if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8830
              && reg_mentioned_p (reg, PATTERN (i2)))
8831
            {
8832
              /* Some other ref remains; just delete the output reload we
8833
                 know to be dead.  */
8834
              delete_address_reloads (output_reload_insn, insn);
8835
              delete_insn (output_reload_insn);
8836
              return;
8837
            }
8838
        }
8839
 
8840
      /* Delete the now-dead stores into this pseudo.  Note that this
8841
         loop also takes care of deleting output_reload_insn.  */
8842
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8843
        {
8844
          rtx set = single_set (i2);
8845
 
8846
          if (set != 0 && SET_DEST (set) == reg)
8847
            {
8848
              delete_address_reloads (i2, insn);
8849
              delete_insn (i2);
8850
            }
8851
          if (LABEL_P (i2)
8852
              || JUMP_P (i2))
8853
            break;
8854
        }
8855
 
8856
      /* For the debugging info, say the pseudo lives in this reload reg.  */
8857
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8858
      if (ira_conflicts_p)
8859
        /* Inform IRA about the change.  */
8860
        ira_mark_allocation_change (REGNO (reg));
8861
      alter_reg (REGNO (reg), -1, false);
8862
    }
8863
  else
8864
    {
8865
      delete_address_reloads (output_reload_insn, insn);
8866
      delete_insn (output_reload_insn);
8867
    }
8868
}
8869
 
8870
/* We are going to delete DEAD_INSN.  Recursively delete loads of
8871
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8872
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8873
static void
8874
delete_address_reloads (rtx dead_insn, rtx current_insn)
8875
{
8876
  rtx set = single_set (dead_insn);
8877
  rtx set2, dst, prev, next;
8878
  if (set)
8879
    {
8880
      rtx dst = SET_DEST (set);
8881
      if (MEM_P (dst))
8882
        delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8883
    }
8884
  /* If we deleted the store from a reloaded post_{in,de}c expression,
8885
     we can delete the matching adds.  */
8886
  prev = PREV_INSN (dead_insn);
8887
  next = NEXT_INSN (dead_insn);
8888
  if (! prev || ! next)
8889
    return;
8890
  set = single_set (next);
8891
  set2 = single_set (prev);
8892
  if (! set || ! set2
8893
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8894
      || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8895
      || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8896
    return;
8897
  dst = SET_DEST (set);
8898
  if (! rtx_equal_p (dst, SET_DEST (set2))
8899
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8900
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8901
      || (INTVAL (XEXP (SET_SRC (set), 1))
8902
          != -INTVAL (XEXP (SET_SRC (set2), 1))))
8903
    return;
8904
  delete_related_insns (prev);
8905
  delete_related_insns (next);
8906
}
8907
 
8908
/* Subfunction of delete_address_reloads: process registers found in X.  */
8909
static void
8910
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8911
{
8912
  rtx prev, set, dst, i2;
8913
  int i, j;
8914
  enum rtx_code code = GET_CODE (x);
8915
 
8916
  if (code != REG)
8917
    {
8918
      const char *fmt = GET_RTX_FORMAT (code);
8919
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8920
        {
8921
          if (fmt[i] == 'e')
8922
            delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8923
          else if (fmt[i] == 'E')
8924
            {
8925
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8926
                delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8927
                                          current_insn);
8928
            }
8929
        }
8930
      return;
8931
    }
8932
 
8933
  if (spill_reg_order[REGNO (x)] < 0)
8934
    return;
8935
 
8936
  /* Scan backwards for the insn that sets x.  This might be a way back due
8937
     to inheritance.  */
8938
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8939
    {
8940
      code = GET_CODE (prev);
8941
      if (code == CODE_LABEL || code == JUMP_INSN)
8942
        return;
8943
      if (!INSN_P (prev))
8944
        continue;
8945
      if (reg_set_p (x, PATTERN (prev)))
8946
        break;
8947
      if (reg_referenced_p (x, PATTERN (prev)))
8948
        return;
8949
    }
8950
  if (! prev || INSN_UID (prev) < reload_first_uid)
8951
    return;
8952
  /* Check that PREV only sets the reload register.  */
8953
  set = single_set (prev);
8954
  if (! set)
8955
    return;
8956
  dst = SET_DEST (set);
8957
  if (!REG_P (dst)
8958
      || ! rtx_equal_p (dst, x))
8959
    return;
8960
  if (! reg_set_p (dst, PATTERN (dead_insn)))
8961
    {
8962
      /* Check if DST was used in a later insn -
8963
         it might have been inherited.  */
8964
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8965
        {
8966
          if (LABEL_P (i2))
8967
            break;
8968
          if (! INSN_P (i2))
8969
            continue;
8970
          if (reg_referenced_p (dst, PATTERN (i2)))
8971
            {
8972
              /* If there is a reference to the register in the current insn,
8973
                 it might be loaded in a non-inherited reload.  If no other
8974
                 reload uses it, that means the register is set before
8975
                 referenced.  */
8976
              if (i2 == current_insn)
8977
                {
8978
                  for (j = n_reloads - 1; j >= 0; j--)
8979
                    if ((rld[j].reg_rtx == dst && reload_inherited[j])
8980
                        || reload_override_in[j] == dst)
8981
                      return;
8982
                  for (j = n_reloads - 1; j >= 0; j--)
8983
                    if (rld[j].in && rld[j].reg_rtx == dst)
8984
                      break;
8985
                  if (j >= 0)
8986
                    break;
8987
                }
8988
              return;
8989
            }
8990
          if (JUMP_P (i2))
8991
            break;
8992
          /* If DST is still live at CURRENT_INSN, check if it is used for
8993
             any reload.  Note that even if CURRENT_INSN sets DST, we still
8994
             have to check the reloads.  */
8995
          if (i2 == current_insn)
8996
            {
8997
              for (j = n_reloads - 1; j >= 0; j--)
8998
                if ((rld[j].reg_rtx == dst && reload_inherited[j])
8999
                    || reload_override_in[j] == dst)
9000
                  return;
9001
              /* ??? We can't finish the loop here, because dst might be
9002
                 allocated to a pseudo in this block if no reload in this
9003
                 block needs any of the classes containing DST - see
9004
                 spill_hard_reg.  There is no easy way to tell this, so we
9005
                 have to scan till the end of the basic block.  */
9006
            }
9007
          if (reg_set_p (dst, PATTERN (i2)))
9008
            break;
9009
        }
9010
    }
9011
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9012
  reg_reloaded_contents[REGNO (dst)] = -1;
9013
  delete_insn (prev);
9014
}
9015
 
9016
/* Output reload-insns to reload VALUE into RELOADREG.
9017
   VALUE is an autoincrement or autodecrement RTX whose operand
9018
   is a register or memory location;
9019
   so reloading involves incrementing that location.
9020
   IN is either identical to VALUE, or some cheaper place to reload from.
9021
 
9022
   INC_AMOUNT is the number to increment or decrement by (always positive).
9023
   This cannot be deduced from VALUE.  */
9024
 
9025
static void
9026
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
9027
{
9028
  /* REG or MEM to be copied and incremented.  */
9029
  rtx incloc = find_replacement (&XEXP (value, 0));
9030
  /* Nonzero if increment after copying.  */
9031
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9032
              || GET_CODE (value) == POST_MODIFY);
9033
  rtx last;
9034
  rtx inc;
9035
  rtx add_insn;
9036
  int code;
9037
  rtx real_in = in == value ? incloc : in;
9038
 
9039
  /* No hard register is equivalent to this register after
9040
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
9041
     we could inc/dec that register as well (maybe even using it for
9042
     the source), but I'm not sure it's worth worrying about.  */
9043
  if (REG_P (incloc))
9044
    reg_last_reload_reg[REGNO (incloc)] = 0;
9045
 
9046
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9047
    {
9048
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9049
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9050
    }
9051
  else
9052
    {
9053
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9054
        inc_amount = -inc_amount;
9055
 
9056
      inc = GEN_INT (inc_amount);
9057
    }
9058
 
9059
  /* If this is post-increment, first copy the location to the reload reg.  */
9060
  if (post && real_in != reloadreg)
9061
    emit_insn (gen_move_insn (reloadreg, real_in));
9062
 
9063
  if (in == value)
9064
    {
9065
      /* See if we can directly increment INCLOC.  Use a method similar to
9066
         that in gen_reload.  */
9067
 
9068
      last = get_last_insn ();
9069
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
9070
                                         gen_rtx_PLUS (GET_MODE (incloc),
9071
                                                       incloc, inc)));
9072
 
9073
      code = recog_memoized (add_insn);
9074
      if (code >= 0)
9075
        {
9076
          extract_insn (add_insn);
9077
          if (constrain_operands (1))
9078
            {
9079
              /* If this is a pre-increment and we have incremented the value
9080
                 where it lives, copy the incremented value to RELOADREG to
9081
                 be used as an address.  */
9082
 
9083
              if (! post)
9084
                emit_insn (gen_move_insn (reloadreg, incloc));
9085
              return;
9086
            }
9087
        }
9088
      delete_insns_since (last);
9089
    }
9090
 
9091
  /* If couldn't do the increment directly, must increment in RELOADREG.
9092
     The way we do this depends on whether this is pre- or post-increment.
9093
     For pre-increment, copy INCLOC to the reload register, increment it
9094
     there, then save back.  */
9095
 
9096
  if (! post)
9097
    {
9098
      if (in != reloadreg)
9099
        emit_insn (gen_move_insn (reloadreg, real_in));
9100
      emit_insn (gen_add2_insn (reloadreg, inc));
9101
      emit_insn (gen_move_insn (incloc, reloadreg));
9102
    }
9103
  else
9104
    {
9105
      /* Postincrement.
9106
         Because this might be a jump insn or a compare, and because RELOADREG
9107
         may not be available after the insn in an input reload, we must do
9108
         the incrementation before the insn being reloaded for.
9109
 
9110
         We have already copied IN to RELOADREG.  Increment the copy in
9111
         RELOADREG, save that back, then decrement RELOADREG so it has
9112
         the original value.  */
9113
 
9114
      emit_insn (gen_add2_insn (reloadreg, inc));
9115
      emit_insn (gen_move_insn (incloc, reloadreg));
9116
      if (CONST_INT_P (inc))
9117
        emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
9118
      else
9119
        emit_insn (gen_sub2_insn (reloadreg, inc));
9120
    }
9121
}
9122
 
9123
#ifdef AUTO_INC_DEC
9124
static void
9125
add_auto_inc_notes (rtx insn, rtx x)
9126
{
9127
  enum rtx_code code = GET_CODE (x);
9128
  const char *fmt;
9129
  int i, j;
9130
 
9131
  if (code == MEM && auto_inc_p (XEXP (x, 0)))
9132
    {
9133
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9134
      return;
9135
    }
9136
 
9137
  /* Scan all the operand sub-expressions.  */
9138
  fmt = GET_RTX_FORMAT (code);
9139
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9140
    {
9141
      if (fmt[i] == 'e')
9142
        add_auto_inc_notes (insn, XEXP (x, i));
9143
      else if (fmt[i] == 'E')
9144
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9145
          add_auto_inc_notes (insn, XVECEXP (x, i, j));
9146
    }
9147
}
9148
#endif

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.