OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

openrisc/trunk/gnu-stable/gcc-4.5.1/gcc/postreload.c - Blame information for rev 859


Line No. Rev Author Line
1 280 jeremybenn
/* Perform simple optimizations to clean up the result of reload.
2
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4
   2010 Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
 
27
#include "machmode.h"
28
#include "hard-reg-set.h"
29
#include "rtl.h"
30
#include "tm_p.h"
31
#include "obstack.h"
32
#include "insn-config.h"
33
#include "flags.h"
34
#include "function.h"
35
#include "expr.h"
36
#include "optabs.h"
37
#include "regs.h"
38
#include "basic-block.h"
39
#include "reload.h"
40
#include "recog.h"
41
#include "output.h"
42
#include "cselib.h"
43
#include "real.h"
44
#include "toplev.h"
45
#include "except.h"
46
#include "tree.h"
47
#include "timevar.h"
48
#include "tree-pass.h"
49
#include "df.h"
50
#include "dbgcnt.h"
51
 
52
static int reload_cse_noop_set_p (rtx);
53
static void reload_cse_simplify (rtx, rtx);
54
static void reload_cse_regs_1 (rtx);
55
static int reload_cse_simplify_set (rtx, rtx);
56
static int reload_cse_simplify_operands (rtx, rtx);
57
 
58
static void reload_combine (void);
59
static void reload_combine_note_use (rtx *, rtx);
60
static void reload_combine_note_store (rtx, const_rtx, void *);
61
 
62
static void reload_cse_move2add (rtx);
63
static void move2add_note_store (rtx, const_rtx, void *);
64
 
65
/* Call cse / combine like post-reload optimization phases.
66
   FIRST is the first instruction.  */
67
void
68
reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
69
{
70
  reload_cse_regs_1 (first);
71
  reload_combine ();
72
  reload_cse_move2add (first);
73
  if (flag_expensive_optimizations)
74
    reload_cse_regs_1 (first);
75
}
76
 
77
/* See whether a single set SET is a noop.  */
78
static int
79
reload_cse_noop_set_p (rtx set)
80
{
81
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
82
    return 0;
83
 
84
  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
85
}
86
 
87
/* Try to simplify INSN.  */
88
static void
89
reload_cse_simplify (rtx insn, rtx testreg)
90
{
91
  rtx body = PATTERN (insn);
92
 
93
  if (GET_CODE (body) == SET)
94
    {
95
      int count = 0;
96
 
97
      /* Simplify even if we may think it is a no-op.
98
         We may think a memory load of a value smaller than WORD_SIZE
99
         is redundant because we haven't taken into account possible
100
         implicit extension.  reload_cse_simplify_set() will bring
101
         this out, so it's safer to simplify before we delete.  */
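      /* For illustration (register numbers are arbitrary): on a hypothetical
         target where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, an insn such as
             (set (reg:QI 3) (mem:QI (reg:SI 4)))
         also zero-extends into the rest of the word-sized hard register, so
         deleting it as "redundant" could lose that implicit extension;
         simplifying first lets the extension become explicit instead.  */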
102
      count += reload_cse_simplify_set (body, insn);
103
 
104
      if (!count && reload_cse_noop_set_p (body))
105
        {
106
          rtx value = SET_DEST (body);
107
          if (REG_P (value)
108
              && ! REG_FUNCTION_VALUE_P (value))
109
            value = 0;
110
          delete_insn_and_edges (insn);
111
          return;
112
        }
113
 
114
      if (count > 0)
115
        apply_change_group ();
116
      else
117
        reload_cse_simplify_operands (insn, testreg);
118
    }
119
  else if (GET_CODE (body) == PARALLEL)
120
    {
121
      int i;
122
      int count = 0;
123
      rtx value = NULL_RTX;
124
 
125
      /* Registers mentioned in the clobber list for an asm cannot be reused
126
         within the body of the asm.  Invalidate those registers now so that
127
         we don't try to substitute values for them.  */
128
      if (asm_noperands (body) >= 0)
129
        {
130
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
131
            {
132
              rtx part = XVECEXP (body, 0, i);
133
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
134
                cselib_invalidate_rtx (XEXP (part, 0));
135
            }
136
        }
137
 
138
      /* If every action in a PARALLEL is a noop, we can delete
139
         the entire PARALLEL.  */
140
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
141
        {
142
          rtx part = XVECEXP (body, 0, i);
143
          if (GET_CODE (part) == SET)
144
            {
145
              if (! reload_cse_noop_set_p (part))
146
                break;
147
              if (REG_P (SET_DEST (part))
148
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
149
                {
150
                  if (value)
151
                    break;
152
                  value = SET_DEST (part);
153
                }
154
            }
155
          else if (GET_CODE (part) != CLOBBER)
156
            break;
157
        }
158
 
159
      if (i < 0)
160
        {
161
          delete_insn_and_edges (insn);
162
          /* We're done with this insn.  */
163
          return;
164
        }
165
 
166
      /* It's not a no-op, but we can try to simplify it.  */
167
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
168
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
169
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
170
 
171
      if (count > 0)
172
        apply_change_group ();
173
      else
174
        reload_cse_simplify_operands (insn, testreg);
175
    }
176
}
177
 
178
/* Do a very simple CSE pass over the hard registers.
179
 
180
   This function detects no-op moves where we happened to assign two
181
   different pseudo-registers to the same hard register, and then
182
   copied one to the other.  Reload will generate a useless
183
   instruction copying a register to itself.
184
 
185
   This function also detects cases where we load a value from memory
186
   into two different registers, and (if memory is more expensive than
187
   registers) changes it to simply copy the first register into the
188
   second register.
189
 
190
   Another optimization is performed that scans the operands of each
191
   instruction to see whether the value is already available in a
192
   hard register.  It then replaces the operand with the hard register
193
   if possible, much like an optional reload would.  */
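/* For illustration (hard register numbers are arbitrary): after reload the
   insn stream may contain

       (set (reg:SI 3) (reg:SI 3))            ;; no-op move -- deleted
       (set (reg:SI 4) (mem:SI (reg:SI 6)))
       ...
       (set (reg:SI 5) (mem:SI (reg:SI 6)))   ;; value already live in reg 4

   and, when a register copy is cheaper than the memory access, the second
   load may be rewritten as (set (reg:SI 5) (reg:SI 4)).  */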
194
 
195
static void
196
reload_cse_regs_1 (rtx first)
197
{
198
  rtx insn;
199
  rtx testreg = gen_rtx_REG (VOIDmode, -1);
200
 
201
  cselib_init (CSELIB_RECORD_MEMORY);
202
  init_alias_analysis ();
203
 
204
  for (insn = first; insn; insn = NEXT_INSN (insn))
205
    {
206
      if (INSN_P (insn))
207
        reload_cse_simplify (insn, testreg);
208
 
209
      cselib_process_insn (insn);
210
    }
211
 
212
  /* Clean up.  */
213
  end_alias_analysis ();
214
  cselib_finish ();
215
}
216
 
217
/* Try to simplify a single SET instruction.  SET is the set pattern.
218
   INSN is the instruction it came from.
219
   This function only handles one case: if we set a register to a value
220
   which is not a register, we try to find that value in some other register
221
   and change the set into a register copy.  */
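/* For example (register numbers are arbitrary): given

       (set (reg:SI 3) (const_int 42))

   when cselib knows that hard register 5 already holds the value 42, the
   source can be replaced to give (set (reg:SI 3) (reg:SI 5)), provided the
   register copy is no more expensive than loading the constant.  */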
222
 
223
static int
224
reload_cse_simplify_set (rtx set, rtx insn)
225
{
226
  int did_change = 0;
227
  int dreg;
228
  rtx src;
229
  enum reg_class dclass;
230
  int old_cost;
231
  cselib_val *val;
232
  struct elt_loc_list *l;
233
#ifdef LOAD_EXTEND_OP
234
  enum rtx_code extend_op = UNKNOWN;
235
#endif
236
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
237
 
238
  dreg = true_regnum (SET_DEST (set));
239
  if (dreg < 0)
240
    return 0;
241
 
242
  src = SET_SRC (set);
243
  if (side_effects_p (src) || true_regnum (src) >= 0)
244
    return 0;
245
 
246
  dclass = REGNO_REG_CLASS (dreg);
247
 
248
#ifdef LOAD_EXTEND_OP
249
  /* When replacing a memory with a register, we need to honor assumptions
250
     that combine made wrt the contents of sign bits.  We'll do this by
251
     generating an extend instruction instead of a reg->reg copy.  Thus
252
     the destination must be a register that we can widen.  */
253
  if (MEM_P (src)
254
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
255
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
256
      && !REG_P (SET_DEST (set)))
257
    return 0;
258
#endif
259
 
260
  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
261
  if (! val)
262
    return 0;
263
 
264
  /* If memory loads are cheaper than register copies, don't change them.  */
265
  if (MEM_P (src))
266
    old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
267
  else if (REG_P (src))
268
    old_cost = REGISTER_MOVE_COST (GET_MODE (src),
269
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
270
  else
271
    old_cost = rtx_cost (src, SET, speed);
272
 
273
  for (l = val->locs; l; l = l->next)
274
    {
275
      rtx this_rtx = l->loc;
276
      int this_cost;
277
 
278
      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
279
        {
280
#ifdef LOAD_EXTEND_OP
281
          if (extend_op != UNKNOWN)
282
            {
283
              HOST_WIDE_INT this_val;
284
 
285
              /* ??? I'm lazy and don't wish to handle CONST_DOUBLE.  Other
286
                 constants, such as SYMBOL_REF, cannot be extended.  */
287
              if (!CONST_INT_P (this_rtx))
288
                continue;
289
 
290
              this_val = INTVAL (this_rtx);
291
              switch (extend_op)
292
                {
293
                case ZERO_EXTEND:
294
                  this_val &= GET_MODE_MASK (GET_MODE (src));
295
                  break;
296
                case SIGN_EXTEND:
297
                  /* ??? In theory we're already extended.  */
298
                  if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
299
                    break;
300
                default:
301
                  gcc_unreachable ();
302
                }
303
              this_rtx = GEN_INT (this_val);
304
            }
305
#endif
306
          this_cost = rtx_cost (this_rtx, SET, speed);
307
        }
308
      else if (REG_P (this_rtx))
309
        {
310
#ifdef LOAD_EXTEND_OP
311
          if (extend_op != UNKNOWN)
312
            {
313
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
314
              this_cost = rtx_cost (this_rtx, SET, speed);
315
            }
316
          else
317
#endif
318
            this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
319
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
320
                                            dclass);
321
        }
322
      else
323
        continue;
324
 
325
      /* If equal costs, prefer registers over anything else.  That
326
         tends to lead to smaller instructions on some machines.  */
327
      if (this_cost < old_cost
328
          || (this_cost == old_cost
329
              && REG_P (this_rtx)
330
              && !REG_P (SET_SRC (set))))
331
        {
332
#ifdef LOAD_EXTEND_OP
333
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
334
              && extend_op != UNKNOWN
335
#ifdef CANNOT_CHANGE_MODE_CLASS
336
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
337
                                            word_mode,
338
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
339
#endif
340
              )
341
            {
342
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
343
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
344
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
345
            }
346
#endif
347
 
348
          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
349
          old_cost = this_cost, did_change = 1;
350
        }
351
    }
352
 
353
  return did_change;
354
}
355
 
356
/* Try to replace operands in INSN with equivalent values that are already
357
   in registers.  This can be viewed as optional reloading.
358
 
359
   For each non-register operand in the insn, see if any hard regs are
360
   known to be equivalent to that operand.  Record the alternatives which
361
   can accept these hard registers.  Among all alternatives, select the
362
   ones which are better or equal to the one currently matching, where
363
   "better" is in terms of '?' and '!' constraints.  Among the remaining
364
   alternatives, select the one which replaces most operands with
365
   hard registers.  */
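/* As a made-up example: for an operand with constraints "r,m" that is
   currently a MEM, but whose value is known (via cselib) to live in a hard
   register, alternative 0 can accept that register.  As coded below, each
   '?' in an alternative adds 3 to its reject value and each '!' adds 300,
   so such alternatives are only kept when their reject value does not
   exceed that of the currently matching alternative.  */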
366
 
367
static int
368
reload_cse_simplify_operands (rtx insn, rtx testreg)
369
{
370
  int i, j;
371
 
372
  /* For each operand, all registers that are equivalent to it.  */
373
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
374
 
375
  const char *constraints[MAX_RECOG_OPERANDS];
376
 
377
  /* Vector recording how bad an alternative is.  */
378
  int *alternative_reject;
379
  /* Vector recording how many registers can be introduced by choosing
380
     this alternative.  */
381
  int *alternative_nregs;
382
  /* Array of vectors recording, for each operand and each alternative,
383
     which hard register to substitute, or -1 if the operand should be
384
     left as it is.  */
385
  int *op_alt_regno[MAX_RECOG_OPERANDS];
386
  /* Array of alternatives, sorted in order of decreasing desirability.  */
387
  int *alternative_order;
388
 
389
  extract_insn (insn);
390
 
391
  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
392
    return 0;
393
 
394
  /* Figure out which alternative currently matches.  */
395
  if (! constrain_operands (1))
396
    fatal_insn_not_found (insn);
397
 
398
  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
399
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
400
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
401
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
402
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
403
 
404
  /* For each operand, find out which regs are equivalent.  */
405
  for (i = 0; i < recog_data.n_operands; i++)
406
    {
407
      cselib_val *v;
408
      struct elt_loc_list *l;
409
      rtx op;
410
 
411
      CLEAR_HARD_REG_SET (equiv_regs[i]);
412
 
413
      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
414
         right, so avoid the problem here.  Likewise if we have a constant
415
         and the insn pattern doesn't tell us the mode we need.  */
416
      if (LABEL_P (recog_data.operand[i])
417
          || (CONSTANT_P (recog_data.operand[i])
418
              && recog_data.operand_mode[i] == VOIDmode))
419
        continue;
420
 
421
      op = recog_data.operand[i];
422
#ifdef LOAD_EXTEND_OP
423
      if (MEM_P (op)
424
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
425
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
426
        {
427
          rtx set = single_set (insn);
428
 
429
          /* We might have multiple sets, some of which do implicit
430
             extension.  Punt on this for now.  */
431
          if (! set)
432
            continue;
433
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
434
             extension applies.
435
             Also, if there is an explicit extension, we don't have to
436
             worry about an implicit one.  */
437
          else if (MEM_P (SET_DEST (set))
438
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
439
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
440
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
441
            ; /* Continue ordinary processing.  */
442
#ifdef CANNOT_CHANGE_MODE_CLASS
443
          /* If the register cannot change mode to word_mode, it follows that
444
             it cannot have been used in word_mode.  */
445
          else if (REG_P (SET_DEST (set))
446
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
447
                                                word_mode,
448
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
449
            ; /* Continue ordinary processing.  */
450
#endif
451
          /* If this is a straight load, make the extension explicit.  */
452
          else if (REG_P (SET_DEST (set))
453
                   && recog_data.n_operands == 2
454
                   && SET_SRC (set) == op
455
                   && SET_DEST (set) == recog_data.operand[1-i])
456
            {
457
              validate_change (insn, recog_data.operand_loc[i],
458
                               gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
459
                                              word_mode, op),
460
                               1);
461
              validate_change (insn, recog_data.operand_loc[1-i],
462
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
463
                               1);
464
              if (! apply_change_group ())
465
                return 0;
466
              return reload_cse_simplify_operands (insn, testreg);
467
            }
468
          else
469
            /* ??? There might be arithmetic operations with memory that are
470
               safe to optimize, but is it worth the trouble?  */
471
            continue;
472
        }
473
#endif /* LOAD_EXTEND_OP */
474
      v = cselib_lookup (op, recog_data.operand_mode[i], 0);
475
      if (! v)
476
        continue;
477
 
478
      for (l = v->locs; l; l = l->next)
479
        if (REG_P (l->loc))
480
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
481
    }
482
 
483
  for (i = 0; i < recog_data.n_operands; i++)
484
    {
485
      enum machine_mode mode;
486
      int regno;
487
      const char *p;
488
 
489
      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
490
      for (j = 0; j < recog_data.n_alternatives; j++)
491
        op_alt_regno[i][j] = -1;
492
 
493
      p = constraints[i] = recog_data.constraints[i];
494
      mode = recog_data.operand_mode[i];
495
 
496
      /* Add the reject values for each alternative given by the constraints
497
         for this operand.  */
498
      j = 0;
499
      while (*p != '\0')
500
        {
501
          char c = *p++;
502
          if (c == ',')
503
            j++;
504
          else if (c == '?')
505
            alternative_reject[j] += 3;
506
          else if (c == '!')
507
            alternative_reject[j] += 300;
508
        }
509
 
510
      /* We won't change operands which are already registers.  We
511
         also don't want to modify output operands.  */
512
      regno = true_regnum (recog_data.operand[i]);
513
      if (regno >= 0
514
          || constraints[i][0] == '='
515
          || constraints[i][0] == '+')
516
        continue;
517
 
518
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
519
        {
520
          enum reg_class rclass = NO_REGS;
521
 
522
          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
523
            continue;
524
 
525
          SET_REGNO (testreg, regno);
526
          PUT_MODE (testreg, mode);
527
 
528
          /* We found a register equal to this operand.  Now look for all
529
             alternatives that can accept this register and have not been
530
             assigned a register they can use yet.  */
531
          j = 0;
532
          p = constraints[i];
533
          for (;;)
534
            {
535
              char c = *p;
536
 
537
              switch (c)
538
                {
539
                case '=':  case '+':  case '?':
540
                case '#':  case '&':  case '!':
541
                case '*':  case '%':
542
                case '0':  case '1':  case '2':  case '3':  case '4':
543
                case '5':  case '6':  case '7':  case '8':  case '9':
544
                case '<':  case '>':  case 'V':  case 'o':
545
                case 'E':  case 'F':  case 'G':  case 'H':
546
                case 's':  case 'i':  case 'n':
547
                case 'I':  case 'J':  case 'K':  case 'L':
548
                case 'M':  case 'N':  case 'O':  case 'P':
549
                case 'p':  case 'X':  case TARGET_MEM_CONSTRAINT:
550
                  /* These don't say anything we care about.  */
551
                  break;
552
 
553
                case 'g': case 'r':
554
                  rclass = reg_class_subunion[(int) rclass][(int) GENERAL_REGS];
555
                  break;
556
 
557
                default:
558
                  rclass
559
                    = (reg_class_subunion
560
                       [(int) rclass]
561
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
562
                  break;
563
 
564
                case ',': case '\0':
565
                  /* See if REGNO fits this alternative, and set it up as the
566
                     replacement register if we don't have one for this
567
                     alternative yet and the operand being replaced is not
568
                     a cheap CONST_INT.  */
569
                  if (op_alt_regno[i][j] == -1
570
                      && reg_fits_class_p (testreg, rclass, 0, mode)
571
                      && (!CONST_INT_P (recog_data.operand[i])
572
                          || (rtx_cost (recog_data.operand[i], SET,
573
                                        optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
574
                              > rtx_cost (testreg, SET,
575
                                        optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn))))))
576
                    {
577
                      alternative_nregs[j]++;
578
                      op_alt_regno[i][j] = regno;
579
                    }
580
                  j++;
581
                  rclass = NO_REGS;
582
                  break;
583
                }
584
              p += CONSTRAINT_LEN (c, p);
585
 
586
              if (c == '\0')
587
                break;
588
            }
589
        }
590
    }
591
 
592
  /* Record all alternatives which are better or equal to the currently
593
     matching one in the alternative_order array.  */
594
  for (i = j = 0; i < recog_data.n_alternatives; i++)
595
    if (alternative_reject[i] <= alternative_reject[which_alternative])
596
      alternative_order[j++] = i;
597
  recog_data.n_alternatives = j;
598
 
599
  /* Sort it.  Given a small number of alternatives, a dumb algorithm
600
     won't hurt too much.  */
601
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
602
    {
603
      int best = i;
604
      int best_reject = alternative_reject[alternative_order[i]];
605
      int best_nregs = alternative_nregs[alternative_order[i]];
606
      int tmp;
607
 
608
      for (j = i + 1; j < recog_data.n_alternatives; j++)
609
        {
610
          int this_reject = alternative_reject[alternative_order[j]];
611
          int this_nregs = alternative_nregs[alternative_order[j]];
612
 
613
          if (this_reject < best_reject
614
              || (this_reject == best_reject && this_nregs > best_nregs))
615
            {
616
              best = j;
617
              best_reject = this_reject;
618
              best_nregs = this_nregs;
619
            }
620
        }
621
 
622
      tmp = alternative_order[best];
623
      alternative_order[best] = alternative_order[i];
624
      alternative_order[i] = tmp;
625
    }
626
 
627
  /* Substitute the operands as determined by op_alt_regno for the best
628
     alternative.  */
629
  j = alternative_order[0];
630
 
631
  for (i = 0; i < recog_data.n_operands; i++)
632
    {
633
      enum machine_mode mode = recog_data.operand_mode[i];
634
      if (op_alt_regno[i][j] == -1)
635
        continue;
636
 
637
      validate_change (insn, recog_data.operand_loc[i],
638
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
639
    }
640
 
641
  for (i = recog_data.n_dups - 1; i >= 0; i--)
642
    {
643
      int op = recog_data.dup_num[i];
644
      enum machine_mode mode = recog_data.operand_mode[op];
645
 
646
      if (op_alt_regno[op][j] == -1)
647
        continue;
648
 
649
      validate_change (insn, recog_data.dup_loc[i],
650
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
651
    }
652
 
653
  return apply_change_group ();
654
}
655
 
656
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
657
   addressing now.
658
   This code might also be useful when reload gave up on reg+reg addressing
659
   because of clashes between the return register and INDEX_REG_CLASS.  */
660
 
661
/* The maximum number of uses of a register we can keep track of to
662
   replace them with reg+reg addressing.  */
663
#define RELOAD_COMBINE_MAX_USES 6
664
 
665
/* INSN is the insn where a register has been used, and USEP points to the
666
   location of the register within the rtl.  */
667
struct reg_use { rtx insn, *usep; };
668
 
669
/* If the register is used in some unknown fashion, USE_INDEX is negative.
670
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
671
   indicates where it becomes live again.
672
   Otherwise, USE_INDEX is the index of the last encountered use of the
673
   register (which is first among these we have seen since we scan backwards),
674
   OFFSET contains the constant offset that is added to the register in
675
   all encountered uses, and USE_RUID indicates the first encountered, i.e.
676
   last, of these uses.
677
   STORE_RUID is always meaningful if we only want to use a value in a
678
   register in a different place: it denotes the next insn in the insn
679
   stream (i.e. the last encountered) that sets or clobbers the register.  */
680
static struct
681
  {
682
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
683
    int use_index;
684
    rtx offset;
685
    int store_ruid;
686
    int use_ruid;
687
  } reg_state[FIRST_PSEUDO_REGISTER];
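/* For example: a register whose last two uses have been recorded has
   use_index == RELOAD_COMBINE_MAX_USES - 2, with
   reg_use[RELOAD_COMBINE_MAX_USES - 1] and reg_use[RELOAD_COMBINE_MAX_USES - 2]
   filled in; if more than RELOAD_COMBINE_MAX_USES uses are seen, the
   register is simply marked as used in an unknown fashion
   (use_index == -1).  */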
688
 
689
/* Reverse linear uid.  This is increased in reload_combine while scanning
690
   the instructions from last to first.  It is used to set last_label_ruid
691
   and the store_ruid / use_ruid fields in reg_state.  */
692
static int reload_combine_ruid;
693
 
694
#define LABEL_LIVE(LABEL) \
695
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
696
 
697
static void
698
reload_combine (void)
699
{
700
  rtx insn, set;
701
  int first_index_reg = -1;
702
  int last_index_reg = 0;
703
  int i;
704
  basic_block bb;
705
  unsigned int r;
706
  int last_label_ruid;
707
  int min_labelno, n_labels;
708
  HARD_REG_SET ever_live_at_start, *label_live;
709
 
710
  /* If reg+reg can be used in offsetable memory addresses, the main chunk of
711
     reload has already used it where appropriate, so there is no use in
712
     trying to generate it now.  */
713
  if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
714
    return;
715
 
716
  /* To avoid wasting too much time later searching for an index register,
717
     determine the minimum and maximum index register numbers.  */
718
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
719
    if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
720
      {
721
        if (first_index_reg == -1)
722
          first_index_reg = r;
723
 
724
        last_index_reg = r;
725
      }
726
 
727
  /* If no index register is available, we can quit now.  */
728
  if (first_index_reg == -1)
729
    return;
730
 
731
  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
732
     information is a bit fuzzy immediately after reload, but it's
733
     still good enough to determine which registers are live at a jump
734
     destination.  */
735
  min_labelno = get_first_label_num ();
736
  n_labels = max_label_num () - min_labelno;
737
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
738
  CLEAR_HARD_REG_SET (ever_live_at_start);
739
 
740
  FOR_EACH_BB_REVERSE (bb)
741
    {
742
      insn = BB_HEAD (bb);
743
      if (LABEL_P (insn))
744
        {
745
          HARD_REG_SET live;
746
          bitmap live_in = df_get_live_in (bb);
747
 
748
          REG_SET_TO_HARD_REG_SET (live, live_in);
749
          compute_use_by_pseudos (&live, live_in);
750
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
751
          IOR_HARD_REG_SET (ever_live_at_start, live);
752
        }
753
    }
754
 
755
  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
756
  last_label_ruid = reload_combine_ruid = 0;
757
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
758
    {
759
      reg_state[r].store_ruid = reload_combine_ruid;
760
      if (fixed_regs[r])
761
        reg_state[r].use_index = -1;
762
      else
763
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
764
    }
765
 
766
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
767
    {
768
      rtx note;
769
 
770
      /* We cannot do our optimization across labels.  Invalidating all the use
771
         information we have would be costly, so we just note where the label
772
         is and then later disable any optimization that would cross it.  */
773
      if (LABEL_P (insn))
774
        last_label_ruid = reload_combine_ruid;
775
      else if (BARRIER_P (insn))
776
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
777
          if (! fixed_regs[r])
778
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
779
 
780
      if (! INSN_P (insn))
781
        continue;
782
 
783
      reload_combine_ruid++;
784
 
785
      /* Look for (set (REGX) (CONST_INT))
786
         (set (REGX) (PLUS (REGX) (REGY)))
787
         ...
788
         ... (MEM (REGX)) ...
789
         and convert it to
790
         (set (REGZ) (CONST_INT))
791
         ...
792
         ... (MEM (PLUS (REGZ) (REGY)))... .
793
 
794
         First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
795
         and that we know all uses of REGX before it dies.
796
         Also, explicitly check that REGX != REGY; our life information
797
         does not yet show whether REGY changes in this insn.  */
798
      set = single_set (insn);
799
      if (set != NULL_RTX
800
          && REG_P (SET_DEST (set))
801
          && (hard_regno_nregs[REGNO (SET_DEST (set))]
802
                              [GET_MODE (SET_DEST (set))]
803
              == 1)
804
          && GET_CODE (SET_SRC (set)) == PLUS
805
          && REG_P (XEXP (SET_SRC (set), 1))
806
          && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
807
          && !rtx_equal_p (XEXP (SET_SRC (set), 1), SET_DEST (set))
808
          && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
809
        {
810
          rtx reg = SET_DEST (set);
811
          rtx plus = SET_SRC (set);
812
          rtx base = XEXP (plus, 1);
813 378 julius
          rtx prev = prev_nonnote_nondebug_insn (insn);
814 280 jeremybenn
          rtx prev_set = prev ? single_set (prev) : NULL_RTX;
815
          unsigned int regno = REGNO (reg);
816
          rtx index_reg = NULL_RTX;
817
          rtx reg_sum = NULL_RTX;
818
 
819
          /* Now we need to set INDEX_REG to an index register (denoted as
820
             REGZ in the illustration above) and REG_SUM to the expression
821
             register+register that we want to use to substitute uses of REG
822
             (typically in MEMs) with.  First check REG and BASE for being
823
             index registers; we can use them even if they are not dead.  */
824
          if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
825
              || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
826
                                    REGNO (base)))
827
            {
828
              index_reg = reg;
829
              reg_sum = plus;
830
            }
831
          else
832
            {
833
              /* Otherwise, look for a free index register.  Since we have
834
                 checked above that neither REG nor BASE are index registers,
835
                 if we find anything at all, it will be different from these
836
                 two registers.  */
837
              for (i = first_index_reg; i <= last_index_reg; i++)
838
                {
839
                  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
840
                                         i)
841
                      && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
842
                      && reg_state[i].store_ruid <= reg_state[regno].use_ruid
843
                      && hard_regno_nregs[i][GET_MODE (reg)] == 1)
844
                    {
845
                      index_reg = gen_rtx_REG (GET_MODE (reg), i);
846
                      reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
847
                      break;
848
                    }
849
                }
850
            }
851
 
852
          /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
853
             (REGY), i.e. BASE, is not clobbered before the last use we'll
854
             create.  */
855
          if (reg_sum
856
              && prev_set
857
              && CONST_INT_P (SET_SRC (prev_set))
858
              && rtx_equal_p (SET_DEST (prev_set), reg)
859
              && reg_state[regno].use_index >= 0
860
              && (reg_state[REGNO (base)].store_ruid
861
                  <= reg_state[regno].use_ruid))
862
            {
863
              int i;
864
 
865
              /* Change destination register and, if necessary, the constant
866
                 value in PREV, the constant loading instruction.  */
867
              validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
868
              if (reg_state[regno].offset != const0_rtx)
869
                validate_change (prev,
870
                                 &SET_SRC (prev_set),
871
                                 GEN_INT (INTVAL (SET_SRC (prev_set))
872
                                          + INTVAL (reg_state[regno].offset)),
873
                                 1);
874
 
875
              /* Now for every use of REG that we have recorded, replace REG
876
                 with REG_SUM.  */
877
              for (i = reg_state[regno].use_index;
878
                   i < RELOAD_COMBINE_MAX_USES; i++)
879
                validate_unshare_change (reg_state[regno].reg_use[i].insn,
880
                                         reg_state[regno].reg_use[i].usep,
881
                                         /* Each change must have its own
882
                                            replacement.  */
883
                                         reg_sum, 1);
884
 
885
              if (apply_change_group ())
886
                {
887
                  /* For every new use of REG_SUM, we have to record the use
888
                     of BASE therein, i.e. operand 1.  */
889
                  for (i = reg_state[regno].use_index;
890
                       i < RELOAD_COMBINE_MAX_USES; i++)
891
                    reload_combine_note_use
892
                      (&XEXP (*reg_state[regno].reg_use[i].usep, 1),
893
                       reg_state[regno].reg_use[i].insn);
894
 
895
                  if (reg_state[REGNO (base)].use_ruid
896
                      > reg_state[regno].use_ruid)
897
                    reg_state[REGNO (base)].use_ruid
898
                      = reg_state[regno].use_ruid;
899
 
900
                  /* Delete the reg-reg addition.  */
901
                  delete_insn (insn);
902
 
903
                  if (reg_state[regno].offset != const0_rtx)
904
                    /* Previous REG_EQUIV / REG_EQUAL notes for PREV
905
                       are now invalid.  */
906
                    remove_reg_equal_equiv_notes (prev);
907
 
908
                  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
909
                  reg_state[REGNO (index_reg)].store_ruid
910
                    = reload_combine_ruid;
911
                  continue;
912
                }
913
            }
914
        }
915
 
916
      note_stores (PATTERN (insn), reload_combine_note_store, NULL);
917
 
918
      if (CALL_P (insn))
919
        {
920
          rtx link;
921
 
922
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
923
            if (call_used_regs[r])
924
              {
925
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
926
                reg_state[r].store_ruid = reload_combine_ruid;
927
              }
928
 
929
          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
930
               link = XEXP (link, 1))
931
            {
932
              rtx usage_rtx = XEXP (XEXP (link, 0), 0);
933
              if (REG_P (usage_rtx))
934
                {
935
                  unsigned int i;
936
                  unsigned int start_reg = REGNO (usage_rtx);
937
                  unsigned int num_regs =
938
                        hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
939
                  unsigned int end_reg  = start_reg + num_regs - 1;
940
                  for (i = start_reg; i <= end_reg; i++)
941
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
942
                      {
943
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
944
                        reg_state[i].store_ruid = reload_combine_ruid;
945
                      }
946
                    else
947
                      reg_state[i].use_index = -1;
948
                 }
949
             }
950
 
951
        }
952
      else if (JUMP_P (insn)
953
               && GET_CODE (PATTERN (insn)) != RETURN)
954
        {
955
          /* Non-spill registers might be used at the jump destination in
956
             some unknown fashion, so we have to mark the unknown use.  */
957
          HARD_REG_SET *live;
958
 
959
          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
960
              && JUMP_LABEL (insn))
961
            live = &LABEL_LIVE (JUMP_LABEL (insn));
962
          else
963
            live = &ever_live_at_start;
964
 
965
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
966
            if (TEST_HARD_REG_BIT (*live, i))
967
              reg_state[i].use_index = -1;
968
        }
969
 
970
      reload_combine_note_use (&PATTERN (insn), insn);
971
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
972
        {
973
          if (REG_NOTE_KIND (note) == REG_INC
974
              && REG_P (XEXP (note, 0)))
975
            {
976
              int regno = REGNO (XEXP (note, 0));
977
 
978
              reg_state[regno].store_ruid = reload_combine_ruid;
979
              reg_state[regno].use_index = -1;
980
            }
981
        }
982
    }
983
 
984
  free (label_live);
985
}
986
 
987
/* Check if DST is a register or a subreg of a register; if it is,
988
   update reg_state[regno].store_ruid and reg_state[regno].use_index
989
   accordingly.  Called via note_stores from reload_combine.  */
990
 
991
static void
992
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
993
{
994
  int regno = 0;
995
  int i;
996
  enum machine_mode mode = GET_MODE (dst);
997
 
998
  if (GET_CODE (dst) == SUBREG)
999
    {
1000
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
1001
                                   GET_MODE (SUBREG_REG (dst)),
1002
                                   SUBREG_BYTE (dst),
1003
                                   GET_MODE (dst));
1004
      dst = SUBREG_REG (dst);
1005
    }
1006
  if (!REG_P (dst))
1007
    return;
1008
  regno += REGNO (dst);
1009
 
1010
  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
1011
     careful with registers / register parts that are not full words.
1012
     Similarly for ZERO_EXTRACT.  */
1013
  if (GET_CODE (set) != SET
1014
      || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
1015
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
1016
    {
1017
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
1018
        {
1019
          reg_state[i].use_index = -1;
1020
          reg_state[i].store_ruid = reload_combine_ruid;
1021
        }
1022
    }
1023
  else
1024
    {
1025
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
1026
        {
1027
          reg_state[i].store_ruid = reload_combine_ruid;
1028
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
1029
        }
1030
    }
1031
}
1032
 
1033
/* XP points to a piece of rtl that has to be checked for any uses of
1034
   registers.
1035
   *XP is the pattern of INSN, or a part of it.
1036
   Called from reload_combine, and recursively by itself.  */
1037
static void
1038
reload_combine_note_use (rtx *xp, rtx insn)
1039
{
1040
  rtx x = *xp;
1041
  enum rtx_code code = x->code;
1042
  const char *fmt;
1043
  int i, j;
1044
  rtx offset = const0_rtx; /* For the REG case below.  */
1045
 
1046
  switch (code)
1047
    {
1048
    case SET:
1049
      if (REG_P (SET_DEST (x)))
1050
        {
1051
          reload_combine_note_use (&SET_SRC (x), insn);
1052
          return;
1053
        }
1054
      break;
1055
 
1056
    case USE:
1057
      /* If this is the USE of a return value, we can't change it.  */
1058
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
1059
        {
1060
          /* Mark the return register as used in an unknown fashion.  */
1061
          rtx reg = XEXP (x, 0);
1062
          int regno = REGNO (reg);
1063
          int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1064
 
1065
          while (--nregs >= 0)
1066
            reg_state[regno + nregs].use_index = -1;
1067
          return;
1068
        }
1069
      break;
1070
 
1071
    case CLOBBER:
1072
      if (REG_P (SET_DEST (x)))
1073
        {
1074
          /* No spurious CLOBBERs of pseudo registers may remain.  */
1075
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
1076
          return;
1077
        }
1078
      break;
1079
 
1080
    case PLUS:
1081
      /* We are interested in (plus (reg) (const_int)) .  */
1082
      if (!REG_P (XEXP (x, 0))
1083
          || !CONST_INT_P (XEXP (x, 1)))
1084
        break;
1085
      offset = XEXP (x, 1);
1086
      x = XEXP (x, 0);
1087
      /* Fall through.  */
1088
    case REG:
1089
      {
1090
        int regno = REGNO (x);
1091
        int use_index;
1092
        int nregs;
1093
 
1094
        /* No spurious USEs of pseudo registers may remain.  */
1095
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);
1096
 
1097
        nregs = hard_regno_nregs[regno][GET_MODE (x)];
1098
 
1099
        /* We can't substitute into multi-hard-reg uses.  */
1100
        if (nregs > 1)
1101
          {
1102
            while (--nregs >= 0)
1103
              reg_state[regno + nregs].use_index = -1;
1104
            return;
1105
          }
1106
 
1107
        /* If this register is already used in some unknown fashion, we
1108
           can't do anything.
1109
           If we decrement the index from zero to -1, we can't store more
1110
           uses, so this register becomes used in an unknown fashion.  */
1111
        use_index = --reg_state[regno].use_index;
1112
        if (use_index < 0)
1113
          return;
1114
 
1115
        if (use_index != RELOAD_COMBINE_MAX_USES - 1)
1116
          {
1117
            /* We have found another use for a register that is already
1118
               used later.  Check if the offsets match; if not, mark the
1119
               register as used in an unknown fashion.  */
1120
            if (! rtx_equal_p (offset, reg_state[regno].offset))
1121
              {
1122
                reg_state[regno].use_index = -1;
1123
                return;
1124
              }
1125
          }
1126
        else
1127
          {
1128
            /* This is the first use of this register we have seen since we
1129
               marked it as dead.  */
1130
            reg_state[regno].offset = offset;
1131
            reg_state[regno].use_ruid = reload_combine_ruid;
1132
          }
1133
        reg_state[regno].reg_use[use_index].insn = insn;
1134
        reg_state[regno].reg_use[use_index].usep = xp;
1135
        return;
1136
      }
1137
 
1138
    default:
1139
      break;
1140
    }
1141
 
1142
  /* Recursively process the components of X.  */
1143
  fmt = GET_RTX_FORMAT (code);
1144
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1145
    {
1146
      if (fmt[i] == 'e')
1147
        reload_combine_note_use (&XEXP (x, i), insn);
1148
      else if (fmt[i] == 'E')
1149
        {
1150
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1151
            reload_combine_note_use (&XVECEXP (x, i, j), insn);
1152
        }
1153
    }
1154
}
1155
 
1156
/* See if we can reduce the cost of a constant by replacing a move
1157
   with an add.  We track situations in which a register is set to a
1158
   constant or to a register plus a constant.  */
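/* A concrete example of the intended saving (constants chosen arbitrarily):

       (set (reg:SI 3) (const_int 0x1000))
       ...
       (set (reg:SI 3) (const_int 0x1004))

   can become

       (set (reg:SI 3) (const_int 0x1000))
       ...
       (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))

   whenever the add is cheaper than re-loading the larger constant.  */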
1159
/* We cannot do our optimization across labels.  Invalidating all the
1160
   information about register contents we have would be costly, so we
1161
   use move2add_last_label_luid to note where the label is and then
1162
   later disable any optimization that would cross it.
1163
   reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
1164
   reg_set_luid[n] is greater than move2add_last_label_luid.  */
1165
static int reg_set_luid[FIRST_PSEUDO_REGISTER];
1166
 
1167
/* If reg_base_reg[n] is negative, register n has been set to
1168
   reg_offset[n] in mode reg_mode[n] .
1169
   If reg_base_reg[n] is non-negative, register n has been set to the
1170
   sum of reg_offset[n] and the value of register reg_base_reg[n]
1171
   before reg_set_luid[n], calculated in mode reg_mode[n] .  */
1172
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
1173
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
1174
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
1175
 
1176
/* move2add_luid is linearly increased while scanning the instructions
1177
   from first to last.  It is used to set reg_set_luid in
1178
   reload_cse_move2add and move2add_note_store.  */
1179
static int move2add_luid;
1180
 
1181
/* move2add_last_label_luid is set whenever a label is found.  Labels
1182
   invalidate all previously collected reg_offset data.  */
1183
static int move2add_last_label_luid;
1184
 
1185
/* ??? We don't know how zero / sign extension is handled, hence we
1186
   can't go from a narrower to a wider mode.  */
1187
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
1188
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
1189
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
1190
       && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
1191
                                 GET_MODE_BITSIZE (INMODE))))
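/* E.g. on a typical 32-bit target this allows SImode knowledge about a
   register to be reused for a narrower QImode set (when truncation is a
   no-op), but never the reverse, since nothing is known about how the
   upper bits were extended.  */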
1192
 
1193
static void
1194
reload_cse_move2add (rtx first)
1195
{
1196
  int i;
1197
  rtx insn;
1198
 
1199
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1200
    reg_set_luid[i] = 0;
1201
 
1202
  move2add_last_label_luid = 0;
1203
  move2add_luid = 2;
1204
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
1205
    {
1206
      rtx pat, note;
1207
 
1208
      if (LABEL_P (insn))
1209
        {
1210
          move2add_last_label_luid = move2add_luid;
1211
          /* We're going to increment move2add_luid twice after a
1212
             label, so that we can use move2add_last_label_luid + 1 as
1213
             the luid for constants.  */
1214
          move2add_luid++;
1215
          continue;
1216
        }
1217
      if (! INSN_P (insn))
1218
        continue;
1219
      pat = PATTERN (insn);
1220
      /* For simplicity, we only perform this optimization on
1221
         straightforward SETs.  */
1222
      if (GET_CODE (pat) == SET
1223
          && REG_P (SET_DEST (pat)))
1224
        {
1225
          rtx reg = SET_DEST (pat);
1226
          int regno = REGNO (reg);
1227
          rtx src = SET_SRC (pat);
1228
 
1229
          /* Check if we have valid information on the contents of this
1230
             register in the mode of REG.  */
1231
          if (reg_set_luid[regno] > move2add_last_label_luid
1232
              && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno])
1233
              && dbg_cnt (cse2_move2add))
1234
            {
1235
              /* Try to transform (set (REGX) (CONST_INT A))
1236
                                  ...
1237
                                  (set (REGX) (CONST_INT B))
1238
                 to
1239
                                  (set (REGX) (CONST_INT A))
1240
                                  ...
1241
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
1242
                 or
1243
                                  (set (REGX) (CONST_INT A))
1244
                                  ...
1245
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1246
              */
1247
 
1248
              if (CONST_INT_P (src) && reg_base_reg[regno] < 0)
1249
                {
1250
                  rtx new_src = gen_int_mode (INTVAL (src) - reg_offset[regno],
1251
                                              GET_MODE (reg));
1252
                  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
1253
 
1254
                  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1255
                     use (set (reg) (reg)) instead.
1256
                     We don't delete this insn, nor do we convert it into a
1257
                     note, to avoid losing register notes or the return
1258
                     value flag.  jump2 already knows how to get rid of
1259
                     no-op moves.  */
1260
                  if (new_src == const0_rtx)
1261
                    {
1262
                      /* If the constants are different, this is a
1263
                         truncation, that, if turned into (set (reg)
1264
                         (reg)), would be discarded.  Maybe we should
1265
                         try a truncMN pattern?  */
1266
                      if (INTVAL (src) == reg_offset [regno])
1267
                        validate_change (insn, &SET_SRC (pat), reg, 0);
1268
                    }
1269
                  else if (rtx_cost (new_src, PLUS, speed) < rtx_cost (src, SET, speed)
1270
                           && have_add2_insn (reg, new_src))
1271
                    {
1272
                      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
1273
                      validate_change (insn, &SET_SRC (pat), tem, 0);
1274
                    }
1275
                  else if (GET_MODE (reg) != BImode)
1276
                    {
1277
                      enum machine_mode narrow_mode;
1278
                      for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1279
                           narrow_mode != VOIDmode
1280
                           && narrow_mode != GET_MODE (reg);
1281
                           narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
1282
                        {
1283
                          if (have_insn_for (STRICT_LOW_PART, narrow_mode)
1284
                              && ((reg_offset[regno]
1285
                                   & ~GET_MODE_MASK (narrow_mode))
1286
                                  == (INTVAL (src)
1287
                                      & ~GET_MODE_MASK (narrow_mode))))
1288
                            {
1289
                              rtx narrow_reg = gen_rtx_REG (narrow_mode,
1290
                                                            REGNO (reg));
1291
                              rtx narrow_src = gen_int_mode (INTVAL (src),
1292
                                                             narrow_mode);
1293
                              rtx new_set =
1294
                                gen_rtx_SET (VOIDmode,
1295
                                             gen_rtx_STRICT_LOW_PART (VOIDmode,
1296
                                                                      narrow_reg),
1297
                                             narrow_src);
1298
                              if (validate_change (insn, &PATTERN (insn),
1299
                                                   new_set, 0))
1300
                                break;
1301
                            }
1302
                        }
1303
                    }
1304
                  reg_set_luid[regno] = move2add_luid;
1305
                  reg_mode[regno] = GET_MODE (reg);
1306
                  reg_offset[regno] = INTVAL (src);
1307
                  continue;
1308
                }
1309
 
1310
              /* Try to transform (set (REGX) (REGY))
1311
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
1312
                                  ...
1313
                                  (set (REGX) (REGY))
1314
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
1315
                 to
1316
                                  (set (REGX) (REGY))
1317
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
1318
                                  ...
1319
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
1320
              else if (REG_P (src)
1321
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
1322
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
1323
                       && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
1324
                                                 reg_mode[REGNO (src)]))
1325
                {
1326 378 julius
                  rtx next = next_nonnote_nondebug_insn (insn);
1327 280 jeremybenn
                  rtx set = NULL_RTX;
1328
                  if (next)
1329
                    set = single_set (next);
1330
                  if (set
1331
                      && SET_DEST (set) == reg
1332
                      && GET_CODE (SET_SRC (set)) == PLUS
1333
                      && XEXP (SET_SRC (set), 0) == reg
1334
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
1335
                    {
1336
                      rtx src3 = XEXP (SET_SRC (set), 1);
1337
                      HOST_WIDE_INT added_offset = INTVAL (src3);
1338
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
1339
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
1340
                      rtx new_src =
1341
                        gen_int_mode (added_offset
1342
                                      + base_offset
1343
                                      - regno_offset,
1344
                                      GET_MODE (reg));
1345
                      bool success = false;
1346
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
1347
 
1348
                      if (new_src == const0_rtx)
1349
                        /* See above why we create (set (reg) (reg)) here.  */
1350
                        success
1351
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else if ((rtx_cost (new_src, PLUS, speed)
                                < COSTS_N_INSNS (1) + rtx_cost (src3, SET, speed))
                               && have_add2_insn (reg, new_src))
                        {
                          rtx newpat = gen_rtx_SET (VOIDmode,
                                                    reg,
                                                    gen_rtx_PLUS (GET_MODE (reg),
                                                                  reg,
                                                                  new_src));
                          success
                            = validate_change (next, &PATTERN (next),
                                               newpat, 0);
                        }
                      if (success)
                        delete_insn (insn);
                      insn = next;
                      reg_mode[regno] = GET_MODE (reg);
                      reg_offset[regno] =
                        trunc_int_for_mode (added_offset + base_offset,
                                            GET_MODE (reg));
                      continue;
                    }
                }
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                reg_set_luid[regno] = 0;
            }
        }
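      /* Record every store in this insn: move2add_note_store updates (or
         invalidates) the reg_base_reg / reg_offset / reg_set_luid tracking
         for each register the pattern sets or clobbers.  */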
      note_stores (PATTERN (insn), move2add_note_store, NULL);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
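      /* For example (illustrative register and constant), a jump taken on
         (ne (reg r5) (const_int 7)) leaves r5 equal to 7 on the fall-through
         path, so it is recorded below as if the insn contained
         (set (reg r5) (const_int 7)).  */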
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, 0);
            }
        }

      /* If this is a CALL_INSN, all call-used registers must be assumed
         to hold unknown values afterwards.  */
      if (CALL_P (insn))
        {
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_set_luid[i] = 0;
            }
        }
    }
}

/* SET is a SET or CLOBBER that sets DST.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
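/* Throughout this pass, hard register R is tracked as (roughly) the value
   of register reg_base_reg[R] plus reg_offset[R], or as the plain constant
   reg_offset[R] when reg_base_reg[R] is negative; the information is only
   trusted while reg_set_luid[R] is greater than move2add_last_label_luid.  */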

static void
move2add_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno = 0;
  unsigned int nregs = 0;
  unsigned int i;
  enum machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
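      /* Reduce the SUBREG to the hard register(s) it actually occupies;
         all of the tracking below is done on hard register numbers.  */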
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      nregs = subreg_nregs (dst);
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
      return;
    }
  if (!REG_P (dst))
    return;

  regno += REGNO (dst);
  if (!nregs)
    nregs = hard_regno_nregs[regno][mode];

  if (SCALAR_INT_MODE_P (GET_MODE (dst))
      && nregs == 1 && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      HOST_WIDE_INT offset;
      int base_regno;
      /* This may be different from mode, if SET_DEST (set) is a
         SUBREG.  */
      enum machine_mode dst_mode = GET_MODE (dst);

      switch (GET_CODE (src))
        {
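        /* Only a few source forms keep DST expressible as base + offset:
           a plain register copy, a constant, or a PLUS of a register and a
           constant (or of two registers when one of them is currently known
           to hold a constant).  Anything else invalidates the tracking.  */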
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = INTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && (reg_set_luid[REGNO (XEXP (src, 1))]
                           > move2add_last_label_luid)
                       && (MODES_OK_FOR_MOVE2ADD
                           (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (reg_set_luid[REGNO (base_reg)]
                           > move2add_last_label_luid
                           && (MODES_OK_FOR_MOVE2ADD
                               (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
                           && reg_base_reg[REGNO (base_reg)] < 0)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          reg_mode[regno] = mode;
          return;

        default:
        invalidate:
          /* Invalidate the contents of the register.  */
          reg_set_luid[regno] = 0;
          return;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (reg_set_luid[base_regno] <= move2add_last_label_luid)
        {
          reg_base_reg[base_regno] = base_regno;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          reg_mode[base_regno] = mode;
        }
      else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
                                        reg_mode[base_regno]))
        goto invalidate;

      reg_mode[regno] = mode;

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno] = trunc_int_for_mode (offset
                                              + reg_offset[base_regno],
                                              dst_mode);
    }
  else
    {
      unsigned int endregno = regno + nregs;

      for (i = regno; i < endregno; i++)
        /* Reset the information about this register.  */
        reg_set_luid[i] = 0;
    }
}

static bool
gate_handle_postreload (void)
{
  return (optimize > 0 && reload_completed);
}


static unsigned int
rest_of_handle_postreload (void)
{
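  /* The postreload_cse debug counter can disable the pass body; this is
     typically used to bisect a problem down to a particular pass
     invocation.  */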
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (flag_non_call_exceptions)
    purge_all_dead_edges ();

  return 0;
}

struct rtl_opt_pass pass_postreload_cse =
{
 {
  RTL_PASS,
  "postreload",                         /* name */
  gate_handle_postreload,               /* gate */
  rest_of_handle_postreload,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_RELOAD_CSE_REGS,                   /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
