OpenCores Subversion repository: openrisc
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk

openrisc/trunk/gnu-dev/or1k-gcc/gcc/cprop.c - Blame information for rev 806

Line No. Rev Author Line
1 684 jeremybenn
/* Global constant/copy propagation for RTL.
2
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3
   2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 3, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
#include "diagnostic-core.h"
26
#include "toplev.h"
27
 
28
#include "rtl.h"
29
#include "tree.h"
30
#include "tm_p.h"
31
#include "regs.h"
32
#include "hard-reg-set.h"
33
#include "flags.h"
34
#include "insn-config.h"
35
#include "recog.h"
36
#include "basic-block.h"
37
#include "output.h"
38
#include "function.h"
39
#include "expr.h"
40
#include "except.h"
41
#include "params.h"
42
#include "cselib.h"
43
#include "intl.h"
44
#include "obstack.h"
45
#include "timevar.h"
46
#include "tree-pass.h"
47
#include "hashtab.h"
48
#include "df.h"
49
#include "dbgcnt.h"
50
#include "target.h"
51
 
52
 
53
/* An obstack for our working variables.  */
54
static struct obstack cprop_obstack;
55
 
56
/* Occurrence of an expression.
57
   There is one per basic block.  If a pattern appears more than once the
58
   last appearance is used.  */
59
 
60
struct occr
61
{
62
  /* Next occurrence of this expression.  */
63
  struct occr *next;
64
  /* The insn that computes the expression.  */
65
  rtx insn;
66
};
67
 
68
typedef struct occr *occr_t;
69
DEF_VEC_P (occr_t);
70
DEF_VEC_ALLOC_P (occr_t, heap);
71
 
72
/* Hash table entry for assignment expressions.  */
73
 
74
struct expr
75
{
76
  /* The expression (DEST := SRC).  */
77
  rtx dest;
78
  rtx src;
79
 
80
  /* Index in the available expression bitmaps.  */
81
  int bitmap_index;
82
  /* Next entry with the same hash.  */
83
  struct expr *next_same_hash;
84
  /* List of available occurrences in basic blocks in the function.
85
     An "available occurrence" is one that is the last occurrence in the
86
     basic block and whose operands are not modified by following statements
87
     in the basic block [including this insn].  */
88
  struct occr *avail_occr;
89
};
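
/* Illustrative sketch (an editorial example, not part of the original
   source): for a basic block containing

     insn 10:  (set (reg 60) (const_int 7))
     insn 12:  (set (reg 61) (reg 60))

   hash_scan_set would record two entries, roughly

     { dest = (reg 60), src = (const_int 7), avail_occr -> insn 10 }
     { dest = (reg 61), src = (reg 60),      avail_occr -> insn 12 }

   assuming neither register is set again later in the block.  The insn
   numbers are invented for the example.  */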
90
 
91
/* Hash table for copy propagation expressions.
92
   Each hash table is an array of buckets.
93
   ??? It is known that if it were an array of entries, structure elements
94
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
95
   not clear whether in the final analysis a sufficient amount of memory would
96
   be saved as the size of the available expression bitmaps would be larger
97
   [one could build a mapping table without holes afterwards though].
98
   Someday I'll perform the computation and figure it out.  */
99
 
100
struct hash_table_d
101
{
102
  /* The table itself.
103
     This is an array of `set_hash_table_size' elements.  */
104
  struct expr **table;
105
 
106
  /* Size of the hash table, in elements.  */
107
  unsigned int size;
108
 
109
  /* Number of hash table elements.  */
110
  unsigned int n_elems;
111
};
112
 
113
/* Copy propagation hash table.  */
114
static struct hash_table_d set_hash_table;
115
 
116
/* Array of implicit set patterns indexed by basic block index.  */
117
static rtx *implicit_sets;
118
 
119
/* Array of indexes of expressions for implicit set patterns indexed by basic
120
   block index.  In other words, implicit_set_indexes[i] is the bitmap_index
121
   of the expression whose RTX is implicit_sets[i].  */
122
static int *implicit_set_indexes;
123
 
124
/* Bitmap containing one bit for each register in the program.
125
   Used to track which registers have been set since the start or end of
126
   the basic block while traversing that block.  */
127
static regset reg_set_bitmap;
128
 
129
/* Various variables for statistics gathering.  */
130
 
131
/* Memory used in a pass.
132
   This isn't intended to be absolutely precise.  Its intent is only
133
   to keep an eye on memory usage.  */
134
static int bytes_used;
135
 
136
/* Number of local constants propagated.  */
137
static int local_const_prop_count;
138
/* Number of local copies propagated.  */
139
static int local_copy_prop_count;
140
/* Number of global constants propagated.  */
141
static int global_const_prop_count;
142
/* Number of global copies propagated.  */
143
static int global_copy_prop_count;
144
 
145
#define GOBNEW(T)               ((T *) cprop_alloc (sizeof (T)))
146
#define GOBNEWVAR(T, S)         ((T *) cprop_alloc ((S)))
147
 
148
/* Cover function to obstack_alloc.  */
149
 
150
static void *
151
cprop_alloc (unsigned long size)
152
{
153
  bytes_used += size;
154
  return obstack_alloc (&cprop_obstack, size);
155
}
156
 
157
/* Return nonzero if register X is unchanged from INSN to the end
158
   of INSN's basic block.  */
159
 
160
static int
161
reg_available_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
162
{
163
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
164
}
165
 
166
/* Hash a set of register REGNO.
167
 
168
   Sets are hashed on the register that is set.  This simplifies the PRE copy
169
   propagation code.
170
 
171
   ??? May need to make things more elaborate.  Later, as necessary.  */
172
 
173
static unsigned int
174
hash_set (int regno, int hash_table_size)
175
{
176
  unsigned int hash;
177
 
178
  hash = regno;
179
  return hash % hash_table_size;
180
}
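
/* Worked example (editorial): with table->size == 31, a set of (reg 68)
   hashes to 68 % 31 == 6; the hash is simply the destination register
   number modulo the number of buckets.  */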
181
 
182
/* Insert assignment DEST:=SET from INSN in the hash table.
183
   DEST is a register and SET is a register or a suitable constant.
184
   If the assignment is already present in the table, record it as
185
   the last occurrence in INSN's basic block.
186
   IMPLICIT is true if it's an implicit set, false otherwise.  */
187
 
188
static void
189
insert_set_in_table (rtx dest, rtx src, rtx insn, struct hash_table_d *table,
190
                     bool implicit)
191
{
192
  bool found = false;
193
  unsigned int hash;
194
  struct expr *cur_expr, *last_expr = NULL;
195
  struct occr *cur_occr;
196
 
197
  hash = hash_set (REGNO (dest), table->size);
198
 
199
  for (cur_expr = table->table[hash]; cur_expr;
200
       cur_expr = cur_expr->next_same_hash)
201
    {
202
      if (dest == cur_expr->dest
203
          && src == cur_expr->src)
204
        {
205
          found = true;
206
          break;
207
        }
208
      last_expr = cur_expr;
209
    }
210
 
211
  if (! found)
212
    {
213
      cur_expr = GOBNEW (struct expr);
214
      bytes_used += sizeof (struct expr);
215
      if (table->table[hash] == NULL)
216
        /* This is the first pattern that hashed to this index.  */
217
        table->table[hash] = cur_expr;
218
      else
219
        /* Add EXPR to end of this hash chain.  */
220
        last_expr->next_same_hash = cur_expr;
221
 
222
      /* Set the fields of the expr element.
223
         We must copy DEST and SRC because they can be modified when copy
224
         propagation is performed on their operands.  */
225
      cur_expr->dest = copy_rtx (dest);
226
      cur_expr->src = copy_rtx (src);
227
      cur_expr->bitmap_index = table->n_elems++;
228
      cur_expr->next_same_hash = NULL;
229
      cur_expr->avail_occr = NULL;
230
    }
231
 
232
  /* Now record the occurrence.  */
233
  cur_occr = cur_expr->avail_occr;
234
 
235
  if (cur_occr
236
      && BLOCK_FOR_INSN (cur_occr->insn) == BLOCK_FOR_INSN (insn))
237
    {
238
      /* Found another instance of the expression in the same basic block.
239
         Prefer this occurrence to the currently recorded one.  We want
240
         the last one in the block and the block is scanned from start
241
         to end.  */
242
      cur_occr->insn = insn;
243
    }
244
  else
245
    {
246
      /* First occurrence of this expression in this basic block.  */
247
      cur_occr = GOBNEW (struct occr);
248
      bytes_used += sizeof (struct occr);
249
      cur_occr->insn = insn;
250
      cur_occr->next = cur_expr->avail_occr;
251
      cur_expr->avail_occr = cur_occr;
252
    }
253
 
254
  /* Record bitmap_index of the implicit set in implicit_set_indexes.  */
255
  if (implicit)
256
    implicit_set_indexes[BLOCK_FOR_INSN(insn)->index] = cur_expr->bitmap_index;
257
}
258
 
259
/* Determine whether the rtx X should be treated as a constant for CPROP.
260
   Since X might be inserted more than once we have to take care that it
261
   is sharable.  */
262
 
263
static bool
264
cprop_constant_p (const_rtx x)
265
{
266
  return CONSTANT_P (x) && (GET_CODE (x) != CONST || shared_const_p (x));
267
}
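
/* Examples (editorial sketch): (const_int 42), (symbol_ref "x") and
   CONST_DOUBLE rtxes all satisfy CONSTANT_P and are accepted here; a
   CONST wrapper such as (const (plus (symbol_ref "x") (const_int 4)))
   is accepted only if shared_const_p says the same rtx may safely be
   used in more than one place.  */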
268
 
269
/* Scan SET present in INSN and add an entry to the hash TABLE.
270
   IMPLICIT is true if it's an implicit set, false otherwise.  */
271
 
272
static void
273
hash_scan_set (rtx set, rtx insn, struct hash_table_d *table, bool implicit)
274
{
275
  rtx src = SET_SRC (set);
276
  rtx dest = SET_DEST (set);
277
 
278
  if (REG_P (dest)
279
      && ! HARD_REGISTER_P (dest)
280
      && reg_available_p (dest, insn)
281
      && can_copy_p (GET_MODE (dest)))
282
    {
283
      /* See if a REG_EQUAL note shows this equivalent to a simpler expression.
284
 
285
         This allows us to do a single CPROP pass and still eliminate
286
         redundant constants, addresses or other expressions that are
287
         constructed with multiple instructions.
288
 
289
         However, keep the original SRC if INSN is a simple reg-reg move.
290
         In this case, there will almost always be a REG_EQUAL note on the
291
         insn that sets SRC.  By recording the REG_EQUAL value here as SRC
292
         for INSN, we miss copy propagation opportunities.
293
 
294
         Note that this does not impede profitable constant propagations.  We
295
         "look through" reg-reg sets in lookup_set.  */
296
      rtx note = find_reg_equal_equiv_note (insn);
297
      if (note != 0
298
          && REG_NOTE_KIND (note) == REG_EQUAL
299
          && !REG_P (src)
300
          && cprop_constant_p (XEXP (note, 0)))
301
        src = XEXP (note, 0), set = gen_rtx_SET (VOIDmode, dest, src);
302
 
303
      /* Record sets for constant/copy propagation.  */
304
      if ((REG_P (src)
305
           && src != dest
306
           && ! HARD_REGISTER_P (src)
307
           && reg_available_p (src, insn))
308
          || cprop_constant_p (src))
309
        insert_set_in_table (dest, src, insn, table, implicit);
310
    }
311
}
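
/* Example (editorial, insn number invented): given

     insn 20: (set (reg 62) (plus (reg 60) (const_int 4)))
              with a REG_EQUAL note of (const_int 12)

   the note value is preferred and the entry recorded is
   (reg 62) := (const_int 12), so a single CPROP pass can still propagate
   constants that were constructed with several instructions.  */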
312
 
313
/* Process INSN and add hash table entries as appropriate.  */
314
 
315
static void
316
hash_scan_insn (rtx insn, struct hash_table_d *table)
317
{
318
  rtx pat = PATTERN (insn);
319
  int i;
320
 
321
  /* Pick out the sets of INSN and for other forms of instructions record
322
     what's been modified.  */
323
 
324
  if (GET_CODE (pat) == SET)
325
    hash_scan_set (pat, insn, table, false);
326
  else if (GET_CODE (pat) == PARALLEL)
327
    for (i = 0; i < XVECLEN (pat, 0); i++)
328
      {
329
        rtx x = XVECEXP (pat, 0, i);
330
 
331
        if (GET_CODE (x) == SET)
332
          hash_scan_set (x, insn, table, false);
333
      }
334
}
335
 
336
/* Dump the hash table TABLE to file FILE under the name NAME.  */
337
 
338
static void
339
dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
340
{
341
  int i;
342
  /* Flattened out table, so it's printed in proper order.  */
343
  struct expr **flat_table;
344
  unsigned int *hash_val;
345
  struct expr *expr;
346
 
347
  flat_table = XCNEWVEC (struct expr *, table->n_elems);
348
  hash_val = XNEWVEC (unsigned int, table->n_elems);
349
 
350
  for (i = 0; i < (int) table->size; i++)
351
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
352
      {
353
        flat_table[expr->bitmap_index] = expr;
354
        hash_val[expr->bitmap_index] = i;
355
      }
356
 
357
  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
358
           name, table->size, table->n_elems);
359
 
360
  for (i = 0; i < (int) table->n_elems; i++)
361
    if (flat_table[i] != 0)
362
      {
363
        expr = flat_table[i];
364
        fprintf (file, "Index %d (hash value %d)\n  ",
365
                 expr->bitmap_index, hash_val[i]);
366
        print_rtl (file, expr->dest);
367
        fprintf (file, " := ");
368
        print_rtl (file, expr->src);
369
        fprintf (file, "\n");
370
      }
371
 
372
  fprintf (file, "\n");
373
 
374
  free (flat_table);
375
  free (hash_val);
376
}
377
 
378
/* Record as unavailable all registers that are DEF operands of INSN.  */
379
 
380
static void
381
make_set_regs_unavailable (rtx insn)
382
{
383
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
384
  df_ref *def_rec;
385
 
386
  for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
387
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
388
}
389
 
390
/* Top level function to create an assignment hash table.
391
 
392
   Assignment entries are placed in the hash table if
393
   - they are of the form (set (pseudo-reg) src),
394
   - src is something we want to perform const/copy propagation on,
395
   - none of the operands or target are subsequently modified in the block
396
 
397
   Currently src must be a pseudo-reg or a const_int.
398
 
399
   TABLE is the table computed.  */
400
 
401
static void
402
compute_hash_table_work (struct hash_table_d *table)
403
{
404
  basic_block bb;
405
 
406
  /* Allocate vars to track sets of regs.  */
407
  reg_set_bitmap = ALLOC_REG_SET (NULL);
408
 
409
  FOR_EACH_BB (bb)
410
    {
411
      rtx insn;
412
 
413
      /* Reset tables used to keep track of what's not yet invalid [since
414
         the end of the block].  */
415
      CLEAR_REG_SET (reg_set_bitmap);
416
 
417
      /* Go over all insns from the last to the first.  This is convenient
418
         for tracking available registers, i.e. not set between INSN and
419
         the end of the basic block BB.  */
420
      FOR_BB_INSNS_REVERSE (bb, insn)
421
        {
422
          /* Only real insns are interesting.  */
423
          if (!NONDEBUG_INSN_P (insn))
424
            continue;
425
 
426
          /* Record interesting sets from INSN in the hash table.  */
427
          hash_scan_insn (insn, table);
428
 
429
          /* Any registers set in INSN will make SETs above it not AVAIL.  */
430
          make_set_regs_unavailable (insn);
431
        }
432
 
433
      /* Insert implicit sets in the hash table, pretending they appear as
434
         insns at the head of the basic block.  */
435
      if (implicit_sets[bb->index] != NULL_RTX)
436
        hash_scan_set (implicit_sets[bb->index], BB_HEAD (bb), table, true);
437
    }
438
 
439
  FREE_REG_SET (reg_set_bitmap);
440
}
441
 
442
/* Allocate space for the set/expr hash TABLE.
443
   The number of insns in the function determines the number of buckets.  */
444
 
445
static void
446
alloc_hash_table (struct hash_table_d *table)
447
{
448
  int n;
449
 
450
  n = get_max_insn_count ();
451
 
452
  table->size = n / 4;
453
  if (table->size < 11)
454
    table->size = 11;
455
 
456
  /* Attempt to maintain efficient use of hash table.
457
     Making it an odd number is simplest for now.
458
     ??? Later take some measurements.  */
459
  table->size |= 1;
460
  n = table->size * sizeof (struct expr *);
461
  table->table = XNEWVAR (struct expr *, n);
462
}
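
/* Sizing example (editorial): if get_max_insn_count returns 1000,
   table->size becomes (1000 / 4) | 1 == 251 buckets; the "| 1" merely
   forces an odd bucket count, and the minimum of 11 only applies to
   very small functions.  */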
463
 
464
/* Free things allocated by alloc_hash_table.  */
465
 
466
static void
467
free_hash_table (struct hash_table_d *table)
468
{
469
  free (table->table);
470
}
471
 
472
/* Compute the contents of the hash TABLE used for copy/const
473
   propagation.  */
474
 
475
static void
476
compute_hash_table (struct hash_table_d *table)
477
{
478
  /* Initialize count of number of entries in hash table.  */
479
  table->n_elems = 0;
480
  memset (table->table, 0, table->size * sizeof (struct expr *));
481
 
482
  compute_hash_table_work (table);
483
}
484
 
485
/* Expression tracking support.  */
486
 
487
/* Lookup REGNO in the set TABLE.  The result is a pointer to the
488
   table entry, or NULL if not found.  */
489
 
490
static struct expr *
491
lookup_set (unsigned int regno, struct hash_table_d *table)
492
{
493
  unsigned int hash = hash_set (regno, table->size);
494
  struct expr *expr;
495
 
496
  expr = table->table[hash];
497
 
498
  while (expr && REGNO (expr->dest) != regno)
499
    expr = expr->next_same_hash;
500
 
501
  return expr;
502
}
503
 
504
/* Return the next entry for REGNO in list EXPR.  */
505
 
506
static struct expr *
507
next_set (unsigned int regno, struct expr *expr)
508
{
509
  do
510
    expr = expr->next_same_hash;
511
  while (expr && REGNO (expr->dest) != regno);
512
 
513
  return expr;
514
}
515
 
516
/* Reset tables used to keep track of what's still available [since the
517
   start of the block].  */
518
 
519
static void
520
reset_opr_set_tables (void)
521
{
522
  /* Maintain a bitmap of which regs have been set since beginning of
523
     the block.  */
524
  CLEAR_REG_SET (reg_set_bitmap);
525
}
526
 
527
/* Return nonzero if the register X has not been set yet [since the
528
   start of the basic block containing INSN].  */
529
 
530
static int
531
reg_not_set_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
532
{
533
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
534
}
535
 
536
/* Record things set by INSN.
537
   This data is used by reg_not_set_p.  */
538
 
539
static void
540
mark_oprs_set (rtx insn)
541
{
542
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
543
  df_ref *def_rec;
544
 
545
  for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
546
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
547
}
548
 
549
/* Compute copy/constant propagation working variables.  */
550
 
551
/* Local properties of assignments.  */
552
static sbitmap *cprop_avloc;
553
static sbitmap *cprop_kill;
554
 
555
/* Global properties of assignments (computed from the local properties).  */
556
static sbitmap *cprop_avin;
557
static sbitmap *cprop_avout;
558
 
559
/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
560
   basic blocks.  N_SETS is the number of sets.  */
561
 
562
static void
563
alloc_cprop_mem (int n_blocks, int n_sets)
564
{
565
  cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);
566
  cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);
567
 
568
  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
569
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
570
}
571
 
572
/* Free vars used by copy/const propagation.  */
573
 
574
static void
575
free_cprop_mem (void)
576
{
577
  sbitmap_vector_free (cprop_avloc);
578
  sbitmap_vector_free (cprop_kill);
579
  sbitmap_vector_free (cprop_avin);
580
  sbitmap_vector_free (cprop_avout);
581
}
582
 
583
/* Compute the local properties of each recorded expression.
584
 
585
   Local properties are those that are defined by the block, irrespective of
586
   other blocks.
587
 
588
   An expression is killed in a block if its operands, either DEST or SRC, are
589
   modified in the block.
590
 
591
   An expression is computed (locally available) in a block if it is computed
592
   at least once and the expression would contain the same value if the
593
   computation was moved to the end of the block.
594
 
595
   KILL and COMP are destination sbitmaps for recording local properties.  */
596
 
597
static void
598
compute_local_properties (sbitmap *kill, sbitmap *comp,
599
                          struct hash_table_d *table)
600
{
601
  unsigned int i;
602
 
603
  /* Initialize the bitmaps that were passed in.  */
604
  sbitmap_vector_zero (kill, last_basic_block);
605
  sbitmap_vector_zero (comp, last_basic_block);
606
 
607
  for (i = 0; i < table->size; i++)
608
    {
609
      struct expr *expr;
610
 
611
      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
612
        {
613
          int indx = expr->bitmap_index;
614
          df_ref def;
615
          struct occr *occr;
616
 
617
          /* For each definition of the destination pseudo-reg, the expression
618
             is killed in the block where the definition is.  */
619
          for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
620
               def; def = DF_REF_NEXT_REG (def))
621
            SET_BIT (kill[DF_REF_BB (def)->index], indx);
622
 
623
          /* If the source is a pseudo-reg, for each definition of the source,
624
             the expression is killed in the block where the definition is.  */
625
          if (REG_P (expr->src))
626
            for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
627
                 def; def = DF_REF_NEXT_REG (def))
628
              SET_BIT (kill[DF_REF_BB (def)->index], indx);
629
 
630
          /* The occurrences recorded in avail_occr are exactly those that
631
             are locally available in the block where they are.  */
632
          for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
633
            {
634
              SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
635
            }
636
        }
637
    }
638
}
639
 
640
/* Hash table support.  */
641
 
642
/* Top level routine to do the dataflow analysis needed by copy/const
643
   propagation.  */
644
 
645
static void
646
compute_cprop_data (void)
647
{
648
  basic_block bb;
649
 
650
  compute_local_properties (cprop_kill, cprop_avloc, &set_hash_table);
651
  compute_available (cprop_avloc, cprop_kill, cprop_avout, cprop_avin);
652
 
653
  /* Merge implicit sets into CPROP_AVIN.  They are always available at the
654
     entry of their basic block.  We need to do this because 1) implicit sets
655
     aren't recorded for the local pass so they cannot be propagated within
656
     their basic block by this pass and 2) the global pass would otherwise
657
     propagate them only in the successors of their basic block.  */
658
  FOR_EACH_BB (bb)
659
    {
660
      int index = implicit_set_indexes[bb->index];
661
      if (index != -1)
662
        SET_BIT (cprop_avin[bb->index], index);
663
    }
664
}
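
/* Editorial note: the problem solved by compute_available above is the
   classic available-expressions dataflow, roughly

     AVOUT(bb) = AVLOC(bb) U (AVIN(bb) - KILL(bb))
     AVIN(bb)  = intersection of AVOUT(p) over all predecessors p,
                 with AVIN(ENTRY) = empty set

   iterated to a fixed point.  An assignment can then be used in a block
   whenever its bit is set in that block's CPROP_AVIN.  */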
665
 
666
/* Copy/constant propagation.  */
667
 
668
/* Maximum number of register uses in an insn that we handle.  */
669
#define MAX_USES 8
670
 
671
/* Table of uses (registers, both hard and pseudo) found in an insn.
672
   Allocated statically to avoid alloc/free complexity and overhead.  */
673
static rtx reg_use_table[MAX_USES];
674
 
675
/* Index into `reg_use_table' while building it.  */
676
static unsigned reg_use_count;
677
 
678
/* Set up a list of registers used in INSN.  The found uses are stored
679
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
680
   and contains the number of uses in the table upon exit.
681
 
682
   ??? If a register appears multiple times we will record it multiple times.
683
   This doesn't hurt anything but it will slow things down.  */
684
 
685
static void
686
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
687
{
688
  int i, j;
689
  enum rtx_code code;
690
  const char *fmt;
691
  rtx x = *xptr;
692
 
693
  /* repeat is used to turn tail-recursion into iteration since GCC
694
     can't do it when there's no return value.  */
695
 repeat:
696
  if (x == 0)
697
    return;
698
 
699
  code = GET_CODE (x);
700
  if (REG_P (x))
701
    {
702
      if (reg_use_count == MAX_USES)
703
        return;
704
 
705
      reg_use_table[reg_use_count] = x;
706
      reg_use_count++;
707
    }
708
 
709
  /* Recursively scan the operands of this expression.  */
710
 
711
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
712
    {
713
      if (fmt[i] == 'e')
714
        {
715
          /* If we are about to do the last recursive call
716
             needed at this level, change it into iteration.
717
             This function is called enough to be worth it.  */
718
          if (i == 0)
719
            {
720
              x = XEXP (x, 0);
721
              goto repeat;
722
            }
723
 
724
          find_used_regs (&XEXP (x, i), data);
725
        }
726
      else if (fmt[i] == 'E')
727
        for (j = 0; j < XVECLEN (x, i); j++)
728
          find_used_regs (&XVECEXP (x, i, j), data);
729
    }
730
}
731
 
732
/* Try to replace all uses of FROM in INSN with TO.
733
   Return nonzero if successful.  */
734
 
735
static int
736
try_replace_reg (rtx from, rtx to, rtx insn)
737
{
738
  rtx note = find_reg_equal_equiv_note (insn);
739
  rtx src = 0;
740
  int success = 0;
741
  rtx set = single_set (insn);
742
 
743
  /* Usually we substitute easy stuff, so we won't copy everything.
744
     However, we need to take care not to duplicate non-trivial CONST
745
     expressions.  */
746
  to = copy_rtx (to);
747
 
748
  validate_replace_src_group (from, to, insn);
749
  if (num_changes_pending () && apply_change_group ())
750
    success = 1;
751
 
752
  /* Try to simplify SET_SRC if we have substituted a constant.  */
753
  if (success && set && CONSTANT_P (to))
754
    {
755
      src = simplify_rtx (SET_SRC (set));
756
 
757
      if (src)
758
        validate_change (insn, &SET_SRC (set), src, 0);
759
    }
760
 
761
  /* If there is already a REG_EQUAL note, update the expression in it
762
     with our replacement.  */
763
  if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
764
    set_unique_reg_note (insn, REG_EQUAL,
765
                         simplify_replace_rtx (XEXP (note, 0), from, to));
766
  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
767
    {
768
      /* If above failed and this is a single set, try to simplify the source
769
         of the set given our substitution.  We could perhaps try this for
770
         multiple SETs, but it probably won't buy us anything.  */
771
      src = simplify_replace_rtx (SET_SRC (set), from, to);
772
 
773
      if (!rtx_equal_p (src, SET_SRC (set))
774
          && validate_change (insn, &SET_SRC (set), src, 0))
775
        success = 1;
776
 
777
      /* If we've failed perform the replacement, have a single SET to
778
         a REG destination and don't yet have a note, add a REG_EQUAL note
779
         to not lose information.  */
780
      if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set)))
781
        note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
782
    }
783
 
784
  if (set && MEM_P (SET_DEST (set)) && reg_mentioned_p (from, SET_DEST (set)))
785
    {
786
      /* Registers can also appear as uses in SET_DEST if it is a MEM.
787
         We could perhaps try this for multiple SETs, but it probably
788
         won't buy us anything.  */
789
      rtx dest = simplify_replace_rtx (SET_DEST (set), from, to);
790
 
791
      if (!rtx_equal_p (dest, SET_DEST (set))
792
          && validate_change (insn, &SET_DEST (set), dest, 0))
793
        success = 1;
794
    }
795
 
796
  /* REG_EQUAL may get simplified into a register.
797
     We don't allow that.  Remove the note in that case.  This ought
798
     not to happen, because previous code ought to synthesize a
799
     reg-reg move, but be on the safe side.  */
800
  if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
801
    remove_note (insn, note);
802
 
803
  return success;
804
}
805
 
806
/* Find a set of REGNO that is available on entry to INSN's block.  Return
807
   NULL if no such set is found.  */
808
 
809
static struct expr *
810
find_avail_set (int regno, rtx insn)
811
{
812
  /* SET1 contains the last set found that can be returned to the caller for
813
     use in a substitution.  */
814
  struct expr *set1 = 0;
815
 
816
  /* Loops are not possible here.  To get a loop we would need two sets
817
     available at the start of the block containing INSN.  i.e. we would
818
     need two sets like this available at the start of the block:
819
 
820
       (set (reg X) (reg Y))
821
       (set (reg Y) (reg X))
822
 
823
     This can not happen since the set of (reg Y) would have killed the
824
     set of (reg X) making it unavailable at the start of this block.  */
825
  while (1)
826
    {
827
      rtx src;
828
      struct expr *set = lookup_set (regno, &set_hash_table);
829
 
830
      /* Find a set that is available at the start of the block
831
         which contains INSN.  */
832
      while (set)
833
        {
834
          if (TEST_BIT (cprop_avin[BLOCK_FOR_INSN (insn)->index],
835
                        set->bitmap_index))
836
            break;
837
          set = next_set (regno, set);
838
        }
839
 
840
      /* If no available set was found we've reached the end of the
841
         (possibly empty) copy chain.  */
842
      if (set == 0)
843
        break;
844
 
845
      src = set->src;
846
 
847
      /* We know the set is available.
848
         Now check that SRC is locally anticipatable (i.e. none of the
849
         source operands have changed since the start of the block).
850
 
851
         If the source operand changed, we may still use it for the next
852
         iteration of this loop, but we may not use it for substitutions.  */
853
 
854
      if (cprop_constant_p (src) || reg_not_set_p (src, insn))
855
        set1 = set;
856
 
857
      /* If the source of the set is anything except a register, then
858
         we have reached the end of the copy chain.  */
859
      if (! REG_P (src))
860
        break;
861
 
862
      /* Follow the copy chain, i.e. start another iteration of the loop
863
         and see if we have an available copy into SRC.  */
864
      regno = REGNO (src);
865
    }
866
 
867
  /* SET1 holds the last set that was available and anticipatable at
868
     INSN.  */
869
  return set1;
870
}
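
/* Copy chain example (editorial, register numbers invented): if the sets

     (set (reg 70) (reg 65))
     (set (reg 65) (const_int 3))

   are both available at the start of INSN's block and reg 65 has not been
   set again before INSN, then find_avail_set (70, insn) follows the chain
   and returns the (reg 65) := 3 entry, so the caller can replace the use
   of reg 70 directly with the constant 3.  */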
871
 
872
/* Subroutine of cprop_insn that tries to propagate constants into
873
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
874
   it is the instruction that immediately precedes JUMP, and must be a
875
   single SET of a register.  FROM is what we will try to replace,
876
   SRC is the constant we will try to substitute for it.  Return nonzero
877
   if a change was made.  */
878
 
879
static int
880
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
881
{
882
  rtx new_rtx, set_src, note_src;
883
  rtx set = pc_set (jump);
884
  rtx note = find_reg_equal_equiv_note (jump);
885
 
886
  if (note)
887
    {
888
      note_src = XEXP (note, 0);
889
      if (GET_CODE (note_src) == EXPR_LIST)
890
        note_src = NULL_RTX;
891
    }
892
  else note_src = NULL_RTX;
893
 
894
  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
895
  set_src = note_src ? note_src : SET_SRC (set);
896
 
897
  /* First substitute the SETCC condition into the JUMP instruction,
898
     then substitute the known values into this expanded JUMP.  */
899
  if (setcc != NULL_RTX
900
      && !modified_between_p (from, setcc, jump)
901
      && !modified_between_p (src, setcc, jump))
902
    {
903
      rtx setcc_src;
904
      rtx setcc_set = single_set (setcc);
905
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
906
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
907
                ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
908
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
909
                                      setcc_src);
910
    }
911
  else
912
    setcc = NULL_RTX;
913
 
914
  new_rtx = simplify_replace_rtx (set_src, from, src);
915
 
916
  /* If no simplification can be made, then try the next register.  */
917
  if (rtx_equal_p (new_rtx, SET_SRC (set)))
918
    return 0;
919
 
920
  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
921
  if (new_rtx == pc_rtx)
922
    delete_insn (jump);
923
  else
924
    {
925
      /* Ensure the value computed inside the jump insn is equivalent
926
         to one computed by setcc.  */
927
      if (setcc && modified_in_p (new_rtx, setcc))
928
        return 0;
929
      if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0))
930
        {
931
          /* When (some) constants are not valid in a comparison, and there
932
             are two registers to be replaced by constants before the entire
933
             comparison can be folded into a constant, we need to keep
934
             intermediate information in REG_EQUAL notes.  For targets with
935
             separate compare insns, such notes are added by try_replace_reg.
936
             When we have a combined compare-and-branch instruction, however,
937
             we need to attach a note to the branch itself to make this
938
             optimization work.  */
939
 
940
          if (!rtx_equal_p (new_rtx, note_src))
941
            set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx));
942
          return 0;
943
        }
944
 
945
      /* Remove REG_EQUAL note after simplification.  */
946
      if (note_src)
947
        remove_note (jump, note);
948
     }
949
 
950
#ifdef HAVE_cc0
951
  /* Delete the cc0 setter.  */
952
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
953
    delete_insn (setcc);
954
#endif
955
 
956
  global_const_prop_count++;
957
  if (dump_file != NULL)
958
    {
959
      fprintf (dump_file,
960
               "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with"
961
               "constant ", REGNO (from), INSN_UID (jump));
962
      print_rtl (dump_file, src);
963
      fprintf (dump_file, "\n");
964
    }
965
  purge_dead_edges (bb);
966
 
967
  /* If a conditional jump has been changed into unconditional jump, remove
968
     the jump and make the edge fallthru - this is always called in
969
     cfglayout mode.  */
970
  if (new_rtx != pc_rtx && simplejump_p (jump))
971
    {
972
      edge e;
973
      edge_iterator ei;
974
 
975
      FOR_EACH_EDGE (e, ei, bb->succs)
976
        if (e->dest != EXIT_BLOCK_PTR
977
            && BB_HEAD (e->dest) == JUMP_LABEL (jump))
978
          {
979
            e->flags |= EDGE_FALLTHRU;
980
            break;
981
          }
982
      delete_insn (jump);
983
    }
984
 
985
  return 1;
986
}
987
 
988
/* Subroutine of cprop_insn that tries to propagate constants.  FROM is what
989
   we will try to replace, SRC is the constant we will try to substitute for
990
   it and INSN is the instruction where this will be happening.  */
991
 
992
static int
993
constprop_register (rtx from, rtx src, rtx insn)
994
{
995
  rtx sset;
996
 
997
  /* Check for reg or cc0 setting instructions followed by
998
     conditional branch instructions first.  */
999
  if ((sset = single_set (insn)) != NULL
1000
      && NEXT_INSN (insn)
1001
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
1002
    {
1003
      rtx dest = SET_DEST (sset);
1004
      if ((REG_P (dest) || CC0_P (dest))
1005
          && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn),
1006
                         from, src))
1007
        return 1;
1008
    }
1009
 
1010
  /* Handle normal insns next.  */
1011
  if (NONJUMP_INSN_P (insn) && try_replace_reg (from, src, insn))
1012
    return 1;
1013
 
1014
  /* Try to propagate a CONST_INT into a conditional jump.
1015
     We're pretty specific about what we will handle in this
1016
     code, we can extend this as necessary over time.
1017
 
1018
     Right now the insn in question must look like
1019
     (set (pc) (if_then_else ...))  */
1020
  else if (any_condjump_p (insn) && onlyjump_p (insn))
1021
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, src);
1022
  return 0;
1023
}
1024
 
1025
/* Perform constant and copy propagation on INSN.
1026
   Return nonzero if a change was made.  */
1027
 
1028
static int
1029
cprop_insn (rtx insn)
1030
{
1031
  unsigned i;
1032
  int changed = 0, changed_this_round;
1033
  rtx note;
1034
 
1035
retry:
1036
  changed_this_round = 0;
1037
  reg_use_count = 0;
1038
  note_uses (&PATTERN (insn), find_used_regs, NULL);
1039
 
1040
  /* We may win even when propagating constants into notes.  */
1041
  note = find_reg_equal_equiv_note (insn);
1042
  if (note)
1043
    find_used_regs (&XEXP (note, 0), NULL);
1044
 
1045
  for (i = 0; i < reg_use_count; i++)
1046
    {
1047
      rtx reg_used = reg_use_table[i];
1048
      unsigned int regno = REGNO (reg_used);
1049
      rtx src;
1050
      struct expr *set;
1051
 
1052
      /* If the register has already been set in this block, there's
1053
         nothing we can do.  */
1054
      if (! reg_not_set_p (reg_used, insn))
1055
        continue;
1056
 
1057
      /* Find an assignment that sets reg_used and is available
1058
         at the start of the block.  */
1059
      set = find_avail_set (regno, insn);
1060
      if (! set)
1061
        continue;
1062
 
1063
      src = set->src;
1064
 
1065
      /* Constant propagation.  */
1066
      if (cprop_constant_p (src))
1067
        {
1068
          if (constprop_register (reg_used, src, insn))
1069
            {
1070
              changed_this_round = changed = 1;
1071
              global_const_prop_count++;
1072
              if (dump_file != NULL)
1073
                {
1074
                  fprintf (dump_file,
1075
                           "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
1076
                  fprintf (dump_file, "insn %d with constant ",
1077
                           INSN_UID (insn));
1078
                  print_rtl (dump_file, src);
1079
                  fprintf (dump_file, "\n");
1080
                }
1081
              if (INSN_DELETED_P (insn))
1082
                return 1;
1083
            }
1084
        }
1085
      else if (REG_P (src)
1086
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
1087
               && REGNO (src) != regno)
1088
        {
1089
          if (try_replace_reg (reg_used, src, insn))
1090
            {
1091
              changed_this_round = changed = 1;
1092
              global_copy_prop_count++;
1093
              if (dump_file != NULL)
1094
                {
1095
                  fprintf (dump_file,
1096
                           "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
1097
                           regno, INSN_UID (insn));
1098
                  fprintf (dump_file, " with reg %d\n", REGNO (src));
1099
                }
1100
 
1101
              /* The original insn setting reg_used may or may not now be
1102
                 deletable.  We leave the deletion to DCE.  */
1103
              /* FIXME: If it turns out that the insn isn't deletable,
1104
                 then we may have unnecessarily extended register lifetimes
1105
                 and made things worse.  */
1106
            }
1107
        }
1108
 
1109
      /* If try_replace_reg simplified the insn, the regs found
1110
         by find_used_regs may not be valid anymore.  Start over.  */
1111
      if (changed_this_round)
1112
        goto retry;
1113
    }
1114
 
1115
  if (changed && DEBUG_INSN_P (insn))
1116
    return 0;
1117
 
1118
  return changed;
1119
}
1120
 
1121
/* Like find_used_regs, but avoid recording uses that appear in
1122
   input-output contexts such as zero_extract or pre_dec.  This
1123
   restricts the cases we consider to those for which local cprop
1124
   can legitimately make replacements.  */
1125
 
1126
static void
1127
local_cprop_find_used_regs (rtx *xptr, void *data)
1128
{
1129
  rtx x = *xptr;
1130
 
1131
  if (x == 0)
1132
    return;
1133
 
1134
  switch (GET_CODE (x))
1135
    {
1136
    case ZERO_EXTRACT:
1137
    case SIGN_EXTRACT:
1138
    case STRICT_LOW_PART:
1139
      return;
1140
 
1141
    case PRE_DEC:
1142
    case PRE_INC:
1143
    case POST_DEC:
1144
    case POST_INC:
1145
    case PRE_MODIFY:
1146
    case POST_MODIFY:
1147
      /* Can only legitimately appear this early in the context of
1148
         stack pushes for function arguments, but handle all of the
1149
         codes nonetheless.  */
1150
      return;
1151
 
1152
    case SUBREG:
1153
      /* Setting a subreg of a register larger than word_mode leaves
1154
         the non-written words unchanged.  */
1155
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
1156
        return;
1157
      break;
1158
 
1159
    default:
1160
      break;
1161
    }
1162
 
1163
  find_used_regs (xptr, data);
1164
}
1165
 
1166
/* Try to perform local const/copy propagation on X in INSN.  */
1167
 
1168
static bool
1169
do_local_cprop (rtx x, rtx insn)
1170
{
1171
  rtx newreg = NULL, newcnst = NULL;
1172
 
1173
  /* Rule out USE instructions and ASM statements as we don't want to
1174
     change the hard registers mentioned.  */
1175
  if (REG_P (x)
1176
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
1177
          || (GET_CODE (PATTERN (insn)) != USE
1178
              && asm_noperands (PATTERN (insn)) < 0)))
1179
    {
1180
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
1181
      struct elt_loc_list *l;
1182
 
1183
      if (!val)
1184
        return false;
1185
      for (l = val->locs; l; l = l->next)
1186
        {
1187
          rtx this_rtx = l->loc;
1188
          rtx note;
1189
 
1190
          if (cprop_constant_p (this_rtx))
1191
            newcnst = this_rtx;
1192
          if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
1193
              /* Don't copy propagate if it has attached REG_EQUIV note.
1194
                 At this point only function parameters should have
1195
                 REG_EQUIV notes and if the argument slot is used somewhere
1196
                 explicitly, it means the address of the parameter has been taken,
1197
                 so we should not extend the lifetime of the pseudo.  */
1198
              && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
1199
                  || ! MEM_P (XEXP (note, 0))))
1200
            newreg = this_rtx;
1201
        }
1202
      if (newcnst && constprop_register (x, newcnst, insn))
1203
        {
1204
          if (dump_file != NULL)
1205
            {
1206
              fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
1207
                       REGNO (x));
1208
              fprintf (dump_file, "insn %d with constant ",
1209
                       INSN_UID (insn));
1210
              print_rtl (dump_file, newcnst);
1211
              fprintf (dump_file, "\n");
1212
            }
1213
          local_const_prop_count++;
1214
          return true;
1215
        }
1216
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
1217
        {
1218
          if (dump_file != NULL)
1219
            {
1220
              fprintf (dump_file,
1221
                       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
1222
                       REGNO (x), INSN_UID (insn));
1223
              fprintf (dump_file, " with reg %d\n", REGNO (newreg));
1224
            }
1225
          local_copy_prop_count++;
1226
          return true;
1227
        }
1228
    }
1229
  return false;
1230
}
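
/* Local example (editorial, insn numbers invented): within one block

     insn 30: (set (reg 60) (const_int 5))
     insn 31: (set (reg 61) (reg 60))
     insn 32: ... use of (reg 61) ...

   cselib records (const_int 5) among the locations of the value held by
   reg 61, so do_local_cprop can rewrite the use in insn 32 to the
   constant 5 without any global dataflow information.  */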
1231
 
1232
/* Do local const/copy propagation (i.e. within each basic block).  */
1233
 
1234
static int
1235
local_cprop_pass (void)
1236
{
1237
  basic_block bb;
1238
  rtx insn;
1239
  bool changed = false;
1240
  unsigned i;
1241
 
1242
  cselib_init (0);
1243
  FOR_EACH_BB (bb)
1244
    {
1245
      FOR_BB_INSNS (bb, insn)
1246
        {
1247
          if (INSN_P (insn))
1248
            {
1249
              rtx note = find_reg_equal_equiv_note (insn);
1250
              do
1251
                {
1252
                  reg_use_count = 0;
1253
                  note_uses (&PATTERN (insn), local_cprop_find_used_regs,
1254
                             NULL);
1255
                  if (note)
1256
                    local_cprop_find_used_regs (&XEXP (note, 0), NULL);
1257
 
1258
                  for (i = 0; i < reg_use_count; i++)
1259
                    {
1260
                      if (do_local_cprop (reg_use_table[i], insn))
1261
                        {
1262
                          if (!DEBUG_INSN_P (insn))
1263
                            changed = true;
1264
                          break;
1265
                        }
1266
                    }
1267
                  if (INSN_DELETED_P (insn))
1268
                    break;
1269
                }
1270
              while (i < reg_use_count);
1271
            }
1272
          cselib_process_insn (insn);
1273
        }
1274
 
1275
      /* Forget everything at the end of a basic block.  */
1276
      cselib_clear_table ();
1277
    }
1278
 
1279
  cselib_finish ();
1280
 
1281
  return changed;
1282
}
1283
 
1284
/* Similar to get_condition, only the resulting condition must be
1285
   valid at JUMP, instead of at EARLIEST.
1286
 
1287
   This differs from noce_get_condition in ifcvt.c in that we prefer not to
1288
   settle for the condition variable in the jump instruction being integral.
1289
   We prefer to be able to record the value of a user variable, rather than
1290
   the value of a temporary used in a condition.  This could be solved by
1291
   recording the value of *every* register scanned by canonicalize_condition,
1292
   but this would require some code reorganization.  */
1293
 
1294
rtx
1295
fis_get_condition (rtx jump)
1296
{
1297
  return get_condition (jump, NULL, false, true);
1298
}
1299
 
1300
/* Check the comparison COND to see if we can safely form an implicit
1301
   set from it.  */
1302
 
1303
static bool
1304
implicit_set_cond_p (const_rtx cond)
1305
{
1306
  enum machine_mode mode;
1307
  rtx cst;
1308
 
1309
  /* COND must be either an EQ or NE comparison.  */
1310
  if (GET_CODE (cond) != EQ && GET_CODE (cond) != NE)
1311
    return false;
1312
 
1313
  /* The first operand of COND must be a pseudo-reg.  */
1314
  if (! REG_P (XEXP (cond, 0))
1315
      || HARD_REGISTER_P (XEXP (cond, 0)))
1316
    return false;
1317
 
1318
  /* The second operand of COND must be a suitable constant.  */
1319
  mode = GET_MODE (XEXP (cond, 0));
1320
  cst = XEXP (cond, 1);
1321
 
1322
  /* We can't perform this optimization if either operand might be or might
1323
     contain a signed zero.  */
1324
  if (HONOR_SIGNED_ZEROS (mode))
1325
    {
1326
      /* It is sufficient to check if CST is or contains a zero.  We must
1327
         handle float, complex, and vector.  If any subpart is a zero, then
1328
         the optimization can't be performed.  */
1329
      /* ??? The complex and vector checks are not implemented yet.  We just
1330
         always return zero for them.  */
1331
      if (GET_CODE (cst) == CONST_DOUBLE)
1332
        {
1333
          REAL_VALUE_TYPE d;
1334
          REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
1335
          if (REAL_VALUES_EQUAL (d, dconst0))
1336
            return 0;
1337
        }
1338
      else
1339
        return 0;
1340
    }
1341
 
1342
  return cprop_constant_p (cst);
1343
}
1344
 
1345
/* Find the implicit sets of a function.  An "implicit set" is a constraint
1346
   on the value of a variable, implied by a conditional jump.  For example,
1347
   following "if (x == 2)", the then branch may be optimized as though the
1348
   conditional performed an "explicit set", in this example, "x = 2".  This
1349
   function records the set patterns that are implicit at the start of each
1350
   basic block.
1351
 
1352
   If an implicit set is found but the set is implicit on a critical edge,
1353
   this critical edge is split.
1354
 
1355
   Return true if the CFG was modified, false otherwise.  */
1356
 
1357
static bool
1358
find_implicit_sets (void)
1359
{
1360
  basic_block bb, dest;
1361
  rtx cond, new_rtx;
1362
  unsigned int count = 0;
1363
  bool edges_split = false;
1364
  size_t implicit_sets_size = last_basic_block + 10;
1365
 
1366
  implicit_sets = XCNEWVEC (rtx, implicit_sets_size);
1367
 
1368
  FOR_EACH_BB (bb)
1369
    {
1370
      /* Check for more than one successor.  */
1371
      if (EDGE_COUNT (bb->succs) <= 1)
1372
        continue;
1373
 
1374
      cond = fis_get_condition (BB_END (bb));
1375
 
1376
      /* If no condition is found or if it isn't of a suitable form,
1377
         ignore it.  */
1378
      if (! cond || ! implicit_set_cond_p (cond))
1379
        continue;
1380
 
1381
      dest = GET_CODE (cond) == EQ
1382
        ? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;
1383
 
1384
      /* If DEST doesn't go anywhere, ignore it.  */
1385
      if (! dest || dest == EXIT_BLOCK_PTR)
1386
        continue;
1387
 
1388
      /* We have found a suitable implicit set.  Try to record it now as
1389
         a SET in DEST.  If DEST has more than one predecessor, the edge
1390
         between BB and DEST is a critical edge and we must split it,
1391
         because we can only record one implicit set per DEST basic block.  */
1392
      if (! single_pred_p (dest))
1393
        {
1394
          dest = split_edge (find_edge (bb, dest));
1395
          edges_split = true;
1396
        }
1397
 
1398
      if (implicit_sets_size <= (size_t) dest->index)
1399
      {
1400
        size_t old_implicit_sets_size = implicit_sets_size;
1401
        implicit_sets_size *= 2;
1402
        implicit_sets = XRESIZEVEC (rtx, implicit_sets, implicit_sets_size);
1403
        memset (implicit_sets + old_implicit_sets_size, 0,
1404
                (implicit_sets_size - old_implicit_sets_size) * sizeof (rtx));
1405
      }
1406
 
1407
      new_rtx = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
1408
                             XEXP (cond, 1));
1409
      implicit_sets[dest->index] = new_rtx;
1410
      if (dump_file)
1411
        {
1412
          fprintf(dump_file, "Implicit set of reg %d in ",
1413
                  REGNO (XEXP (cond, 0)));
1414
          fprintf(dump_file, "basic block %d\n", dest->index);
1415
        }
1416
      count++;
1417
    }
1418
 
1419
  if (dump_file)
1420
    fprintf (dump_file, "Found %d implicit sets\n", count);
1421
 
1422
  /* Confess our sins.  */
1423
  return edges_split;
1424
}
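
/* Example (editorial): a block ending in

     (set (pc) (if_then_else (eq (reg 60) (const_int 2))
                             (label_ref L) (pc)))

   yields the implicit set (set (reg 60) (const_int 2)) for the block
   reached when the branch is taken (after splitting that edge if it is
   critical); later phases then treat it like an ordinary assignment
   available on entry to that block.  */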
1425
 
1426
/* Bypass conditional jumps.  */
1427
 
1428
/* The value of last_basic_block at the beginning of the jump_bypass
1429
   pass.  The use of redirect_edge_and_branch_force may introduce new
1430
   basic blocks, but the data flow analysis is only valid for basic
1431
   block indices less than bypass_last_basic_block.  */
1432
 
1433
static int bypass_last_basic_block;
1434
 
1435
/* Find a set of REGNO to a constant that is available at the end of basic
1436
   block BB.  Return NULL if no such set is found.  Based heavily upon
1437
   find_avail_set.  */
1438
 
1439
static struct expr *
1440
find_bypass_set (int regno, int bb)
1441
{
1442
  struct expr *result = 0;
1443
 
1444
  for (;;)
1445
    {
1446
      rtx src;
1447
      struct expr *set = lookup_set (regno, &set_hash_table);
1448
 
1449
      while (set)
1450
        {
1451
          if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
1452
            break;
1453
          set = next_set (regno, set);
1454
        }
1455
 
1456
      if (set == 0)
1457
        break;
1458
 
1459
      src = set->src;
1460
      if (cprop_constant_p (src))
1461
        result = set;
1462
 
1463
      if (! REG_P (src))
1464
        break;
1465
 
1466
      regno = REGNO (src);
1467
    }
1468
  return result;
1469
}
1470
 
1471
/* Subroutine of bypass_block that checks whether a pseudo is killed by
1472
   any of the instructions inserted on an edge.  Jump bypassing places
1473
   condition code setters on CFG edges using insert_insn_on_edge.  This
1474
   function is required to check that our data flow analysis is still
1475
   valid prior to commit_edge_insertions.  */
1476
 
1477
static bool
1478
reg_killed_on_edge (const_rtx reg, const_edge e)
1479
{
1480
  rtx insn;
1481
 
1482
  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
1483
    if (INSN_P (insn) && reg_set_p (reg, insn))
1484
      return true;
1485
 
1486
  return false;
1487
}
1488
 
1489
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
1490
   basic block BB which has more than one predecessor.  If not NULL, SETCC
1491
   is the first instruction of BB, which is immediately followed by JUMP_INSN
1492
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
1493
   Returns nonzero if a change was made.
1494
 
1495
   During the jump bypassing pass, we may place copies of SETCC instructions
1496
   on CFG edges.  The following routine must be careful to pay attention to
1497
   these inserted insns when performing its transformations.  */
1498
 
1499
static int
1500
bypass_block (basic_block bb, rtx setcc, rtx jump)
1501
{
1502
  rtx insn, note;
1503
  edge e, edest;
1504
  int change;
1505
  int may_be_loop_header;
1506
  unsigned removed_p;
1507
  unsigned i;
1508
  edge_iterator ei;
1509
 
1510
  insn = (setcc != NULL) ? setcc : jump;
1511
 
1512
  /* Determine set of register uses in INSN.  */
1513
  reg_use_count = 0;
1514
  note_uses (&PATTERN (insn), find_used_regs, NULL);
1515
  note = find_reg_equal_equiv_note (insn);
1516
  if (note)
1517
    find_used_regs (&XEXP (note, 0), NULL);
1518
 
1519
  may_be_loop_header = false;
1520
  FOR_EACH_EDGE (e, ei, bb->preds)
1521
    if (e->flags & EDGE_DFS_BACK)
1522
      {
1523
        may_be_loop_header = true;
1524
        break;
1525
      }
1526
 
1527
  change = 0;
1528
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
1529
    {
1530
      removed_p = 0;
1531
 
1532
      if (e->flags & EDGE_COMPLEX)
1533
        {
1534
          ei_next (&ei);
1535
          continue;
1536
        }
1537
 
1538
      /* We can't redirect edges from new basic blocks.  */
1539
      if (e->src->index >= bypass_last_basic_block)
1540
        {
1541
          ei_next (&ei);
1542
          continue;
1543
        }
1544
 
1545
      /* The irreducible loops created by redirecting edges entering the
1546
         loop from outside would decrease the effectiveness of some of the
1547
         following optimizations, so prevent this.  */
1548
      if (may_be_loop_header
1549
          && !(e->flags & EDGE_DFS_BACK))
1550
        {
1551
          ei_next (&ei);
1552
          continue;
1553
        }
1554
 
1555
      for (i = 0; i < reg_use_count; i++)
1556
        {
1557
          rtx reg_used = reg_use_table[i];
1558
          unsigned int regno = REGNO (reg_used);
1559
          basic_block dest, old_dest;
1560
          struct expr *set;
1561
          rtx src, new_rtx;
1562
 
1563
          set = find_bypass_set (regno, e->src->index);
1564
 
1565
          if (! set)
1566
            continue;
1567
 
1568
          /* Check the data flow is valid after edge insertions.  */
1569
          if (e->insns.r && reg_killed_on_edge (reg_used, e))
1570
            continue;
1571
 
1572
          src = SET_SRC (pc_set (jump));
1573
 
1574
          if (setcc != NULL)
1575
            src = simplify_replace_rtx (src,
1576
                                        SET_DEST (PATTERN (setcc)),
1577
                                        SET_SRC (PATTERN (setcc)));
1578
 
1579
          new_rtx = simplify_replace_rtx (src, reg_used, set->src);

          /* Jump bypassing may have already placed instructions on
             edges of the CFG.  We can't bypass an outgoing edge that
             has instructions associated with it, as these insns won't
             get executed if the incoming edge is redirected.  */
          if (new_rtx == pc_rtx)
            {
              edest = FALLTHRU_EDGE (bb);
              dest = edest->insns.r ? NULL : edest->dest;
            }
          else if (GET_CODE (new_rtx) == LABEL_REF)
            {
              dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
              /* Don't bypass edges containing instructions.  */
              edest = find_edge (bb, dest);
              if (edest && edest->insns.r)
                dest = NULL;
            }
          else
            dest = NULL;

          /* Avoid unification of the edge with other edges from the
             original branch.  We would end up emitting the instruction
             on "both" edges.  */
          if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
              && find_edge (e->src, dest))
            dest = NULL;

          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
              && dest != EXIT_BLOCK_PTR)
            {
              redirect_edge_and_branch_force (e, dest);

              /* Copy the register setter to the redirected edge.
                 Don't copy CC0 setters, as CC0 is dead after jump.  */
              if (setcc)
                {
                  rtx pat = PATTERN (setcc);
                  if (!CC0_P (SET_DEST (pat)))
                    insert_insn_on_edge (copy_insn (pat), e);
                }

              if (dump_file != NULL)
                {
                  fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
                                      "in jump_insn %d equals constant ",
                           regno, INSN_UID (jump));
                  print_rtl (dump_file, set->src);
                  fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
                           e->src->index, old_dest->index, dest->index);
                }
              change = 1;
              removed_p = 1;
              break;
            }
        }
      if (!removed_p)
        ei_next (&ei);
    }
  return change;
}

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Return nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */
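
/* As an illustration (hypothetical RTL, not taken from a real dump): if
   r10 is known to be zero on exit from the source block of an incoming
   edge E, and BB contains only

     (set (pc) (if_then_else (ne (reg:SI 10) (const_int 0))
                             (label_ref 23) (pc)))

   then the condition folds to false for E, and E can be redirected
   straight to BB's fall-through successor.  */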

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (!single_pred_p (bb))
        {
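          /* Scan the block: accept at most one candidate register or CC0
             setter before the jump, bail out on anything else, and invoke
             bypass_block when the block ends in a suitable conditional or
             computed jump.  */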
          setcc = NULL_RTX;
          FOR_BB_INSNS (bb, insn)
            if (DEBUG_INSN_P (insn))
              continue;
            else if (NONJUMP_INSN_P (insn))
              {
                if (setcc)
                  break;
                if (GET_CODE (PATTERN (insn)) != SET)
                  break;

                dest = SET_DEST (PATTERN (insn));
                if (REG_P (dest) || CC0_P (dest))
                  setcc = insn;
                else
                  break;
              }
            else if (JUMP_P (insn))
              {
                if ((any_condjump_p (insn) || computed_jump_p (insn))
                    && onlyjump_p (insn))
                  changed |= bypass_block (bb, setcc, insn);
                break;
              }
            else if (INSN_P (insn))
              break;
        }
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}

/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with more
     graceful degradation.  */
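  /* For example, with 1000 basic blocks the limit below is
     20000 + 4 * 1000 = 24000 edges, i.e. an average of 24 edges per
     block before the pass gives up.  */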
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d edges/basic block",
               pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d registers",
               pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}

/* Main function for the CPROP pass.  */
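/* In outline: run a local const/copy propagation pass, record implicit
   sets exposed by conditional branches, build the hash table of SET
   expressions, solve the global availability problem, propagate into
   each basic block, and finally try to bypass conditional jumps.  */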

static int
one_cprop_pass (void)
{
  int i;
  int changed = 0;

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("const/copy propagation disabled")))
    return 0;

  global_const_prop_count = local_const_prop_count = 0;
  global_copy_prop_count = local_copy_prop_count = 0;

  bytes_used = 0;
  gcc_obstack_init (&cprop_obstack);

  /* Do a local const/copy propagation pass first.  The global pass
     only handles global opportunities.
     If the local pass changes something, remove any unreachable blocks
     because the CPROP global dataflow analysis may get into infinite
     loops for CFGs with unreachable blocks.

     FIXME: This local pass should not be necessary after CSE (but for
            some reason it still is).  It is also (proven) not necessary
            to run the local pass right after FWPROP.

     FIXME: The global analysis would not get into infinite loops if it
            used the DF solver (via df_simple_dataflow) instead of the
            solver implemented in this file.  */
  changed |= local_cprop_pass ();
  if (changed)
    delete_unreachable_blocks ();

  /* Determine implicit sets.  This may change the CFG (split critical
     edges if that exposes an implicit set).
     Note that find_implicit_sets() does not rely on up-to-date DF caches
     so that we do not have to re-run df_analyze() even if local CPROP
     changed something.
     ??? This could run earlier so that any uncovered implicit sets
         could be exploited in local_cprop_pass() also.  Later.  */
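  /* An implicit set is knowledge of a register value implied by the CFG
     rather than by an explicit SET: e.g. in the block reached only when
     "if (x == 4)" is true, x can be treated as the constant 4.  */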
  changed |= find_implicit_sets ();

  /* If local_cprop_pass() or find_implicit_sets() changed something,
     run df_analyze() to bring all insn caches up-to-date, and to take
     new basic blocks from edge splitting on the DF radar.
     NB: This also runs the fast DCE pass, because execute_rtl_cprop
     sets DF_LR_RUN_DCE.  */
  if (changed)
    df_analyze ();

  /* Initialize implicit_set_indexes array.  */
  implicit_set_indexes = XNEWVEC (int, last_basic_block);
  for (i = 0; i < last_basic_block; i++)
    implicit_set_indexes[i] = -1;

  alloc_hash_table (&set_hash_table);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (dump_file)
    dump_hash_table (dump_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      basic_block bb;
      rtx insn;

      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();

      free (implicit_set_indexes);
      implicit_set_indexes = NULL;

      /* Allocate vars to track sets of regs.  */
      reg_set_bitmap = ALLOC_REG_SET (NULL);

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
                      next_bb)
        {
          /* Reset tables used to keep track of what's still valid [since
             the start of the block].  */
          reset_opr_set_tables ();

          FOR_BB_INSNS (bb, insn)
            if (INSN_P (insn))
              {
                changed |= cprop_insn (insn);

                /* Keep track of everything modified by this insn.  */
                /* ??? Need to be careful w.r.t. mods done to INSN.
                       Don't call mark_oprs_set if we turned the
                       insn into a NOTE, or deleted the insn.  */
                if (! NOTE_P (insn) && ! INSN_DELETED_P (insn))
                  mark_oprs_set (insn);
              }
        }

      changed |= bypass_conditional_jumps ();

      FREE_REG_SET (reg_set_bitmap);
      free_cprop_mem ();
    }
  else
    {
      free (implicit_set_indexes);
      implicit_set_indexes = NULL;
    }

  free_hash_table (&set_hash_table);
  obstack_free (&cprop_obstack, NULL);

  if (dump_file)
    {
      fprintf (dump_file, "CPROP of %s, %d basic blocks, %d bytes needed, ",
               current_function_name (), n_basic_blocks, bytes_used);
      fprintf (dump_file, "%d local const props, %d local copy props, ",
               local_const_prop_count, local_copy_prop_count);
      fprintf (dump_file, "%d global const props, %d global copy props\n\n",
               global_const_prop_count, global_copy_prop_count);
    }

  return changed;
}

/* All the passes implemented in this file.  Each pass has its
   own gate and execute function, and at the end of the file a
   pass definition for passes.c.

   We do not construct an accurate cfg in functions which call
   setjmp, so none of these passes runs if the function calls
   setjmp.
   FIXME: Should just handle setjmp via REG_SETJMP notes.  */

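/* Gate: only run when optimizing, when -fgcse is enabled, when the
   function does not call setjmp (see above), and when the cprop debug
   counter has not been exhausted.  */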
static bool
gate_rtl_cprop (void)
{
  return optimize > 0 && flag_gcse
    && !cfun->calls_setjmp
    && dbg_cnt (cprop);
}

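/* Execute function: clean up the CFG, run DF analysis (with fast DCE
   enabled), do one cprop pass, and if anything changed request a CSE
   rerun after the global optimizations and clean up the CFG again.  */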
static unsigned int
execute_rtl_cprop (void)
{
  int changed;
  delete_unreachable_blocks ();
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();
  changed = one_cprop_pass ();
  flag_rerun_cse_after_global_opts |= changed;
  if (changed)
    cleanup_cfg (0);
  return 0;
}

struct rtl_opt_pass pass_rtl_cprop =
{
 {
  RTL_PASS,
  "cprop",                              /* name */
  gate_rtl_cprop,                       /* gate */
  execute_rtl_cprop,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CPROP,                             /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
 }
};
