/* Post reload partially redundant load elimination
   Copyright (C) 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination: try to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build an available-expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression to
       the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
         perform partial redundancy elimination; check if it's worth adding
         loads to make the load fully redundant.  If so, add loads and
         register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
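
/* An illustrative sketch (not part of the original sources; register
   numbers and the stack slot are invented) of the transformation this
   pass performs.  Consider a block BB3 reached from two predecessors,
   where the loaded memory is available at the end of BB1 only:

       BB1: r4 <- [sp+8]         BB2: [sp+8] not available
             \                        /
              \                      /
               BB3: r7 <- [sp+8]         <- partially redundant load

   If the heuristics below approve, the pass inserts the register copy
   "r7 <- r4" on the edge BB1->BB3, inserts a copy of the load
   "r7 <- [sp+8]" on the edge BB2->BB3, and then deletes the now fully
   redundant load in BB3.  */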


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a single linked
   list of occurrences.  */

/* The table itself.  */
static htab_t expr_table;

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value for this entry.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem  *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
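
/* A small invented example of how the CUID information is used (the
   numbers below are for illustration only):  suppose a basic block
   contains three real insns with CUIDs 7, 8 and 9, and the insn with
   CUID 8 sets hard register 3.  After scanning the block we have
   reg_avail_info[3] == 8, so for the REG case of register 3:
     - oprs_unchanged_p returns nonzero with AFTER_INSN true and the
       insn with CUID 9, because 8 < 9; register 3 is not changed in or
       after that insn, so it stays available to the end of the block;
     - oprs_unchanged_p returns zero with AFTER_INSN false, because
       reg_avail_info[3] != 0; register 3 was set since the start of the
       block.  */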


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx, bool);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void record_opr_changes (rtx);

static void find_mem_conflicts (rtx, rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static hashval_t hash_expr_for_htab (const void *);
static int expr_equiv_p (const void *, const void *);
static void insert_expr_in_table (rtx, rtx);
static struct expr *lookup_expr_in_table (rtx);
static int dump_hash_table_entry (void **, void *);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
static rtx get_avail_load_store_reg (rtx);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set (rtx);
static void compute_hash_table (void);

/* The workhorses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
                                                rtx,
                                                struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = xcalloc (get_max_uid () + 1, sizeof (int));
  i = 1;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = htab_create (MAX (i / 4, 13),
                            hash_expr_for_htab, expr_equiv_p, NULL);

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                           sizeof (struct modifies_mem));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  htab_delete (expr_table);

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  free (reg_avail_info);
}


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
                   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

static hashval_t
hash_expr_for_htab (const void *expp)
{
  struct expr *exp = (struct expr *) expp;
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const void *exp1p, const void *exp2p)
{
  struct expr *exp1 = (struct expr *) exp1p;
  struct expr *exp2 = (struct expr *) exp2p;
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}


/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
                                                    hash, INSERT);

  if (! (*slot))
    /* The expression isn't found, so insert it.  */
    *slot = cur_expr;
  else
    {
      /* The expression is already in the table, so roll back the
         obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
                                                  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = avail_occr;
      else
        last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
                                                    hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
static int
dump_hash_table_entry (void **slot, void *filep)
{
  struct expr *expr = (struct expr *) *slot;
  FILE *file = (FILE *) filep;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, expr->expr);
  fprintf (file,"\nhashcode: %u\n", expr->hash);
  fprintf (file,"list of occurrences:\n");
  occr = expr->avail_occr;
  while (occr)
    {
      rtx insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (expr_table),
           (long) htab_elements (expr_table),
           htab_collisions (expr_table));
  if (htab_elements (expr_table) > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      htab_traverse (expr_table, dump_hash_table_entry, file);
    }
  fprintf (file, "\n");
}


/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
        /* If the CUID of the last insn setting reg X is less than the
           CUID of INSN, then reg X is not changed in or after INSN.  */
        return reg_avail_info[REGNO (x)] < INSN_CUID (insn);
      else
        /* Reg X is not set before INSN in the current basic block if
           we have not yet recorded the CUID of an insn that touches
           the reg.  */
        return reg_avail_info[REGNO (x)] == 0;

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
        return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
            return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op,
                       rtx_addr_varies_p))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
           && INSN_CUID (setter) < uid_limit)
          || (! after_insn
              && INSN_CUID (setter) > uid_limit))
        {
          list_entry = list_entry->next;
          continue;
        }

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.
         It will set mems_conflict_p to nonzero if there may be a
         conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
        return 1;

      list_entry = list_entry->next;
    }
  return 0;
}


/* Record register first/last/block set information for REGNO in INSN.  */

static inline void
record_last_reg_set_info (rtx insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                                      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
         clobber the stack pointer though.  Some targets do argument
         pushes without adding REG_INC notes.  See e.g. PR25196,
         where a pushsi2 on i386 doesn't have REG_INC notes.  Note
         such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
        record_last_mem_set_info (last_set_insn);
      else
        record_last_reg_set_info (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, REGNO (XEXP (note, 0)));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
          record_last_reg_set_info (insn, regno);

      if (! CONST_OR_PURE_CALL_P (insn))
        record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (dest))
          /* Is SET_SRC something we want to gcse?  */
          && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
          /* Never consider insns touching the register stack.  It may
             create situations that reg-stack cannot handle (e.g. a stack
             register live across an abnormal edge).  */
          && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          && oprs_unchanged_p (src, insn, true))
        {
          insert_expr_in_table (src, insn);
        }
    }
  else if (REG_P (src))
    {
      /* Only record sets of pseudo-regs in the hash table.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (src))
          /* Is SET_DEST something we want to gcse?  */
          && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
          /* As above for STACK_REGS.  */
          && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
          /* Check if the memory expression is killed after insn.  */
          && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
          && oprs_unchanged_p (XEXP (dest, 0), insn, true))
        {
          insert_expr_in_table (dest, insn);
        }
    }
}
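
/* A purely illustrative example (RTL and register numbers invented):
   in a block containing

     (set (reg:SI 4) (mem:SI (reg:SI 6)))    ;; a load
     (set (mem:SI (reg:SI 7)) (reg:SI 5))    ;; a store

   hash_scan_set records the MEM source of the load, provided neither
   the loaded memory nor address register 6 is modified later in the
   block, and records the MEM destination of the store, provided the
   stored memory and address register 7 survive to the end of the block.
   The resulting table entries represent the block's AVAIL_OUT and are
   consumed by eliminate_partially_redundant_load.  */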


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx insn;

      /* First pass over the instructions records information used to
         determine when registers and memory are last set.
         Since we compute a "local" AVAIL_OUT, reset the tables that
         help us keep track of what has been modified since the start
         of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            record_opr_changes (insn);
        }

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
          hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}


/* Return the insn that sets register REG or clobbers it in between
   FROM_INSN and TO_INSN (exclusive of those two).
   Just like reg_set_between but for hard registers and not pseudos.  */

static rtx
reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  /* We are called after register allocation.  */
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        if (set_of (reg, insn) != NULL_RTX)
          return insn;
        if ((CALL_P (insn)
              && call_used_regs[REGNO (reg)])
            || find_reg_fusage (insn, CLOBBER, reg))
          return insn;

        if (FIND_REG_INC_NOTE (insn, reg))
          return insn;
      }

  return NULL_RTX;
}

/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
   (exclusive of those two). Similar to reg_used_between but for hard
   registers and not pseudos.  */

static rtx
reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  /* We are called after register allocation.  */
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        if (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && call_used_regs[REGNO (reg)])
            || find_reg_fusage (insn, USE, reg)
            || find_reg_fusage (insn, CLOBBER, reg))
          return insn;

        if (FIND_REG_INC_NOTE (insn, reg))
          return insn;
      }

  return NULL_RTX;
}

/* Return true if REG is used, set, or killed between the beginning of
   basic block BB and UP_TO_INSN.  Caches the result in reg_avail_info.  */

static bool
reg_set_or_used_since_bb_start (rtx reg, basic_block bb, rtx up_to_insn)
{
  rtx insn, start = PREV_INSN (BB_HEAD (bb));

  if (reg_avail_info[REGNO (reg)] != 0)
    return true;

  insn = reg_used_between_after_reload_p (reg, start, up_to_insn);
  if (! insn)
    insn = reg_set_between_after_reload_p (reg, start, up_to_insn);

  if (insn)
    reg_avail_info[REGNO (reg)] = INSN_CUID (insn);

  return insn != NULL_RTX;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST(PATTERN(insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
        return false;

      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
        return false;
    }
  return true;
}


/* Search for the occurrences of expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;
  return NULL;
}


/* This handles the case where several stores feed a partially redundant
   load.  It checks whether the redundancy elimination is possible and
   whether it's worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In at least one predecessor of the current basic block, the same
      expression is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
                                    struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_set_or_used_since_bb_start (dest, bb, insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
        {
          /* Check if the loaded register is not used.  */
          avail_insn = a_occr->insn;
          avail_reg = get_avail_load_store_reg (avail_insn);
          gcc_assert (avail_reg);

          /* Make sure we can generate a move from register avail_reg to
             dest.  */
          extract_insn (gen_move_insn (copy_rtx (dest),
                                       copy_rtx (avail_reg)));
          if (! constrain_operands (1)
              || reg_killed_on_edge (avail_reg, pred)
              || reg_used_on_edge (dest, pred))
            {
              avail_insn = NULL;
              continue;
            }
          if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
                                                next_pred_bb_end))
            /* AVAIL_INSN remains non-null.  */
            break;
          else
            avail_insn = NULL;
        }

      if (EDGE_CRITICAL_P (pred))
        critical_count += pred->count;

      if (avail_insn != NULL_RTX)
        {
          npred_ok++;
          ok_count += pred->count;
          if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
                                                    copy_rtx (avail_reg)))))
            {
              /* Check if there is going to be a split.  */
              if (EDGE_CRITICAL_P (pred))
                critical_edge_split = true;
            }
          else /* It's a dead move, no need to generate it.  */
            continue;
          occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                  sizeof (struct unoccr));
          occr->insn = avail_insn;
          occr->pred = pred;
          occr->next = avail_occrs;
          avail_occrs = occr;
          if (! rollback_unoccr)
            rollback_unoccr = occr;
        }
      else
        {
          /* Adding a load on a critical edge will cause a split.  */
          if (EDGE_CRITICAL_P (pred))
            critical_edge_split = true;
          not_ok_count += pred->count;
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                    sizeof (struct unoccr));
          unoccr->insn = NULL_RTX;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
          if (! rollback_unoccr)
            rollback_unoccr = unoccr;
        }
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_size && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not, so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
           || targetm.cannot_modify_jumps_p ())
          && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;
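
  /* A worked example of the two checks above, with invented counts:
     if the partially redundant load executes ok_count = 900 times along
     predecessors where a register copy is possible, the compensating
     loads that would have to be added execute not_ok_count = 100 times,
     and critical_count = 50, then the transformation proceeds only if
     900 >= GCSE_AFTER_RELOAD_PARTIAL_FRACTION * 100 and
     900 >= GCSE_AFTER_RELOAD_CRITICAL_FRACTION * 50, where both
     fractions are taken from --param settings.  */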

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
         memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                          copy_rtx (avail_reg)),
                           pred);
      stats.moves_inserted++;

      if (dump_file)
        fprintf (dump_file,
                 "generating move from %d to %d on edge from %d to %d\n",
                 REGNO (avail_reg),
                 REGNO (dest),
                 pred->src->index,
                 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
        {
          fprintf (dump_file,
                   "generating on edge from %d to %d a copy of load: ",
                   pred->src->index,
                   pred->dest->index);
          print_rtl (dump_file, PATTERN (insn));
          fprintf (dump_file, "\n");
        }
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available. If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next));

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
        {
          fprintf (dump_file, "deleting insn:\n");
          print_rtl_single (dump_file, insn);
          fprintf (dump_file, "\n");
        }
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

/* Perform the redundancy elimination as described before.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR,
                  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
        continue;

      /* Do not try anything on cold basic blocks.  */
      if (probably_cold_bb_p (bb))
        continue;

      /* Reset the table of things changed since the start of the current
         basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
         any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
        {
          /* Is it a load - of the form (set (reg) (mem))?  */
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SET
              && REG_P (SET_DEST (PATTERN (insn)))
              && MEM_P (SET_SRC (PATTERN (insn))))
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (!MEM_VOLATILE_P (src)
                  && GET_MODE (src) != BLKmode
                  && general_operand (src, GET_MODE (src))
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_unchanged_p (src, insn, false)
                  && !(flag_non_call_exceptions && may_trap_p (src))
                  && !side_effects_p (src)
                  /* Is the expression recorded?  */
                  && (expr = lookup_expr_in_table (src)) != NULL)
                {
                  /* We now have a load (insn) and an available memory at
                     its BB start (expr).  Try to remove the load if it is
                     redundant.  */
                  eliminate_partially_redundant_load (bb, insn, expr);
                }
            }

          /* Keep track of everything modified by this insn, so that we
             know what has been modified since the start of the current
             basic block.  */
          if (INSN_P (insn))
            record_opr_changes (insn);
        }
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
static int
delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *expr = (struct expr *) *slot;
  struct occr *occr;

  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p)
        {
          delete_insn (occr->insn);
          stats.insns_deleted++;

          if (dump_file)
            {
              fprintf (dump_file, "deleting insn:\n");
              print_rtl_single (dump_file, occr->insn);
              fprintf (dump_file, "\n");
            }
        }
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of GCSE after reload - clean up some redundant loads
   due to spilling.  */

void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{

  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (htab_elements (expr_table) > 0)
    {
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();

      if (dump_file)
        {
          fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
          fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
          fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
          fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
          fprintf (dump_file, "\n\n");
        }
    }

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  free_mem ();
}


static bool
gate_handle_gcse2 (void)
{
  return (optimize > 0 && flag_gcse_after_reload);
}


static void
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
}

struct tree_opt_pass pass_gcse2 =
{
  "gcse2",                              /* name */
  gate_handle_gcse2,                    /* gate */
  rest_of_handle_gcse2,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
  'J'                                   /* letter */
};