openrisc/trunk/gnu-src/gcc-4.2.2/gcc/postreload-gcse.c (rev 482)

/* Post reload partially redundant load elimination
   Copyright (C) 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
/* The following code implements GCSE after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after GCSE.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination: try to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build an available-expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression to
       the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
         Perform partial redundancy elimination; check if it's worth adding
         loads to make the load fully redundant.  If so, add loads and
         register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
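
/* Illustrative sketch, not part of the original file: a source-level view of
   the transformation described above.  The functions and the aliasing
   assumptions are hypothetical; the pass itself works on RTL after reload.

   Before: the load of *p at the join point is redundant only on the path
   through the 'then' arm, i.e. it is partially redundant.  */

static int partial_redundancy_before (int *p, int flag)
{
  int t = 0;

  if (flag)
    t = *p;            /* *p is loaded on this path ...               */
  else
    t = 42;            /* ... but not on this one.                    */
  return t + *p;       /* partially redundant load at the join point  */
}

/* After: a register copy is used where the value was already loaded, a load
   is added on the edge where it was not, and the load at the join point is
   deleted.  This mirrors steps 2 and 3 of the algorithm above.  */

static int partial_redundancy_after (int *p, int flag)
{
  int t, r;

  if (flag)
    {
      t = *p;
      r = t;           /* copy from the register that already holds *p  */
    }
  else
    {
      t = 42;
      r = *p;          /* load added on the formerly load-free path     */
    }
  return t + r;        /* the load at the join point has been deleted   */
}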

/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly linked
   list of occurrences.  */

/* The table itself.  */
static htab_t expr_table;

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The hash value for this entry.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with that element's number, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;
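
/* Illustrative sketch, not part of the original file: how a "last set CUID"
   table answers the two questions above.  The array and helpers below are
   hypothetical stand-ins for reg_avail_info and the REG case of
   oprs_unchanged_p; 0 means "not set since the start of the block".  */

static int last_set_cuid_example[16];

/* Step 1 question: is register REGNO unchanged from the insn with CUID
   INSN_CUID to the end of the block?  True when every recorded set
   precedes that insn.  */
static int
unchanged_after_example (int regno, int insn_cuid)
{
  return last_set_cuid_example[regno] < insn_cuid;
}

/* Step 2 question: is register REGNO unchanged from the start of the block
   up to the insn?  True when no set has been recorded for it at all.  */
static int
unchanged_before_example (int regno)
{
  return last_set_cuid_example[regno] == 0;
}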

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, but this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem  *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
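
/* Illustrative sketch, not part of the original file: how a dense CUID
   numbering can be derived from sparse UIDs.  The arrays and the notion of
   a "real" entry are hypothetical stand-ins for the numbering loop in
   alloc_mem below.  */

static void
number_cuids_example (const int *is_real_insn, int n, int *cuid)
{
  int i, next = 1;

  for (i = 0; i < n; i++)
    {
      if (is_real_insn[i])
        cuid[i] = next++;   /* real insns get consecutive, gap-free CUIDs */
      else
        cuid[i] = next;     /* other entries take the value the next real
                               insn will get */
    }
}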


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx, bool);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void record_opr_changes (rtx);

static void find_mem_conflicts (rtx, rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static hashval_t hash_expr_for_htab (const void *);
static int expr_equiv_p (const void *, const void *);
static void insert_expr_in_table (rtx, rtx);
static struct expr *lookup_expr_in_table (rtx);
static int dump_hash_table_entry (void **, void *);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
static rtx get_avail_load_store_reg (rtx);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set (rtx);
static void compute_hash_table (void);

/* The workhorses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
                                                rtx,
                                                struct expr *);
static void eliminate_partially_redundant_loads (void);

214
 
215
/* Allocate memory for the CUID mapping array and register/memory
216
   tracking tables.  */
217
 
218
static void
219
alloc_mem (void)
220
{
221
  int i;
222
  basic_block bb;
223
  rtx insn;
224
 
225
  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
226
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
227
  i = 1;
228
  FOR_EACH_BB (bb)
229
    FOR_BB_INSNS (bb, insn)
230
      {
231
        if (INSN_P (insn))
232
          uid_cuid[INSN_UID (insn)] = i++;
233
        else
234
          uid_cuid[INSN_UID (insn)] = i;
235
      }
236
 
237
  /* Allocate the available expressions hash table.  We don't want to
238
     make the hash table too small, but unnecessarily making it too large
239
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
240
     reasonable choice.  */
241
  expr_table = htab_create (MAX (i / 4, 13),
242
                            hash_expr_for_htab, expr_equiv_p, NULL);
243
 
244
  /* We allocate everything on obstacks because we often can roll back
245
     the whole obstack to some point.  Freeing obstacks is very fast.  */
246
  gcc_obstack_init (&expr_obstack);
247
  gcc_obstack_init (&occr_obstack);
248
  gcc_obstack_init (&unoccr_obstack);
249
  gcc_obstack_init (&modifies_mem_obstack);
250
 
251
  /* Working array used to track the last set for each register
252
     in the current block.  */
253
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));
254
 
255
  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
256
     can roll it back in reset_opr_set_tables.  */
257
  modifies_mem_obstack_bottom =
258
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
259
                                           sizeof (struct modifies_mem));
260
}
261
 
262
/* Free memory allocated by alloc_mem.  */
263
 
264
static void
265
free_mem (void)
266
{
267
  free (uid_cuid);
268
 
269
  htab_delete (expr_table);
270
 
271
  obstack_free (&expr_obstack, NULL);
272
  obstack_free (&occr_obstack, NULL);
273
  obstack_free (&unoccr_obstack, NULL);
274
  obstack_free (&modifies_mem_obstack, NULL);
275
 
276
  free (reg_avail_info);
277
}
278
 
279
 
280
/* Hash expression X.
281
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
282
   or if the expression contains something we don't want to insert in the
283
   table.  */
284
 
285
static hashval_t
286
hash_expr (rtx x, int *do_not_record_p)
287
{
288
  *do_not_record_p = 0;
289
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
290
                   NULL,  /*have_reg_qty=*/false);
291
}
292
 
293
/* Callback for hashtab.
294
   Return the hash value for expression EXP.  We don't actually hash
295
   here, we just return the cached hash value.  */
296
 
297
static hashval_t
298
hash_expr_for_htab (const void *expp)
299
{
300
  struct expr *exp = (struct expr *) expp;
301
  return exp->hash;
302
}
303
 
304
/* Callback for hashtab.
305
   Return nonzero if exp1 is equivalent to exp2.  */
306
 
307
static int
308
expr_equiv_p (const void *exp1p, const void *exp2p)
309
{
310
  struct expr *exp1 = (struct expr *) exp1p;
311
  struct expr *exp2 = (struct expr *) exp2p;
312
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
313
 
314
  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
315
  return equiv_p;
316
}
317
 
318
 
319
/* Insert expression X in INSN in the hash TABLE.
320
   If it is already present, record it as the last occurrence in INSN's
321
   basic block.  */
322
 
323
static void
324
insert_expr_in_table (rtx x, rtx insn)
325
{
326
  int do_not_record_p;
327
  hashval_t hash;
328
  struct expr *cur_expr, **slot;
329
  struct occr *avail_occr, *last_occr = NULL;
330
 
331
  hash = hash_expr (x, &do_not_record_p);
332
 
333
  /* Do not insert expression in the table if it contains volatile operands,
334
     or if hash_expr determines the expression is something we don't want
335
     to or can't handle.  */
336
  if (do_not_record_p)
337
    return;
338
 
339
  /* We anticipate that redundant expressions are rare, so for convenience
340
     allocate a new hash table element here already and set its fields.
341
     If we don't do this, we need a hack with a static struct expr.  Anyway,
342
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
343
     we're going to see more expressions later on.  */
344
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
345
                                            sizeof (struct expr));
346
  cur_expr->expr = x;
347
  cur_expr->hash = hash;
348
  cur_expr->avail_occr = NULL;
349
 
350
  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
351
                                                    hash, INSERT);
352
 
353
  if (! (*slot))
354
    /* The expression isn't found, so insert it.  */
355
    *slot = cur_expr;
356
  else
357
    {
358
      /* The expression is already in the table, so roll back the
359
         obstack and use the existing table entry.  */
360
      obstack_free (&expr_obstack, cur_expr);
361
      cur_expr = *slot;
362
    }
363
 
364
  /* Search for another occurrence in the same basic block.  */
365
  avail_occr = cur_expr->avail_occr;
366
  while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
367
    {
368
      /* If an occurrence isn't found, save a pointer to the end of
369
         the list.  */
370
      last_occr = avail_occr;
371
      avail_occr = avail_occr->next;
372
    }
373
 
374
  if (avail_occr)
375
    /* Found another instance of the expression in the same basic block.
376
       Prefer this occurrence to the currently recorded one.  We want
377
       the last one in the block and the block is scanned from start
378
       to end.  */
379
    avail_occr->insn = insn;
380
  else
381
    {
382
      /* First occurrence of this expression in this basic block.  */
383
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
384
                                                  sizeof (struct occr));
385
 
386
      /* First occurrence of this expression in any block?  */
387
      if (cur_expr->avail_occr == NULL)
388
        cur_expr->avail_occr = avail_occr;
389
      else
390
        last_occr->next = avail_occr;
391
 
392
      avail_occr->insn = insn;
393
      avail_occr->next = NULL;
394
      avail_occr->deleted_p = 0;
395
    }
396
}
397
 
398
 
399
/* Lookup pattern PAT in the expression hash table.
400
   The result is a pointer to the table entry, or NULL if not found.  */
401
 
402
static struct expr *
403
lookup_expr_in_table (rtx pat)
404
{
405
  int do_not_record_p;
406
  struct expr **slot, *tmp_expr;
407
  hashval_t hash = hash_expr (pat, &do_not_record_p);
408
 
409
  if (do_not_record_p)
410
    return NULL;
411
 
412
  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
413
                                            sizeof (struct expr));
414
  tmp_expr->expr = pat;
415
  tmp_expr->hash = hash;
416
  tmp_expr->avail_occr = NULL;
417
 
418
  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
419
                                                    hash, INSERT);
420
  obstack_free (&expr_obstack, tmp_expr);
421
 
422
  if (!slot)
423
    return NULL;
424
  else
425
    return (*slot);
426
}
427
 
428
 
429
/* Dump all expressions and occurrences that are currently in the
430
   expression hash table to FILE.  */
431
 
432
/* This helper is called via htab_traverse.  */
433
static int
434
dump_hash_table_entry (void **slot, void *filep)
435
{
436
  struct expr *expr = (struct expr *) *slot;
437
  FILE *file = (FILE *) filep;
438
  struct occr *occr;
439
 
440
  fprintf (file, "expr: ");
441
  print_rtl (file, expr->expr);
442
  fprintf (file,"\nhashcode: %u\n", expr->hash);
443
  fprintf (file,"list of occurrences:\n");
444
  occr = expr->avail_occr;
445
  while (occr)
446
    {
447
      rtx insn = occr->insn;
448
      print_rtl_single (file, insn);
449
      fprintf (file, "\n");
450
      occr = occr->next;
451
    }
452
  fprintf (file, "\n");
453
  return 1;
454
}
455
 
456
static void
457
dump_hash_table (FILE *file)
458
{
459
  fprintf (file, "\n\nexpression hash table\n");
460
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
461
           (long) htab_size (expr_table),
462
           (long) htab_elements (expr_table),
463
           htab_collisions (expr_table));
464
  if (htab_elements (expr_table) > 0)
465
    {
466
      fprintf (file, "\n\ntable entries:\n");
467
      htab_traverse (expr_table, dump_hash_table_entry, file);
468
    }
469
  fprintf (file, "\n");
470
}
471
 
472
 
473
/* Return nonzero if the operands of expression X are unchanged
474
   1) from the start of INSN's basic block up to but not including INSN
475
      if AFTER_INSN is false, or
476
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */
477
 
478
static bool
479
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
480
{
481
  int i, j;
482
  enum rtx_code code;
483
  const char *fmt;
484
 
485
  if (x == 0)
486
    return 1;
487
 
488
  code = GET_CODE (x);
489
  switch (code)
490
    {
491
    case REG:
492
      /* We are called after register allocation.  */
493
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
494
      if (after_insn)
495
        /* If the CUID of the last insn setting reg X is less than the
496
           CUID of INSN, then reg X is not changed in or after INSN.  */
497
        return reg_avail_info[REGNO (x)] < INSN_CUID (insn);
498
      else
499
        /* Reg X is not set before INSN in the current basic block if
500
           we have not yet recorded the CUID of an insn that touches
501
           the reg.  */
502
        return reg_avail_info[REGNO (x)] == 0;
503
 
504
    case MEM:
505
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
506
        return 0;
507
      else
508
        return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);
509
 
510
    case PC:
511
    case CC0: /*FIXME*/
512
    case CONST:
513
    case CONST_INT:
514
    case CONST_DOUBLE:
515
    case CONST_VECTOR:
516
    case SYMBOL_REF:
517
    case LABEL_REF:
518
    case ADDR_VEC:
519
    case ADDR_DIFF_VEC:
520
      return 1;
521
 
522
    case PRE_DEC:
523
    case PRE_INC:
524
    case POST_DEC:
525
    case POST_INC:
526
    case PRE_MODIFY:
527
    case POST_MODIFY:
528
      if (after_insn)
529
        return 0;
530
      break;
531
 
532
    default:
533
      break;
534
    }
535
 
536
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
537
    {
538
      if (fmt[i] == 'e')
539
        {
540
          if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
541
            return 0;
542
        }
543
      else if (fmt[i] == 'E')
544
        for (j = 0; j < XVECLEN (x, i); j++)
545
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
546
            return 0;
547
    }
548
 
549
  return 1;
550
}
551
 
552
 
553
/* Used for communication between find_mem_conflicts and
554
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
555
   conflict between two memory references.
556
   This is a bit of a hack to work around the limitations of note_stores.  */
557
static int mems_conflict_p;
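
/* Illustrative sketch, not part of the original file: the callback-plus-flag
   pattern referred to above.  A note_stores-style walker invokes a void
   callback once per store destination, so the answer has to travel through a
   file-scope flag, as mems_conflict_p does for find_mem_conflicts below.
   All names here are hypothetical.  */

static int example_conflict_found;

static void
example_visit_dest (void *dest, void *watched)
{
  if (dest == watched)               /* stand-in for the real aliasing test */
    example_conflict_found = 1;
}

static int
example_any_conflict (void **dests, int ndests, void *watched)
{
  int i;

  example_conflict_found = 0;
  for (i = 0; i < ndests; i++)
    example_visit_dest (dests[i], watched);  /* walker calls back per dest */
  return example_conflict_found;
}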
558
 
559
/* DEST is the output of an instruction.  If it is a memory reference, and
560
   possibly conflicts with the load found in DATA, then set mems_conflict_p
561
   to a nonzero value.  */
562
 
563
static void
564
find_mem_conflicts (rtx dest, rtx setter ATTRIBUTE_UNUSED,
565
                    void *data)
566
{
567
  rtx mem_op = (rtx) data;
568
 
569
  while (GET_CODE (dest) == SUBREG
570
         || GET_CODE (dest) == ZERO_EXTRACT
571
         || GET_CODE (dest) == STRICT_LOW_PART)
572
    dest = XEXP (dest, 0);
573
 
574
  /* If DEST is not a MEM, then it will not conflict with the load.  Note
575
     that function calls are assumed to clobber memory, but are handled
576
     elsewhere.  */
577
  if (! MEM_P (dest))
578
    return;
579
 
580
  if (true_dependence (dest, GET_MODE (dest), mem_op,
581
                       rtx_addr_varies_p))
582
    mems_conflict_p = 1;
583
}
584
 
585
 
586
/* Return nonzero if the expression in X (a memory reference) is killed
587
   in the current basic block before (if AFTER_INSN is false) or after
588
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
589
 
590
   This function assumes that the modifies_mem table is flushed when
591
   the hash table construction or redundancy elimination phases start
592
   processing a new basic block.  */
593
 
594
static int
595
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
596
{
597
  struct modifies_mem *list_entry = modifies_mem_list;
598
 
599
  while (list_entry)
600
    {
601
      rtx setter = list_entry->insn;
602
 
603
      /* Ignore entries in the list that do not apply.  */
604
      if ((after_insn
605
           && INSN_CUID (setter) < uid_limit)
606
          || (! after_insn
607
              && INSN_CUID (setter) > uid_limit))
608
        {
609
          list_entry = list_entry->next;
610
          continue;
611
        }
612
 
613
      /* If SETTER is a call everything is clobbered.  Note that calls
614
         to pure functions are never put on the list, so we need not
615
         worry about them.  */
616
      if (CALL_P (setter))
617
        return 1;
618
 
619
      /* SETTER must be an insn of some kind that sets memory.  Call
620
         note_stores to examine each hunk of memory that is modified.
621
         It will set mems_conflict_p to nonzero if there may be a
622
         conflict between X and SETTER.  */
623
      mems_conflict_p = 0;
624
      note_stores (PATTERN (setter), find_mem_conflicts, x);
625
      if (mems_conflict_p)
626
        return 1;
627
 
628
      list_entry = list_entry->next;
629
    }
630
  return 0;
631
}
632
 
633
 
634
/* Record that hard register REGNO was last set in INSN.  */
635
 
636
static inline void
637
record_last_reg_set_info (rtx insn, int regno)
638
{
639
  reg_avail_info[regno] = INSN_CUID (insn);
640
}
641
 
642
 
643
/* Record memory modification information for INSN.  We do not actually care
644
   about the memory location(s) that are set, or even how they are set (consider
645
   a CALL_INSN).  We merely need to record which insns modify memory.  */
646
 
647
static void
648
record_last_mem_set_info (rtx insn)
649
{
650
  struct modifies_mem *list_entry;
651
 
652
  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
653
                                                      sizeof (struct modifies_mem));
654
  list_entry->insn = insn;
655
  list_entry->next = modifies_mem_list;
656
  modifies_mem_list = list_entry;
657
}
658
 
659
/* Called from compute_hash_table via note_stores to handle one
660
   SET or CLOBBER in an insn.  DATA is really the instruction in which
661
   the SET is taking place.  */
662
 
663
static void
664
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
665
{
666
  rtx last_set_insn = (rtx) data;
667
 
668
  if (GET_CODE (dest) == SUBREG)
669
    dest = SUBREG_REG (dest);
670
 
671
  if (REG_P (dest))
672
    record_last_reg_set_info (last_set_insn, REGNO (dest));
673
  else if (MEM_P (dest))
674
    {
675
      /* Ignore pushes, they don't clobber memory.  They may still
676
         clobber the stack pointer though.  Some targets do argument
677
         pushes without adding REG_INC notes.  See e.g. PR25196,
678
         where a pushsi2 on i386 doesn't have REG_INC notes.  Note
679
         such changes here too.  */
680
      if (! push_operand (dest, GET_MODE (dest)))
681
        record_last_mem_set_info (last_set_insn);
682
      else
683
        record_last_reg_set_info (last_set_insn, STACK_POINTER_REGNUM);
684
    }
685
}
686
 
687
 
688
/* Reset tables used to keep track of what's still available since the
689
   start of the block.  */
690
 
691
static void
692
reset_opr_set_tables (void)
693
{
694
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
695
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
696
  modifies_mem_list = NULL;
697
}
698
 
699
 
700
/* Record things set by INSN.
701
   This data is used by oprs_unchanged_p.  */
702
 
703
static void
704
record_opr_changes (rtx insn)
705
{
706
  rtx note;
707
 
708
  /* Find all stores and record them.  */
709
  note_stores (PATTERN (insn), record_last_set_info, insn);
710
 
711
  /* Also record autoincremented REGs for this insn as changed.  */
712
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
713
    if (REG_NOTE_KIND (note) == REG_INC)
714
      record_last_reg_set_info (insn, REGNO (XEXP (note, 0)));
715
 
716
  /* Finally, if this is a call, record all call clobbers.  */
717
  if (CALL_P (insn))
718
    {
719
      unsigned int regno;
720
 
721
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
722
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
723
          record_last_reg_set_info (insn, regno);
724
 
725
      if (! CONST_OR_PURE_CALL_P (insn))
726
        record_last_mem_set_info (insn);
727
    }
728
}
729
 
730
 
731
/* Scan the pattern of INSN and add an entry to the hash TABLE.
732
   After reload we are interested in loads/stores only.  */
733
 
734
static void
735
hash_scan_set (rtx insn)
736
{
737
  rtx pat = PATTERN (insn);
738
  rtx src = SET_SRC (pat);
739
  rtx dest = SET_DEST (pat);
740
 
741
  /* We are only interested in loads and stores.  */
742
  if (! MEM_P (src) && ! MEM_P (dest))
743
    return;
744
 
745
  /* Don't mess with jumps and nops.  */
746
  if (JUMP_P (insn) || set_noop_p (pat))
747
    return;
748
 
749
  if (REG_P (dest))
750
    {
751
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
752
          can_copy_p (GET_MODE (dest))
753
          /* Is SET_SRC something we want to gcse?  */
754
          && general_operand (src, GET_MODE (src))
755
#ifdef STACK_REGS
756
          /* Never consider insns touching the register stack.  It may
757
             create situations that reg-stack cannot handle (e.g. a stack
758
             register live across an abnormal edge).  */
759
          && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
760
#endif
761
          /* An expression is not available if its operands are
762
             subsequently modified, including this insn.  */
763
          && oprs_unchanged_p (src, insn, true))
764
        {
765
          insert_expr_in_table (src, insn);
766
        }
767
    }
768
  else if (REG_P (src))
769
    {
770
      /* A store: record the memory destination in the hash table if we can.  */
771
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
772
          can_copy_p (GET_MODE (src))
773
          /* Is SET_DEST something we want to gcse?  */
774
          && general_operand (dest, GET_MODE (dest))
775
#ifdef STACK_REGS
776
          /* As above for STACK_REGS.  */
777
          && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
778
#endif
779
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
780
          /* Check if the memory expression is killed after insn.  */
781
          && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
782
          && oprs_unchanged_p (XEXP (dest, 0), insn, true))
783
        {
784
          insert_expr_in_table (dest, insn);
785
        }
786
    }
787
}
788
 
789
 
790
/* Create hash table of memory expressions available at end of basic
791
   blocks.  Basically you should think of this hash table as the
792
   representation of AVAIL_OUT.  This is the set of expressions that
793
   is generated in a basic block and not killed before the end of the
794
   same basic block.  Notice that this is really a local computation.  */
795
 
796
static void
797
compute_hash_table (void)
798
{
799
  basic_block bb;
800
 
801
  FOR_EACH_BB (bb)
802
    {
803
      rtx insn;
804
 
805
      /* First pass over the instructions records information used to
806
         determine when registers and memory are last set.
807
         Since we compute a "local" AVAIL_OUT, reset the tables that
808
         help us keep track of what has been modified since the start
809
         of the block.  */
810
      reset_opr_set_tables ();
811
      FOR_BB_INSNS (bb, insn)
812
        {
813
          if (INSN_P (insn))
814
            record_opr_changes (insn);
815
        }
816
 
817
      /* The next pass actually builds the hash table.  */
818
      FOR_BB_INSNS (bb, insn)
819
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
820
          hash_scan_set (insn);
821
    }
822
}
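
/* Illustrative sketch, not part of the original file: a source-level picture
   of the "local AVAIL_OUT" that compute_hash_table builds, under the
   hypothetical assumption that p and q may alias.  */

static int
avail_out_example (int *p, int *q, int v)
{
  int a, b;

  a = *p;         /* the load of *p becomes available here ...               */
  *q = v;         /* ... but this store may alias p and kill it, while       */
                  /* making the expression *q itself available.              */
  b = *p;         /* loading *p again makes it available once more; being    */
                  /* the last occurrence in the block, this is the insn      */
                  /* that ends up recorded.                                  */
  return a + b;   /* local AVAIL_OUT for this block: { *p, *q }              */
}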
823
 
824
 
825
/* Check if register REG is killed in any insn waiting to be inserted on
826
   edge E.  This function is required to check that our data flow analysis
827
   is still valid prior to commit_edge_insertions.  */
828
 
829
static bool
830
reg_killed_on_edge (rtx reg, edge e)
831
{
832
  rtx insn;
833
 
834
  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
835
    if (INSN_P (insn) && reg_set_p (reg, insn))
836
      return true;
837
 
838
  return false;
839
}
840
 
841
/* Similar to above - check if register REG is used in any insn waiting
842
   to be inserted on edge E.
843
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
844
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */
845
 
846
static bool
847
reg_used_on_edge (rtx reg, edge e)
848
{
849
  rtx insn;
850
 
851
  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
852
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
853
      return true;
854
 
855
  return false;
856
}
857
 
858
 
859
/* Return the insn that sets register REG or clobbers it in between
860
   FROM_INSN and TO_INSN (exclusive of those two).
861
   Just like reg_set_between but for hard registers and not pseudos.  */
862
 
863
static rtx
864
reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
865
{
866
  rtx insn;
867
 
868
  /* We are called after register allocation.  */
869
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
870
 
871
  if (from_insn == to_insn)
872
    return NULL_RTX;
873
 
874
  for (insn = NEXT_INSN (from_insn);
875
       insn != to_insn;
876
       insn = NEXT_INSN (insn))
877
    if (INSN_P (insn))
878
      {
879
        if (set_of (reg, insn) != NULL_RTX)
880
          return insn;
881
        if ((CALL_P (insn)
882
              && call_used_regs[REGNO (reg)])
883
            || find_reg_fusage (insn, CLOBBER, reg))
884
          return insn;
885
 
886
        if (FIND_REG_INC_NOTE (insn, reg))
887
          return insn;
888
      }
889
 
890
  return NULL_RTX;
891
}
892
 
893
/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
894
   (exclusive of those two). Similar to reg_used_between but for hard
895
   registers and not pseudos.  */
896
 
897
static rtx
898
reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
899
{
900
  rtx insn;
901
 
902
  /* We are called after register allocation.  */
903
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
904
 
905
  if (from_insn == to_insn)
906
    return NULL_RTX;
907
 
908
  for (insn = NEXT_INSN (from_insn);
909
       insn != to_insn;
910
       insn = NEXT_INSN (insn))
911
    if (INSN_P (insn))
912
      {
913
        if (reg_overlap_mentioned_p (reg, PATTERN (insn))
914
            || (CALL_P (insn)
915
                && call_used_regs[REGNO (reg)])
916
            || find_reg_fusage (insn, USE, reg)
917
            || find_reg_fusage (insn, CLOBBER, reg))
918
          return insn;
919
 
920
        if (FIND_REG_INC_NOTE (insn, reg))
921
          return insn;
922
      }
923
 
924
  return NULL_RTX;
925
}
926
 
927
/* Return true if REG is used, set, or killed between the beginning of
928
   basic block BB and UP_TO_INSN.  Caches the result in reg_avail_info.  */
929
 
930
static bool
931
reg_set_or_used_since_bb_start (rtx reg, basic_block bb, rtx up_to_insn)
932
{
933
  rtx insn, start = PREV_INSN (BB_HEAD (bb));
934
 
935
  if (reg_avail_info[REGNO (reg)] != 0)
936
    return true;
937
 
938
  insn = reg_used_between_after_reload_p (reg, start, up_to_insn);
939
  if (! insn)
940
    insn = reg_set_between_after_reload_p (reg, start, up_to_insn);
941
 
942
  if (insn)
943
    reg_avail_info[REGNO (reg)] = INSN_CUID (insn);
944
 
945
  return insn != NULL_RTX;
946
}
947
 
948
/* Return the loaded/stored register of a load/store instruction.  */
949
 
950
static rtx
951
get_avail_load_store_reg (rtx insn)
952
{
953
  if (REG_P (SET_DEST (PATTERN (insn))))
954
    /* A load.  */
955
    return SET_DEST(PATTERN(insn));
956
  else
957
    {
958
      /* A store.  */
959
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
960
      return SET_SRC (PATTERN (insn));
961
    }
962
}
963
 
964
/* Return nonzero if the predecessors of BB are "well behaved".  */
965
 
966
static bool
967
bb_has_well_behaved_predecessors (basic_block bb)
968
{
969
  edge pred;
970
  edge_iterator ei;
971
 
972
  if (EDGE_COUNT (bb->preds) == 0)
973
    return false;
974
 
975
  FOR_EACH_EDGE (pred, ei, bb->preds)
976
    {
977
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
978
        return false;
979
 
980
      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
981
        return false;
982
    }
983
  return true;
984
}
985
 
986
 
987
/* Search for an occurrence of the expression in BB.  */
988
 
989
static struct occr*
990
get_bb_avail_insn (basic_block bb, struct occr *occr)
991
{
992
  for (; occr != NULL; occr = occr->next)
993
    if (BLOCK_FOR_INSN (occr->insn) == bb)
994
      return occr;
995
  return NULL;
996
}
997
 
998
 
999
/* This handles the case where several stores feed a partially redundant
1000
   load. It checks if the redundancy elimination is possible and if it's
1001
   worth it.
1002
 
1003
   Redundancy elimination is possible if,
1004
   1) None of the operands of an insn have been modified since the start
1005
      of the current basic block.
1006
   2) In any predecessor of the current basic block, the same expression
1007
      is generated.
1008
 
1009
   See the function body for the heuristics that determine if eliminating
1010
   a redundancy is also worth doing, assuming it is possible.  */
1011
 
1012
static void
1013
eliminate_partially_redundant_load (basic_block bb, rtx insn,
1014
                                    struct expr *expr)
1015
{
1016
  edge pred;
1017
  rtx avail_insn = NULL_RTX;
1018
  rtx avail_reg;
1019
  rtx dest, pat;
1020
  struct occr *a_occr;
1021
  struct unoccr *occr, *avail_occrs = NULL;
1022
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
1023
  int npred_ok = 0;
1024
  gcov_type ok_count = 0; /* Redundant load execution count.  */
1025
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
1026
  edge_iterator ei;
1027
  bool critical_edge_split = false;
1028
 
1029
  /* The execution count of the loads to be added to make the
1030
     load fully redundant.  */
1031
  gcov_type not_ok_count = 0;
1032
  basic_block pred_bb;
1033
 
1034
  pat = PATTERN (insn);
1035
  dest = SET_DEST (pat);
1036
 
1037
  /* Check that the loaded register is not used, set, or killed from the
1038
     beginning of the block.  */
1039
  if (reg_set_or_used_since_bb_start (dest, bb, insn))
1040
    return;
1041
 
1042
  /* Check potential for replacing load with copy for predecessors.  */
1043
  FOR_EACH_EDGE (pred, ei, bb->preds)
1044
    {
1045
      rtx next_pred_bb_end;
1046
 
1047
      avail_insn = NULL_RTX;
1048
      avail_reg = NULL_RTX;
1049
      pred_bb = pred->src;
1050
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
1051
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
1052
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
1053
        {
1054
          /* Check if the loaded register is not used.  */
1055
          avail_insn = a_occr->insn;
1056
          avail_reg = get_avail_load_store_reg (avail_insn);
1057
          gcc_assert (avail_reg);
1058
 
1059
          /* Make sure we can generate a move from register avail_reg to
1060
             dest.  */
1061
          extract_insn (gen_move_insn (copy_rtx (dest),
1062
                                       copy_rtx (avail_reg)));
1063
          if (! constrain_operands (1)
1064
              || reg_killed_on_edge (avail_reg, pred)
1065
              || reg_used_on_edge (dest, pred))
1066
            {
1067
              avail_insn = NULL;
1068
              continue;
1069
            }
1070
          if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
1071
                                                next_pred_bb_end))
1072
            /* AVAIL_INSN remains non-null.  */
1073
            break;
1074
          else
1075
            avail_insn = NULL;
1076
        }
1077
 
1078
      if (EDGE_CRITICAL_P (pred))
1079
        critical_count += pred->count;
1080
 
1081
      if (avail_insn != NULL_RTX)
1082
        {
1083
          npred_ok++;
1084
          ok_count += pred->count;
1085
          if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
1086
                                                    copy_rtx (avail_reg)))))
1087
            {
1088
              /* Check if there is going to be a split.  */
1089
              if (EDGE_CRITICAL_P (pred))
1090
                critical_edge_split = true;
1091
            }
1092
          else /* It's a dead move, no need to generate it.  */
1093
            continue;
1094
          occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1095
                                                  sizeof (struct unoccr));
1096
          occr->insn = avail_insn;
1097
          occr->pred = pred;
1098
          occr->next = avail_occrs;
1099
          avail_occrs = occr;
1100
          if (! rollback_unoccr)
1101
            rollback_unoccr = occr;
1102
        }
1103
      else
1104
        {
1105
          /* Adding a load on a critical edge will cause a split.  */
1106
          if (EDGE_CRITICAL_P (pred))
1107
            critical_edge_split = true;
1108
          not_ok_count += pred->count;
1109
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1110
                                                    sizeof (struct unoccr));
1111
          unoccr->insn = NULL_RTX;
1112
          unoccr->pred = pred;
1113
          unoccr->next = unavail_occrs;
1114
          unavail_occrs = unoccr;
1115
          if (! rollback_unoccr)
1116
            rollback_unoccr = unoccr;
1117
        }
1118
    }
1119
 
1120
  if (/* No load can be replaced by copy.  */
1121
      npred_ok == 0
1122
      /* Prevent exploding the code.  */
1123
      || (optimize_size && npred_ok > 1)
1124
      /* If we don't have profile information we cannot tell if splitting
1125
         a critical edge is profitable or not so don't do it.  */
1126
      || ((! profile_info || ! flag_branch_probabilities
1127
           || targetm.cannot_modify_jumps_p ())
1128
          && critical_edge_split))
1129
    goto cleanup;
1130
 
1131
  /* Check if it's worth applying the partial redundancy elimination.  */
1132
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
1133
    goto cleanup;
1134
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
1135
    goto cleanup;
1136
 
1137
  /* Generate moves to the loaded register from where
1138
     the memory is available.  */
1139
  for (occr = avail_occrs; occr; occr = occr->next)
1140
    {
1141
      avail_insn = occr->insn;
1142
      pred = occr->pred;
1143
      /* Set avail_reg to be the register having the value of the
1144
         memory.  */
1145
      avail_reg = get_avail_load_store_reg (avail_insn);
1146
      gcc_assert (avail_reg);
1147
 
1148
      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
1149
                                          copy_rtx (avail_reg)),
1150
                           pred);
1151
      stats.moves_inserted++;
1152
 
1153
      if (dump_file)
1154
        fprintf (dump_file,
1155
                 "generating move from %d to %d on edge from %d to %d\n",
1156
                 REGNO (avail_reg),
1157
                 REGNO (dest),
1158
                 pred->src->index,
1159
                 pred->dest->index);
1160
    }
1161
 
1162
  /* Regenerate loads where the memory is unavailable.  */
1163
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
1164
    {
1165
      pred = unoccr->pred;
1166
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
1167
      stats.copies_inserted++;
1168
 
1169
      if (dump_file)
1170
        {
1171
          fprintf (dump_file,
1172
                   "generating on edge from %d to %d a copy of load: ",
1173
                   pred->src->index,
1174
                   pred->dest->index);
1175
          print_rtl (dump_file, PATTERN (insn));
1176
          fprintf (dump_file, "\n");
1177
        }
1178
    }
1179
 
1180
  /* Delete the insn if it is not available in this block and mark it
1181
     for deletion if it is available. If insn is available it may help
1182
     discover additional redundancies, so mark it for later deletion.  */
1183
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
1184
       a_occr && (a_occr->insn != insn);
1185
       a_occr = get_bb_avail_insn (bb, a_occr->next));
1186
 
1187
  if (!a_occr)
1188
    {
1189
      stats.insns_deleted++;
1190
 
1191
      if (dump_file)
1192
        {
1193
          fprintf (dump_file, "deleting insn:\n");
1194
          print_rtl_single (dump_file, insn);
1195
          fprintf (dump_file, "\n");
1196
        }
1197
      delete_insn (insn);
1198
    }
1199
  else
1200
    a_occr->deleted_p = 1;
1201
 
1202
cleanup:
1203
  if (rollback_unoccr)
1204
    obstack_free (&unoccr_obstack, rollback_unoccr);
1205
}
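
/* Illustrative sketch, not part of the original file: the shape of the
   profitability test used above, with hypothetical execution counts.  The
   two fraction values stand in for the GCSE_AFTER_RELOAD_PARTIAL_FRACTION
   and GCSE_AFTER_RELOAD_CRITICAL_FRACTION parameters.  */

static int
profitability_example (void)
{
  long ok_count = 900;        /* executions of preds where a copy suffices */
  long not_ok_count = 100;    /* executions of preds that need a new load  */
  long critical_count = 50;   /* executions flowing over critical edges    */
  long partial_fraction = 3;  /* hypothetical parameter values             */
  long critical_fraction = 10;

  /* Eliminate only when the redundant executions outweigh both the newly
     inserted loads and the traffic over critical edges.  Here
     900 >= 3 * 100 and 900 >= 10 * 50, so the elimination would proceed.  */
  return ok_count >= partial_fraction * not_ok_count
         && ok_count >= critical_fraction * critical_count;
}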
1206
 
1207
/* Perform the redundancy elimination as described above.  */
1208
 
1209
static void
1210
eliminate_partially_redundant_loads (void)
1211
{
1212
  rtx insn;
1213
  basic_block bb;
1214
 
1215
  /* Note we start at block 1.  */
1216
 
1217
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1218
    return;
1219
 
1220
  FOR_BB_BETWEEN (bb,
1221
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
1222
                  EXIT_BLOCK_PTR,
1223
                  next_bb)
1224
    {
1225
      /* Don't try anything on basic blocks with strange predecessors.  */
1226
      if (! bb_has_well_behaved_predecessors (bb))
1227
        continue;
1228
 
1229
      /* Do not try anything on cold basic blocks.  */
1230
      if (probably_cold_bb_p (bb))
1231
        continue;
1232
 
1233
      /* Reset the table of things changed since the start of the current
1234
         basic block.  */
1235
      reset_opr_set_tables ();
1236
 
1237
      /* Look at all insns in the current basic block and see if there are
1238
         any loads in it that we can record.  */
1239
      FOR_BB_INSNS (bb, insn)
1240
        {
1241
          /* Is it a load - of the form (set (reg) (mem))?  */
1242
          if (NONJUMP_INSN_P (insn)
1243
              && GET_CODE (PATTERN (insn)) == SET
1244
              && REG_P (SET_DEST (PATTERN (insn)))
1245
              && MEM_P (SET_SRC (PATTERN (insn))))
1246
            {
1247
              rtx pat = PATTERN (insn);
1248
              rtx src = SET_SRC (pat);
1249
              struct expr *expr;
1250
 
1251
              if (!MEM_VOLATILE_P (src)
1252
                  && GET_MODE (src) != BLKmode
1253
                  && general_operand (src, GET_MODE (src))
1254
                  /* Are the operands unchanged since the start of the
1255
                     block?  */
1256
                  && oprs_unchanged_p (src, insn, false)
1257
                  && !(flag_non_call_exceptions && may_trap_p (src))
1258
                  && !side_effects_p (src)
1259
                  /* Is the expression recorded?  */
1260
                  && (expr = lookup_expr_in_table (src)) != NULL)
1261
                {
1262
                  /* We now have a load (insn) and an available memory at
1263
                     its BB start (expr).  Try to remove the load if it is
1264
                     redundant.  */
1265
                  eliminate_partially_redundant_load (bb, insn, expr);
1266
                }
1267
            }
1268
 
1269
          /* Keep track of everything modified by this insn, so that we
1270
             know what has been modified since the start of the current
1271
             basic block.  */
1272
          if (INSN_P (insn))
1273
            record_opr_changes (insn);
1274
        }
1275
    }
1276
 
1277
  commit_edge_insertions ();
1278
}
1279
 
1280
/* Go over the expression hash table and delete insns that were
1281
   marked for later deletion.  */
1282
 
1283
/* This helper is called via htab_traverse.  */
1284
static int
1285
delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
1286
{
1287
  struct expr *expr = (struct expr *) *slot;
1288
  struct occr *occr;
1289
 
1290
  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1291
    {
1292
      if (occr->deleted_p)
1293
        {
1294
          delete_insn (occr->insn);
1295
          stats.insns_deleted++;
1296
 
1297
          if (dump_file)
1298
            {
1299
              fprintf (dump_file, "deleting insn:\n");
1300
              print_rtl_single (dump_file, occr->insn);
1301
              fprintf (dump_file, "\n");
1302
            }
1303
        }
1304
    }
1305
 
1306
  return 1;
1307
}
1308
 
1309
static void
1310
delete_redundant_insns (void)
1311
{
1312
  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
1313
  if (dump_file)
1314
    fprintf (dump_file, "\n");
1315
}
1316
 
1317
/* Main entry point of GCSE after reload - clean up some redundant loads
1318
   due to spilling.  */
1319
 
1320
static void
1321
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1322
{
1323
 
1324
  memset (&stats, 0, sizeof (stats));
1325
 
1326
  /* Allocate memory for this pass.
1327
     Also computes and initializes the insns' CUIDs.  */
1328
  alloc_mem ();
1329
 
1330
  /* We need alias analysis.  */
1331
  init_alias_analysis ();
1332
 
1333
  compute_hash_table ();
1334
 
1335
  if (dump_file)
1336
    dump_hash_table (dump_file);
1337
 
1338
  if (htab_elements (expr_table) > 0)
1339
    {
1340
      eliminate_partially_redundant_loads ();
1341
      delete_redundant_insns ();
1342
 
1343
      if (dump_file)
1344
        {
1345
          fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1346
          fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1347
          fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
1348
          fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
1349
          fprintf (dump_file, "\n\n");
1350
        }
1351
    }
1352
 
1353
  /* We are finished with alias analysis.  */
1354
  end_alias_analysis ();
1355
 
1356
  free_mem ();
1357
}
1358
 
1359
 
1360
static bool
1361
gate_handle_gcse2 (void)
1362
{
1363
  return (optimize > 0 && flag_gcse_after_reload);
1364
}
1365
 
1366
 
1367
static unsigned int
1368
rest_of_handle_gcse2 (void)
1369
{
1370
  gcse_after_reload_main (get_insns ());
1371
  rebuild_jump_labels (get_insns ());
1372
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
1373
  return 0;
1374
}
1375
 
1376
struct tree_opt_pass pass_gcse2 =
1377
{
1378
  "gcse2",                              /* name */
1379
  gate_handle_gcse2,                    /* gate */
1380
  rest_of_handle_gcse2,                 /* execute */
1381
  NULL,                                 /* sub */
1382
  NULL,                                 /* next */
1383
  0,                                    /* static_pass_number */
1384
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
1385
  0,                                    /* properties_required */
1386
  0,                                    /* properties_provided */
1387
  0,                                    /* properties_destroyed */
1388
  0,                                    /* todo_flags_start */
1389
  TODO_dump_func |
1390
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
1391
  'J'                                   /* letter */
1392
};
1393
 
