/* Post reload partially redundant load elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination; try to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build an available expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression
       to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
         Perform partial redundancy elimination; check if it's worth adding
         loads to make the load fully redundant.  If so, add loads and
         register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
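
/* An illustrative example, not part of the original sources: suppose
   block B3 has predecessors B1 and B2, B1 ends with the store
   (set (mem:SI (reg:SI 3)) (reg:SI 4)), and B3 starts with the load
   (set (reg:SI 5) (mem:SI (reg:SI 3))).  The load is redundant along
   the B1->B3 edge only, i.e. partially redundant.  Step 2 may insert
   a compensating load of the same MEM on the B2->B3 edge and the copy
   (set (reg:SI 5) (reg:SI 4)) on the B1->B3 edge, after which step 3
   deletes the original load in B3.  The register and block numbers
   here are made up for the example.  */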


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly linked
   list of occurrences.  */

/* The table itself.  */
static htab_t expr_table;

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value for this entry.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};
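
/* A layout sketch (added commentary, not from the original sources): if
   the same memory expression is available at the end of two different
   basic blocks, the table holds a single struct expr for it, and its
   avail_occr list contains two struct occr nodes, one per block, each
   pointing at the last insn in that block that computes the expression.  */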

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, but this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
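
/* A worked example (added commentary, not from the original sources):
   for the insn stream NOTE, INSN_A, NOTE, INSN_B, with the running
   counter at 5 when the first note is reached, alloc_mem below assigns
   the CUIDs 5, 5, 6, 6.  A note shares the CUID of the next real insn,
   so CUID comparisons only meaningfully distinguish real insns.  */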


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx, bool);
static void record_last_reg_set_info (rtx, rtx);
static void record_last_reg_set_info_regno (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static hashval_t hash_expr_for_htab (const void *);
static int expr_equiv_p (const void *, const void *);
static void insert_expr_in_table (rtx, rtx);
static struct expr *lookup_expr_in_table (rtx);
static int dump_hash_table_entry (void **, void *);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set (rtx);
static void compute_hash_table (void);

/* The work horses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
                                                rtx,
                                                struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = htab_create (MAX (i / 4, 13),
                            hash_expr_for_htab, expr_equiv_p, NULL);

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                           sizeof (struct modifies_mem));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  htab_delete (expr_table);

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  free (reg_avail_info);
}


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
                   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here; we just return the cached hash value.  */

static hashval_t
hash_expr_for_htab (const void *expp)
{
  const struct expr *const exp = (const struct expr *) expp;
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const void *exp1p, const void *exp2p)
{
  const struct expr *const exp1 = (const struct expr *) exp1p;
  const struct expr *const exp2 = (const struct expr *) exp2p;
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}
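
/* Added commentary, not from the original sources: hashtab.h requires
   that elements which compare equal under the equality callback also
   hash equally.  Because every struct expr caches its hash_rtx value at
   creation time, expr_equiv_p can cheaply assert that invariant above.  */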


/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
                                                    hash, INSERT);

  if (! (*slot))
    /* The expression isn't found, so insert it.  */
    *slot = cur_expr;
  else
    {
      /* The expression is already in the table, so roll back the
         obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr
         && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
                                                  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = avail_occr;
      else
        last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}


/* Look up pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
                                                    hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
static int
dump_hash_table_entry (void **slot, void *filep)
{
  struct expr *expr = (struct expr *) *slot;
  FILE *file = (FILE *) filep;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, expr->expr);
  fprintf (file, "\nhashcode: %u\n", expr->hash);
  fprintf (file, "list of occurrences:\n");
  occr = expr->avail_occr;
  while (occr)
    {
      rtx insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (expr_table),
           (long) htab_elements (expr_table),
           htab_collisions (expr_table));
  if (htab_elements (expr_table) > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      htab_traverse (expr_table, dump_hash_table_entry, file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_HARD_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
        return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
        return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
        return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
            return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
           && INSN_CUID (setter) < uid_limit)
          || (! after_insn
              && INSN_CUID (setter) > uid_limit))
        {
          list_entry = list_entry->next;
          continue;
        }

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.
         It will set mems_conflict_p to nonzero if there may be a
         conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
        return 1;

      list_entry = list_entry->next;
    }
  return 0;
}
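
/* A worked example (added commentary, not from the original sources):
   with AFTER_INSN false and UID_LIMIT 10, a recorded memory-modifying
   insn with CUID 12 is skipped because it lies after the point being
   checked, while one with CUID 7 is handed to note_stores to test for
   a true dependence with X.  With AFTER_INSN true the filter is
   reversed.  */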


/* Record that REG, and every hard register it covers, was last set by
   INSN.  */

static inline void
record_last_reg_set_info (rtx insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_HARD_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                                      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes; they don't clobber memory.  They may still
         clobber the stack pointer though.  Some targets do argument
         pushes without adding REG_INC notes.  See e.g. PR25196,
         where a pushsi2 on i386 doesn't have REG_INC notes.  Note
         such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
        record_last_mem_set_info (last_set_insn);
      else
        record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
          record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == CLOBBER)
          {
            x = XEXP (XEXP (link, 0), 0);
            if (REG_P (x))
              {
                gcc_assert (HARD_REGISTER_P (x));
                record_last_reg_set_info (insn, x);
              }
          }

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
        record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (dest))
          /* Is SET_SRC something we want to gcse?  */
          && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
          /* Never consider insns touching the register stack.  It may
             create situations that reg-stack cannot handle (e.g. a stack
             register live across an abnormal edge).  */
          && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          && oprs_unchanged_p (src, insn, true))
        {
          insert_expr_in_table (src, insn);
        }
    }
  else if (REG_P (src))
    {
      /* A store of a register to memory.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (src))
          /* Is SET_DEST something we want to gcse?  */
          && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
          /* As above for STACK_REGS.  */
          && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
          /* Check if the memory expression is killed after insn.  */
          && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
          && oprs_unchanged_p (XEXP (dest, 0), insn, true))
        {
          insert_expr_in_table (dest, insn);
        }
    }
}
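
/* Illustrative note (added commentary, not from the original sources):
   both branches above key the table on the MEM operand.  A load such as
   (set (reg:SI 0) (mem:SI (reg:SI 6))) records its SET_SRC, and a store
   such as (set (mem:SI (reg:SI 6)) (reg:SI 0)) records its SET_DEST, so
   a later lookup of the same memory expression finds either kind of
   occurrence.  The register numbers are made up for the example.  */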


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx insn;

      /* First pass over the instructions records information used to
         determine when registers and memory are last set.
         Since we compute a "local" AVAIL_OUT, reset the tables that
         help us keep track of what has been modified since the start
         of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            record_opr_changes (insn);
        }

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
          hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
        return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
        return false;

      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
        return false;
    }
  return true;
}
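
/* Added commentary, not from the original sources: the rejected
   predecessor edges are the ones on which the pass could not safely
   place compensation code later via insert_insn_on_edge and
   commit_edge_insertions; an abnormal critical edge, for instance,
   cannot be split to hold new insns.  */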


/* Search for an occurrence of the expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;
  return NULL;
}


/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
                                    struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
        {
          /* Check if the loaded register is not used.  */
          avail_insn = a_occr->insn;
          avail_reg = get_avail_load_store_reg (avail_insn);
          gcc_assert (avail_reg);

          /* Make sure we can generate a move from register avail_reg to
             dest.  */
          extract_insn (gen_move_insn (copy_rtx (dest),
                                       copy_rtx (avail_reg)));
          if (! constrain_operands (1)
              || reg_killed_on_edge (avail_reg, pred)
              || reg_used_on_edge (dest, pred))
            {
              avail_insn = NULL;
              continue;
            }
          if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
            /* AVAIL_INSN remains non-null.  */
            break;
          else
            avail_insn = NULL;
        }

      if (EDGE_CRITICAL_P (pred))
        critical_count += pred->count;

      if (avail_insn != NULL_RTX)
        {
          npred_ok++;
          ok_count += pred->count;
          if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
                                                    copy_rtx (avail_reg)))))
            {
              /* Check if there is going to be a split.  */
              if (EDGE_CRITICAL_P (pred))
                critical_edge_split = true;
            }
          else /* It's a dead move; no need to generate it.  */
            continue;
          occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                  sizeof (struct unoccr));
          occr->insn = avail_insn;
          occr->pred = pred;
          occr->next = avail_occrs;
          avail_occrs = occr;
          if (! rollback_unoccr)
            rollback_unoccr = occr;
        }
      else
        {
          /* Adding a load on a critical edge will cause a split.  */
          if (EDGE_CRITICAL_P (pred))
            critical_edge_split = true;
          not_ok_count += pred->count;
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                    sizeof (struct unoccr));
          unoccr->insn = NULL_RTX;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
          if (! rollback_unoccr)
            rollback_unoccr = unoccr;
        }
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
           || targetm.cannot_modify_jumps_p ())
          && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
         memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                          copy_rtx (avail_reg)),
                           pred);
      stats.moves_inserted++;

      if (dump_file)
        fprintf (dump_file,
                 "generating move from %d to %d on edge from %d to %d\n",
                 REGNO (avail_reg),
                 REGNO (dest),
                 pred->src->index,
                 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
        {
          fprintf (dump_file,
                   "generating on edge from %d to %d a copy of load: ",
                   pred->src->index,
                   pred->dest->index);
          print_rtl (dump_file, PATTERN (insn));
          fprintf (dump_file, "\n");
        }
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If the insn is available it may
     help discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
        {
          fprintf (dump_file, "deleting insn:\n");
          print_rtl_single (dump_file, insn);
          fprintf (dump_file, "\n");
        }
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}
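
/* A worked example for the two thresholds above (added commentary, not
   from the original sources): GCSE_AFTER_RELOAD_PARTIAL_FRACTION and
   GCSE_AFTER_RELOAD_CRITICAL_FRACTION are --param values from params.def
   (historically 3 and 10).  If the predecessor edges where the value is
   already available execute 1000 times (ok_count), the transformation is
   abandoned unless the edges needing a compensating load execute at most
   roughly a third of that (not_ok_count) and critical edges carry at most
   roughly a tenth (critical_count).  */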

/* Perform the redundancy elimination as described before.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR,
                  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
        continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
        continue;

      /* Reset the table of things changed since the start of the current
         basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
         any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
        {
          /* Is it a load - of the form (set (reg) (mem))?  */
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SET
              && REG_P (SET_DEST (PATTERN (insn)))
              && MEM_P (SET_SRC (PATTERN (insn))))
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (!MEM_VOLATILE_P (src)
                  && GET_MODE (src) != BLKmode
                  && general_operand (src, GET_MODE (src))
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_unchanged_p (src, insn, false)
                  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
                  && !side_effects_p (src)
                  /* Is the expression recorded?  */
                  && (expr = lookup_expr_in_table (src)) != NULL)
                {
                  /* We now have a load (insn) and an available memory at
                     its BB start (expr).  Try to remove the load if it is
                     redundant.  */
                  eliminate_partially_redundant_load (bb, insn, expr);
                }
            }

          /* Keep track of everything modified by this insn, so that we
             know what has been modified since the start of the current
             basic block.  */
          if (INSN_P (insn))
            record_opr_changes (insn);
        }
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
static int
delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *expr = (struct expr *) *slot;
  struct occr *occr;

  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
        {
          delete_insn (occr->insn);
          stats.insns_deleted++;

          if (dump_file)
            {
              fprintf (dump_file, "deleting insn:\n");
              print_rtl_single (dump_file, occr->insn);
              fprintf (dump_file, "\n");
            }
        }
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE after reload - clean up some redundant
   loads due to spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (htab_elements (expr_table) > 0)
    {
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();

      if (dump_file)
        {
          fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
          fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
          fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
          fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
          fprintf (dump_file, "\n\n");
        }

      statistics_counter_event (cfun, "copies inserted",
                                stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
                                stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
                                stats.insns_deleted);
    }

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  free_mem ();
}


static bool
gate_handle_gcse2 (void)
{
  return (optimize > 0 && flag_gcse_after_reload
          && optimize_function_for_speed_p (cfun));
}


static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_gcse2 =
{
 {
  RTL_PASS,
  "gcse2",                              /* name */
  gate_handle_gcse2,                    /* gate */
  rest_of_handle_gcse2,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing
  | TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
 }
};
