OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-stable/] [gcc-4.5.1/] [gcc/] [dce.c] - rev 826 (last changed in rev 280 by jeremybenn)
/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx, bool, bool, bitmap);
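
/* Both DCE variants in this file share the same structure: insns that
   are necessary for their side effects are marked first, the marks are
   then propagated to the insns they depend on (through use-def chains
   for the UD-based pass, or through a backwards liveness simulation
   for the fast pass), and every insn left unmarked at the end is
   removed by delete_unmarked_insns.  */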

/* A subroutine of deletable_insn_p.  BODY is part of the instruction
   being tested; either the top-level pattern, or an element of a
   PARALLEL.  The instruction is known not to be a bare USE or
   CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}


/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that can throw.  */
  if (!insn_nothrow_p (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (insn, true, fast, NULL);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}


/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */
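
/* The implementation makes two passes over CALL_INSN_FUNCTION_USAGE,
   first to compute the range of stack offsets occupied by the argument
   slots and then to record every argument byte in SP_BYTES; it then
   walks backwards from the call, clearing bytes as the corresponding
   stores are found, and succeeds only if every byte is accounted for
   before another call, a stack pointer adjustment or an unrelated
   store is reached.  */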

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr, size;
        HOST_WIDE_INT off = 0;
        size = MEM_SIZE (mem);
        if (size == NULL_RTX)
          return false;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to check whether ADDR was set
               to sp + offset.  */
            if (!fast)
              {
                df_ref *use_rec;
                struct df_link *defs;
                rtx set;

                for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
                  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                    break;

                if (*use_rec == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + INTVAL (size));
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref *use_rec;
            struct df_link *defs;
            rtx set;

            for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
              if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                break;

            for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + INTVAL (MEM_SIZE (mem)); byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when it sees another call, an sp adjustment, or a memory store
     other than an argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off, byte;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL_RTX;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref *use_rec;
              struct df_link *defs;
              rtx set;

              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                  break;

              if (*use_rec == NULL)
                break;

              for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0)
        break;

      for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
        {
          if (byte < min_sp_off
              || byte >= max_sp_off
              || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
            break;
        }

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}


/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes. */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  df_ref *def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
         chain because it is being modified by the actions in the
         loop.  So we just get the head.  We plan to drain the list
         anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
        {
          df_ref eq_use = DF_REG_EQ_USE_CHAIN (regno);
          rtx noted_insn = DF_REF_INSN (eq_use);
          rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
          if (!note)
            note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

          /* This assert is generally triggered when someone deletes a
             REG_EQUAL or REG_EQUIV note by hacking the list manually
             rather than calling remove_note.  */
          gcc_assert (note);
          remove_note (noted_insn, note);
        }
    }
}


/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will
             only result in an uninitialized use of an insn that is
             dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to delete REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          delete_corresponding_reg_eq_notes (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-based DCE routines. */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref *use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  df_ref *use_rec;

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}


/* UD-chain based DCE.  */
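
/* The marking works as follows: prescan_insns_for_dce marks every insn
   that is necessary for its side effects, mark_artificial_uses marks
   the producers of the artificially used registers, and the worklist
   loop below propagates the marks backwards through the use-def
   chains.  Everything still unmarked afterwards is deleted by
   delete_unmarked_insns.  */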

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  VEC_free (rtx, heap, worklist);

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}


static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud dce",                             /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                     /* todo_flags_finish */
 }
};


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */
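
/* The fast algorithm processes one basic block at a time: each block
   is simulated backwards starting from its LR (or byte-level LR)
   live-out set, an insn is marked as needed when one of its defs is
   live or artificially used, defs are always removed from the running
   live set, and only the uses of marked insns are added back.  When a
   block's live-in set changes, the live-out sets of its predecessors
   are recomputed and, if a loop edge is involved, the whole process
   is iterated.  */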

/* Process basic block BB.  Return true if the live_in set has
   changed. REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses. */

static bool
byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
      bitmap_clear (DF_BYTE_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));

  df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          {
            df_ref def = *def_rec;
            unsigned int last;
            unsigned int dregno = DF_REF_REGNO (def);
            unsigned int start = df_byte_lr_get_regno_start (dregno);
            unsigned int len = df_byte_lr_get_regno_len (dregno);

            unsigned int sb;
            unsigned int lb;
            /* This is one of the only places where DF_MM_MAY should
               be used for defs.  Need to make sure that we are
               checking for all of the bits that may be used.  */

            if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
              {
                start += sb;
                len = lb - sb;
              }

            if (bitmap_bit_p (au, dregno))
              {
                mark_insn (insn, true);
                goto quickexit;
              }

            last = start + len;
            while (start < last)
              if (bitmap_bit_p (local_live, start++))
                {
                  mark_insn (insn, true);
                  goto quickexit;
                }
          }

      quickexit:

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_byte_lr_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_byte_lr_simulate_uses (insn, local_live);

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_byte_regset (dump_file, local_live);
          }
      }

  df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}


/* Process basic block BB.  Return true if the live_in set has
   changed. REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses. */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = false;

        /* The insn is needed if there is someone who uses the output.  */
        for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
          if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
              || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
            {
              needed = true;
              break;
            }

        if (needed)
          mark_insn (insn, true);

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_simulate_uses (insn, local_live);
      }

  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  If BYTE_LEVEL is
   true, use the byte level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool byte_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = df->regular_block_artificial_uses;
  bitmap au_eh = df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (byte_level)
            local_changed
              = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                          bb_has_eh_pred (bb) ? au_eh : au);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (byte_level)
            df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast byte level DCE.  */

static unsigned int
rest_of_handle_fast_byte_dce (void)
{
  df_byte_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  return 0;
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}


/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}


static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl dce",                            /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_fast_rtl_byte_dce =
{
 {
  RTL_PASS,
  "byte-dce",                           /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_byte_dce,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
