/* RTL dead code elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "except.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

static bool find_call_stack_args (rtx, bool, bool, bitmap);

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}


/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that can throw.  */
  if (!insn_nothrow_p (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
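
/* For instance, a plain register-to-register move is a single SET with
   no volatile references, so it falls through to the default case above
   and is deletable; a PARALLEL is deletable only if every one of its
   elements passes deletable_insn_p_1.  */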


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return TEST_BIT (marked, INSN_UID (insn));
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        VEC_safe_push (rtx, heap, worklist, insn);
      SET_BIT (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
        find_call_stack_args (insn, true, fast, NULL);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx) data, false);
}


/* Mark INSN if BODY stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  if (fast)
    note_stores (body, mark_nonreg_stores_1, insn);
  else
    note_stores (body, mark_nonreg_stores_2, insn);
}


/* Return true if store to MEM, starting OFF bytes from stack pointer,
   is a call argument store, and clear corresponding bits from SP_BYTES
   bitmap if it is.  */

static bool
check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
                      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}
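
/* A worked example of the SP_BYTES protocol: if the stored arguments of
   a call occupy bytes [8, 16) relative to the stack pointer, MIN_SP_OFF
   is 8 and a 4-byte argument store at sp + 12 clears bits 4..7 of
   SP_BYTES.  A store is rejected if any of its bytes falls outside
   [MIN_SP_OFF, MAX_SP_OFF) or covers a byte whose bit has already been
   cleared by another store.  */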


/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, size;
        if (!MEM_SIZE_KNOWN_P (mem))
          return false;
        size = MEM_SIZE (mem);
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            if (!REG_P (addr))
              return false;
            /* If not fast, use chains to see if addr wasn't set to
               sp + offset.  */
            if (!fast)
              {
                df_ref *use_rec;
                struct df_link *defs;
                rtx set;

                for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
                  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                    break;

                if (*use_rec == NULL)
                  return false;

                for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                    break;

                if (defs == NULL)
                  return false;

                set = single_set (DF_REF_INSN (defs->ref));
                if (!set)
                  return false;

                if (GET_CODE (SET_SRC (set)) != PLUS
                    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                  return false;

                off += INTVAL (XEXP (SET_SRC (set), 1));
              }
            else
              return false;
          }
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0), addr;
        HOST_WIDE_INT off = 0, byte;
        addr = XEXP (mem, 0);
        if (GET_CODE (addr) == PLUS
            && REG_P (XEXP (addr, 0))
            && CONST_INT_P (XEXP (addr, 1)))
          {
            off = INTVAL (XEXP (addr, 1));
            addr = XEXP (addr, 0);
          }
        if (addr != stack_pointer_rtx)
          {
            df_ref *use_rec;
            struct df_link *defs;
            rtx set;

            for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
              if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                break;

            for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
              if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                break;

            set = single_set (DF_REF_INSN (defs->ref));
            off += INTVAL (XEXP (SET_SRC (set), 1));
          }
        for (byte = off; byte < off + MEM_SIZE (mem); byte++)
          {
            if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
              gcc_unreachable ();
          }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL_RTX;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
          && REG_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          off = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      if (addr != stack_pointer_rtx)
        {
          if (!REG_P (addr))
            break;
          if (!fast)
            {
              df_ref *use_rec;
              struct df_link *defs;
              rtx set;

              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
                if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
                  break;

              if (*use_rec == NULL)
                break;

              for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
                if (! DF_REF_IS_ARTIFICIAL (defs->ref))
                  break;

              if (defs == NULL)
                break;

              set = single_set (DF_REF_INSN (defs->ref));
              if (!set)
                break;

              if (GET_CODE (SET_SRC (set)) != PLUS
                  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
                  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
                break;

              off += INTVAL (XEXP (SET_SRC (set), 1));
            }
          else
            break;
        }

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
          || !check_argument_store (mem, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
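
/* To summarize the walk above: RET becomes true only when every byte
   recorded in SP_BYTES has been matched by an argument store before the
   scan hits another call, a stack pointer adjustment, a store that is
   not an argument store, or the head of the basic block.  If the walk
   ends for any other reason, RET stays false and ARG_STORES is
   cleared.  */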


/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx insn)
{
  df_ref *def_rec;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx insn, next;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref *use_rec;

          for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
            {
              df_ref use = *use_rec;
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the DEFs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          /* Always delete no-op moves.  */
          if (noop_move_p (insn))
            ;

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will
             only result in an uninitialized use of an insn that is
             dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          /* If a pure or const call is deleted, this may make the cfg
             have unreachable blocks.  We remember this and call
             delete_unreachable_blocks at the end.  */
          if (CALL_P (insn))
            must_clean = true;

          /* Now delete the insn.  */
          delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (PATTERN (insn), insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-chain based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref *use_rec;

  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
           *use_rec; use_rec++)
        for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
          if (! DF_REF_IS_ARTIFICIAL (defs->ref))
            mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  df_ref *use_rec;

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}


/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  VEC_free (rtx, heap, worklist);

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}


static bool
gate_ud_dce (void)
{
  return optimize > 1 && flag_dce
    && dbg_cnt (dce_ud);
}

struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud_dce",                             /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB using word-level liveness.  Return true if
   the live_in set has changed.  REDO_OUT is true if the info at the
   bottom of the block needs to be recalculated before starting.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         succs of this block has had a change in its live in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
        bool any_changed;
        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
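
/* A small illustration of what the word-level granularity buys
   (assuming a target where a DImode pseudo spans two words): if only
   the low word of such a pseudo is live below, an insn that writes just
   the high word has no live def bits in LOCAL_LIVE, so it is not marked
   here and, unless something else marks it, it will be deleted even
   though the register as a whole is still live.  */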


/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of the
         succs of this block has had a change in its live in set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
                || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);
      }

  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK (index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index));
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if
                     the bitmaps change at the top of a block that is
                     the entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          sbitmap_zero (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}


/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}


static bool
gate_fast_dce (void)
{
  return optimize > 0 && flag_dce
    && dbg_cnt (dce_fast);
}

struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl_dce",                            /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
