/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" are those with
   no effect on the program's output; "necessary statements" are those
   that may affect it.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */

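/* For illustration (a hypothetical input, not part of the pass itself):
   given the GIMPLE-like sequence

       a_1 = x_0 + 1;     feeds nothing live; dead
       b_2 = y_0 * 2;     feeds the return value; necessary
       return b_2;        obviously necessary (phase 1)

   phase 1 marks the return statement, phase 2 propagates necessity to
   the definition of b_2, and phase 3 deletes the assignment to a_1.  */
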
#include "config.h"
47
#include "system.h"
48
#include "coretypes.h"
49
#include "tm.h"
50
#include "ggc.h"
51
 
52
/* These RTL headers are needed for basic-block.h.  */
53
#include "rtl.h"
54
#include "tm_p.h"
55
#include "hard-reg-set.h"
56
#include "obstack.h"
57
#include "basic-block.h"
58
 
59
#include "tree.h"
60
#include "diagnostic.h"
61
#include "tree-flow.h"
62
#include "gimple.h"
63
#include "tree-dump.h"
64
#include "tree-pass.h"
65
#include "timevar.h"
66
#include "flags.h"
67
#include "cfgloop.h"
68
#include "tree-scalar-evolution.h"
69
 
70
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last_stmt of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

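/* For example (with hypothetical indices): if basic block 5 is control
   dependent on the edges with indices 2 and 7 in the edge list, then
   control_dependence_map[5] has exactly bits 2 and 7 set.  */
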
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)        \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,      \
                            (EDGE_NUMBER), (BI))


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}


/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

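/* For example (a hypothetical diamond CFG):

       bb0: if (c) goto bb1; else goto bb2;
       bb1: ...; goto bb3;
       bb2: ...; goto bb3;
       bb3: ...

   bb1 is control dependent on the edge bb0->bb1, bb2 on bb0->bb2, and
   bb3, which post-dominates bb0, is control dependent on neither.  */
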
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  tree lhs = NULL_TREE;
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (flag_non_call_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      /* Most, but not all function calls are required.  Function calls that
         produce no result and have no side effects (i.e. const pure
         functions) are unnecessary.  */
      if (gimple_has_side_effects (stmt))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
      if (!gimple_call_lhs (stmt))
        return;
      lhs = gimple_call_lhs (stmt);
      /* Fall through */

    case GIMPLE_ASSIGN:
      if (!lhs)
        lhs = gimple_assign_lhs (stmt);
      break;

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  bitmap_iterator bi;
  unsigned edge_number;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      gimple stmt;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
        continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);
      SET_BIT (bb_contains_live_stmts, cd_bb->index);

      stmt = last_stmt (cd_bb);
      if (stmt && is_ctrl_stmt (stmt))
        mark_stmt_necessary (stmt, true);
    }
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  if ((TREE_READONLY (current_function_decl)
       || DECL_PURE_P (current_function_decl))
      && !DECL_LOOPING_CONST_OR_PURE_P (current_function_decl))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB (bb)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, el);
                }
          }

      FOR_EACH_LOOP (li, loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, el);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

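/* For example (hypothetical declarations): for a non-addressable local
   "struct S s;", a reference such as s.a.b strips the component refs
   down to the decl "s", so the function returns false; for an indirect
   reference like *p_1, or for an addressable or global decl, it
   returns true.  */
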
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME)
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

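/* For example (hypothetical GIMPLE): when walking the VDEF chain for a
   load from a non-aliased "s.f", a dominating store "s.f = ..." (or one
   covering the whole of "s") is a kill and stops the walk; stores to
   unrelated bases keep the walk going.  */
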
static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with identical
   arguments, which can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

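/* E.g. the PHI node "x_3 = PHI <x_1(bb2), x_1(bb4)>" is degenerate:
   every argument is x_1, so uses of x_3 could use x_1 directly.  */
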
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statements of the basic blocks that the block
             containing STMT is control dependent on, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR
              && ! TEST_BIT (visited_control_parents, bb->index))
            {
              SET_BIT (visited_control_parents, bb->index);
              mark_control_dependent_edges_necessary (bb, el);
            }
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && is_gimple_reg (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
                  if (arg_bb != ENTRY_BLOCK_PTR
                      && ! TEST_BIT (visited_control_parents, arg_bb->index))
                    {
                      SET_BIT (visited_control_parents, arg_bb->index);
                      mark_control_dependent_edges_necessary (arg_bb, el);
                    }
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except for the non-aliased ones, which
             are handled in 1).  By keeping a global visited bitmap for
             the references we walk in 2) we avoid quadratic behavior.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              bool rhs_aliased = false;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs))
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    rhs_aliased = true;
                }
              if (rhs_aliased)
                mark_all_reaching_defs_necessary (stmt);
            }
          else if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              tree rhs = gimple_return_retval (stmt);
              /* A return statement may perform a load.  */
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs))
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_ASM)
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Replace all uses of the result of PHI by the underlying variable and
   mark it for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree result_ssa, result_var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  result_ssa = gimple_phi_result (phi);
  result_var = SSA_NAME_VAR (result_ssa);
  FOR_EACH_IMM_USE_STMT (stmt, iter, result_ssa)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, result_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (result_var);
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gimple phi = gsi_stmt (gsi);
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (!is_gimple_reg (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI if not dead can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

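/* For example (hypothetical GIMPLE): when the conditional ending

       bb2: if (x_1 > 0) goto bb3; else goto bb4;

   is found to be dead, bb2 is left with a single fallthru edge to its
   immediate post-dominator (say bb5), and cleanup_tree_cfg later removes
   bb3 and bb4 if nothing else reaches them.  */
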
/* Eliminate unnecessary statements. Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              call = gimple_call_fndecl (stmt);
              if (call)
                {
                  tree name;

                  /* When LHS of var = call (); is dead, simplify it into
                     call (); saving one operand.  */
                  name = gimple_call_lhs (stmt);
                  if (name && TREE_CODE (name) == SSA_NAME
                           && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
                    {
                      something_changed = true;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fprintf (dump_file, "Deleting LHS of call: ");
                          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                          fprintf (dump_file, "\n");
                        }

                      gimple_call_set_lhs (stmt, NULL_TREE);
                      maybe_clean_or_replace_eh_stmt (stmt, stmt);
                      update_stmt (stmt);
                      release_ssa_name (name);
                    }
                  notice_special_calls (stmt);
                }
            }
        }
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!TEST_BIT (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (VEC_length (basic_block, h))
                        {
                          bb = VEC_pop (basic_block, h);
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      VEC_free (basic_block, heap, h);
                    }
                }
            }
        }
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
        control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
        BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
            | TODO_remove_unused_locals);
  else
    return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",                                /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",                            /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce_loop,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",                              /* name */
  gate_dce,                             /* gate */
  tree_ssa_cd_dce,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CD_DCE,                       /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_flow                    /* todo_flags_finish */
 }
};
