/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Necessary statements" may have
   an impact on the output; "dead statements" have none, and removing
   them leaves the output unchanged.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
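
/* For illustration only, a hypothetical GIMPLE-like fragment showing
   the three phases at work:

     x_1 = a_2 + b_3;     <- no necessary uses: dead
     c_4 = a_2 * 2;
     return c_4;

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the definition of c_4 (and from there to a_2's), and
   phase 3 deletes the never-marked assignment to x_1.  */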

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
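
/* For example (a hypothetical four-block CFG, for illustration only):

       bb2: if (p) -> bb3, bb4
       bb3: ...    -> bb4
       bb4: ...

   bb3 executes only if the edge bb2->bb3 is taken, so bb3 is control
   dependent on that edge and the bit for bb2->bb3 is set in bb3's
   bitmap.  bb4 runs either way and depends on neither edge.  */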
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)        \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,      \
                            (EDGE_NUMBER), (BI))


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}
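
/* Walking an example (hypothetical block numbers): for an edge
   bb2->bb3 out of a condition in bb2 whose branches rejoin in bb4,
   ending_block is find_pdom (bb2) == bb4; the loop marks bb3 as
   dependent on the edge and stops upon reaching bb4.  */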


/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */
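
/* For instance (a hypothetical C-level sketch): a call such as
   printf ("x") has side effects and is marked necessary below, while
   y = pure_fn (a) with y never used has neither side effects nor a
   needed result, so nothing here marks it; it stays dead unless
   propagation later finds a necessary use of y.  */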

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all, function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const or
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
          && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  SET_BIT (last_stmt_necessary, bb->index);
  SET_BIT (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
                                        bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    SET_BIT (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls and stores to file-level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB (bb)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, el, false);
                }
          }

      FOR_EACH_LOOP (li, loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, el, false);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
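
/* E.g. for a local "int x" whose address is never taken, a load from x
   strips down to a DECL that may_be_aliased rejects, so the function
   returns false; a load through a pointer dereference like *p_1 (a
   MEM_REF whose base is not an ADDR_EXPR) may be aliased and returns
   true.  (Illustrative sketch only.)  */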

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == refd->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with all arguments
   identical; such PHIs can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
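
/* For example, a PHI such as x_3 = PHI <x_1, x_1> (all arguments
   identical), or a single-argument x_3 = PHI <x_1>, is degenerate:
   every use of x_3 may as well use x_1 directly.  (Hypothetical SSA
   names, for illustration.)  */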

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR
              && !TEST_BIT (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, el, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && is_gimple_reg (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow
             arrives at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain computations of PHI operands.

             Doing so is too restrictive in the case the predecessor
             block is in a loop.  Consider:

              if (b)
                {
                  int i;
                  for (i = 0; i<1000; ++i)
                    ;
                  j = 0;
                }
              return j;

             There is a PHI for J in the BB containing the return
             statement.  In this case the control dependence of the
             predecessor block (that is within the empty loop) also
             contains the block determining the number of iterations of
             the loop, which would prevent the empty loop from being
             removed in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how
             the control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the
             modified CFG, C' is always a child of C.  There are two
             cases how the children of C' can look:

                1) C' is a leaf

                   In this case the only basic block C' is control
                   dependent on is B.

                2) C' has a single child that is B

                   In this case the control dependence of C' is the
                   same as the control dependence of B in the original
                   CFG except for block B itself (since C' postdominates
                   B in the modified CFG).

             Now how do we decide which case happens?  There are two
             basic options:

                a) C postdominates B.  Then C immediately postdominates
                   B, and case 2 happens iff there is no other way from
                   B to C except the edge B->C.

                   There is another way from B to C iff there is a
                   successor of B that is not postdominated by B.
                   Testing this condition is somewhat expensive, because
                   we need to iterate over all successors of B.  We are
                   safe to assume that this does not happen: we will
                   mark B as needed when processing the other path from
                   B to C that is control dependent on B, and marking
                   control dependencies of B itself is harmless because
                   they will be processed anyway after processing the
                   control statement in B.

                b) C does not postdominate B.  Then case 1 always
                   happens, since there is a path from C to the exit
                   that does not go through B and thus also not through
                   C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR
                           && !TEST_BIT (visited_control_parents,
                                         arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, el, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function, do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function, do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                continue;
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode, make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except for non-aliased ones, which we
             handle in 1).  By keeping a global visited bitmap for
             references we walk for 2) we avoid quadratic behavior for
             those.  */
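
          /* Concretely (a hypothetical example): a load tmp_1 = x from
             a non-aliased local x falls under 1) and the walk stops at
             the nearest dominating kill of x, while tmp_2 = *p_3 falls
             under 2) and marks all dominating aliased may-defs.  */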

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory; in addition, their
                 arguments may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              tree rhs = gimple_return_retval (stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_ASM)
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we have overused our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
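
          /* Numerically (an illustration, not from the source): with
             total_chain at 20000, the fallback below fires only if
             every single walk visited fewer than 20000/32 = 625
             may-defs (total_chain > 32 * longest_chain) and the average
             walk visited more than 32 (total_chain > nr_walks * 32).  */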
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Replace all uses of NAME by its underlying variable and mark it
   for renaming.  */

void
mark_virtual_operand_for_renaming (tree name)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree name_var;

  name_var = SSA_NAME_VAR (name);
  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, name_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (name_var);
}

/* Replace all uses of the result of PHI by the underlying variable and
   mark it for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  mark_virtual_operand_for_renaming (gimple_phi_result (phi));
}


/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to the respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a
         control-dependent BB.  This means that we can pick any edge
         to duplicate PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gimple phi = gsi_stmt (gsi);
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (!is_gimple_reg (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply a control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gimple note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is an allocation
             function and that is not necessary itself.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree callee2;
              gimple def_stmt;
              if (TREE_CODE (ptr) != SSA_NAME)
                continue;
              def_stmt = SSA_NAME_DEF_STMT (ptr);
              if (!is_gimple_call (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY))
                continue;
              callee2 = gimple_call_fndecl (def_stmt);
              if (callee2 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
                  || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
                      && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
                continue;
              gimple_set_plf (stmt, STMT_NECESSARY, false);
            }

          /* If the statement at GSI is not necessary, remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (stmt);

              /* When the LHS of var = call (); is dead, simplify it into
                 call (), saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !TEST_BIT (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);
                }
            }
        }
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!TEST_BIT (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (VEC_length (basic_block, h))
                        {
                          bb = VEC_pop (basic_block, h);
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      VEC_free (basic_block, heap, h);
                    }
                }
            }
        }
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
1456
 
1457
 
1458
/* Print out removed statement statistics.  */
1459
 
1460
static void
1461
print_stats (void)
1462
{
1463
  float percg;
1464
 
1465
  percg = ((float) stats.removed / (float) stats.total) * 100;
1466
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1467
           stats.removed, stats.total, (int) percg);
1468
 
1469
  if (stats.total_phis == 0)
1470
    percg = 0;
1471
  else
1472
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1473
 
1474
  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1475
           stats.removed_phis, stats.total_phis, (int) percg);
1476
}
1477
 
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

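  /* Control-dependence and per-block liveness data are only needed by
     the aggressive variant of the pass.  */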
  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
        control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}
 
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
        BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}
 
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */
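
/* For illustration only (hypothetical identifiers, not part of the
   pass): conservative mode keeps the branch below because control
   statements are assumed necessary, while aggressive mode notices
   that nothing necessary is control dependent on it and removes the
   whole construct, provided compute () is known free of side effects:

       int t = 0;
       if (flag)
         t = compute ();   // t is never used afterwards
       return 42;
*/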

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

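  /* The counters below throttle the walking of virtual use-def chains
     in propagate_necessity; once the walks get too long on average,
     CHAIN_OVFL is set and further uses are handled conservatively.  */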
  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

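  /* When anything changed, ask the pass manager to update SSA form,
     clean up the CFG, and remove now-unused locals.  */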
  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
            | TODO_remove_unused_locals);
  else
    return 0;
}
 
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
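      /* Removing dead code may invalidate cached loop-iteration
         estimates and scalar-evolution info; discard them.  */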
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

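/* Pass descriptors.  "dce" and "dceloop" always use the conservative
   algorithm; "cddce" uses the control-dependence-based algorithm when
   optimizing at -O2 or above.  */
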
struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",                                /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",                            /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce_loop,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",                              /* name */
  gate_dce,                             /* gate */
  tree_ssa_cd_dce,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CD_DCE,                       /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_flow                    /* todo_flags_finish */
 }
};
