openrisc/trunk/gnu-old/gcc-4.2.2/gcc/tree-cfgcleanup.c (rev 817)
/* CFG cleanup for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "toplev.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"
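
/* This file implements CFG cleanup on GIMPLE trees: folding control
   statements whose predicate is a compile-time constant, removing
   unreachable blocks and forwarder blocks, and merging chains of PHI
   nodes.  The entry points are cleanup_tree_cfg, cleanup_tree_cfg_loop
   and the mergephi pass at the end of the file.  */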
 
/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (VEC(edge,gc) *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        remove_edge (e);
        return true;
      }
  return false;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;

      fold_defer_overflow_warnings ();

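      /* Fold the controlling predicate; if it reduces to a constant,
         find_taken_edge below can identify the single edge that will
         actually be taken.  */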
      switch (TREE_CODE (expr))
        {
        case COND_EXPR:
          val = fold (COND_EXPR_COND (expr));
          break;

        case SWITCH_EXPR:
          val = fold (SWITCH_COND (expr));
          if (TREE_CODE (val) != INTEGER_CST)
            {
              fold_undefer_and_ignore_overflow_warnings ();
              return false;
            }
          break;

        default:
          gcc_unreachable ();
        }

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, expr, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
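
      /* The probabilities of the removed edges were accumulated into
         TAKEN_EDGE above; clamp the result to the valid range.  */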
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bsi_remove (&bsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}

/* A list of all the noreturn calls passed to modify_stmt.
   cleanup_control_flow uses it to detect cases where a mid-block
   indirect call has been turned into a noreturn call.  When this
   happens, all the instructions after the call are no longer
   reachable and must be deleted as dead.  */

VEC(tree,gc) *modified_noreturn_calls;

/* Try to remove superfluous control structures.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  /* Detect cases where a mid-block call is now known not to return.  */
  while (VEC_length (tree, modified_noreturn_calls))
    {
      stmt = VEC_pop (tree, modified_noreturn_calls);
      bb = bb_for_stmt (stmt);
      if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
        split_block (bb, stmt);
    }

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      /* If the last statement of the block could throw and now cannot,
         we need to prune the CFG.  */
      retval |= tree_purge_dead_eh_edges (bb);

      if (bsi_end_p (bsi))
        continue;

      stmt = bsi_stmt (bsi);

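      /* Fold conditionals and switches whose predicate is a
         compile-time constant; this removes the outgoing edges that
         can never be taken.  */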
      if (TREE_CODE (stmt) == COND_EXPR
          || TREE_CODE (stmt) == SWITCH_EXPR)
        retval |= cleanup_control_expr_graph (bb, bsi);
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      else if (TREE_CODE (stmt) == GOTO_EXPR
               && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0))
                   == LABEL_DECL))
        {
          edge e;
          tree label;
          edge_iterator ei;
          basic_block target_block;
          bool removed_edge = false;

          /* First look at all the outgoing edges.  Delete any outgoing
             edges which do not go to the right block.  For the one
             edge which goes to the right block, fix up its flags.  */
          label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
          target_block = label_to_block (label);
          for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
            {
              if (e->dest != target_block)
                {
                  removed_edge = true;
                  remove_edge (e);
                }
              else
                {
                  /* Turn off the EDGE_ABNORMAL flag.  */
                  e->flags &= ~EDGE_ABNORMAL;

                  /* And set EDGE_FALLTHRU.  */
                  e->flags |= EDGE_FALLTHRU;
                  ei_next (&ei);
                }
            }

          /* If we removed one or more edges, then we will need to fix the
             dominators.  It may be possible to incrementally update them.  */
          if (removed_edge)
            free_dominance_info (CDI_DOMINATORS);

          /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
             relevant information we need.  */
          bsi_remove (&bsi, true);
          retval = true;
        }

      /* Check for indirect calls that have been turned into
         noreturn calls.  */
      else if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
        {
          free_dominance_info (CDI_DOMINATORS);
          retval = true;
        }
    }
  return retval;
}

/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  block_stmt_iterator bsi;
  edge_iterator ei;
  edge e, succ;
  basic_block dest;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || (phi_nodes (bb) != NULL_TREE) != phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

#if ENABLE_CHECKING
  gcc_assert (bb != ENTRY_BLOCK_PTR);
#endif

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      switch (TREE_CODE (stmt))
        {
        case LABEL_EXPR:
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
            return false;
          break;

        default:
          return false;
        }
    }

  if (find_edge (ENTRY_BLOCK_PTR, bb))
    return false;

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }

  /* If we have an EH edge leaving this block, make sure that the
     destination of this block has only one predecessor.  This ensures
     that we don't get into the situation where we try to remove two
     forwarders that go to the same basic block but are handlers for
     different EH regions.  */
  succ = single_succ_edge (bb);
  dest = succ->dest;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        {
          if (!single_pred_p (dest))
            return false;
        }
    }

  return true;
}

/* Return true if BB has at least one abnormal incoming edge.  */

static inline bool
has_abnormal_incoming_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      tree val1 = PHI_ARG_DEF (phi, n1);
      tree val2 = PHI_ARG_DEF (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}

/* Removes forwarder block BB.  Returns false if this failed.  If a new
   forwarder block is created due to redirection of edges, it is
   stored to worklist.  */

static bool
remove_forwarder_block (basic_block bb, basic_block **worklist)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  tree label;
  tree phi;
  edge_iterator ei;
  block_stmt_iterator bsi, bsi_to;
  bool seen_abnormal_edge = false;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not merge
     it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     DEST, problems might occur during removal of the phi nodes at
     out-of-SSA time due to overlapping live ranges of registers.

     If there is an abnormal edge into DEST, the problems would occur
     anyway, since cleanup_dead_labels would then merge the labels of
     two different EH regions, and the rest of the exception handling
     code does not like that.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (has_abnormal_incoming_edge_p (bb))
    {
      seen_abnormal_edge = true;

      if (has_abnormal_incoming_edge_p (dest)
          || phi_nodes (dest) != NULL_TREE)
        return false;
    }

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (phi_nodes (dest))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
            add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
        }
      else
        {
          /* The source basic block might become a forwarder.  We know
             that it was not a forwarder before, since it used to have
             at least two outgoing edges, so we may just add it to
             worklist.  */
          if (tree_forwarder_block_p (s->src, false))
            *(*worklist)++ = s->src;
        }
    }

  if (seen_abnormal_edge)
    {
      /* Move the labels to the new block, so that the redirection of
         the abnormal edges works.  */

      bsi_to = bsi_start (dest);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          label = bsi_stmt (bsi);
          gcc_assert (TREE_CODE (label) == LABEL_EXPR);
          bsi_remove (&bsi, false);
          bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
        }
    }

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

/* Removes forwarder blocks.  */

static bool
cleanup_forwarder_blocks (void)
{
  basic_block bb;
  bool changed = false;
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;

  FOR_EACH_BB (bb)
    {
      if (tree_forwarder_block_p (bb, false))
        *current++ = bb;
    }

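  /* Drain the worklist in LIFO order.  remove_forwarder_block may push
     blocks that have just become forwarders back onto it through
     CURRENT.  */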
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block (bb, &current);
    }

  free (worklist);
  return changed;
}

/* Do one round of CFG cleanup.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval;

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();

  /* Forwarder blocks can carry line number information which is
     useful when debugging, so we only clean them up when
     optimizing.  */

  if (optimize > 0)
    {
      /* cleanup_forwarder_blocks can redirect edges out of
         SWITCH_EXPRs, which can get expensive.  So we want to enable
         recording of edge to CASE_LABEL_EXPR mappings around the call
         to cleanup_forwarder_blocks.  */
      start_recording_case_labels ();
      retval |= cleanup_forwarder_blocks ();
      end_recording_case_labels ();
    }

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  retval |= merge_seq_blocks ();

  return retval;
}


/* Remove unreachable blocks and perform other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

bool
cleanup_tree_cfg (void)
{
  bool retval, changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.  */
  changed = false;
  do
    {
      retval = cleanup_tree_cfg_1 ();
      changed |= retval;
    }
  while (retval);

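  /* Block deletions above may have left gaps in the block numbering;
     renumber the surviving blocks densely.  */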
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  return changed;
}

/* Cleanup cfg and repair loop structures.  */

void
cleanup_tree_cfg_loop (void)
{
  bool changed = cleanup_tree_cfg ();

  if (changed)
    {
      bitmap changed_bbs = BITMAP_ALLOC (NULL);
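      /* Let fix_loop_structure note in CHANGED_BBS which blocks it had
         to move to a different loop, so that the loop-closed SSA update
         below can be restricted to those blocks.  */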
      fix_loop_structure (current_loops, changed_bbs);
      calculate_dominance_info (CDI_DOMINATORS);

      /* This usually does nothing.  But sometimes parts of the cfg that were
         originally inside a loop end up outside of it due to edge removal
         (since they become unreachable through back edges from the latch).  */
      rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);

      BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
      verify_loop_structure (current_loops);
#endif
      scev_reset ();
    }
}

/* Merge the PHI nodes at BB into those at BB's sole successor.  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  tree label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      tree phi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              PENDING_STMT (e) = NULL_TREE;
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
        {
          tree def = PHI_ARG_DEF (phi, succ->dest_idx);

          if (TREE_CODE (def) == SSA_NAME)
            {
              tree var;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
                {
                  tree old_arg = TREE_PURPOSE (var);
                  tree new_arg = TREE_VALUE (var);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s);
        }

      PENDING_STMT (e) = NULL;
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (!phi_nodes (dest)
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || has_abnormal_incoming_edge_p (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          tree phi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
            {
              tree result = PHI_RESULT (phi);
              use_operand_p imm_use;
              tree use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || TREE_CODE (use_stmt) != PHI_NODE
                  || bb_for_stmt (use_stmt) != dest
                  || PHI_ARG_DEF (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (!phi)
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }

  free (worklist);
  return 0;
}

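/* Gate for the mergephi pass: it is always enabled.  */
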
static bool
gate_merge_phi (void)
{
  return 1;
}

struct tree_opt_pass pass_merge_phi = {
  "mergephi",                   /* name */
  gate_merge_phi,               /* gate */
  merge_phi_nodes,              /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_TREE_MERGE_PHI,            /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect     /* todo_flags_finish */
  | TODO_verify_ssa,

};
