/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "obstack.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"
#include "timevar.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of blocks on the stack.  That is, positions 0, ..., sp-1
     are occupied.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
                                             basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
                                                     basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static bool flow_active_insn_p (rtx);

/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !single_succ_p (bb))
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (JUMP_P (insn) && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}
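
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0 so it is never compiled): counting forwarder blocks, e.g.
   to estimate how much a cleanup pass could simplify the CFG.  The
   function itself is hypothetical.  */
#if 0
static int
count_forwarder_blocks (void)
{
  basic_block bb;
  int n = 0;

  FOR_EACH_BB (bb)
    if (forwarder_block_p (bb))
      n++;
  return n;
}
#endif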

/* Return true if we can reach TARGET from SRC by falling through.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  if (src->next_bb != target)
    return false;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
        && e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Return true if we could reach TARGET from SRC by falling through,
   were TARGET made adjacent to SRC.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
        && e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge_iterator *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block);
  post = XCNEWVEC (int, last_basic_block);

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (EDGE_COUNT (dest->succs) > 0)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = ei_start (dest->succs);
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            {
              ei_edge (ei)->flags |= EDGE_DFS_BACK;
              found = true;
            }

          if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
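
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): a typical consumer calls mark_dfs_back_edges once and
   then tests EDGE_DFS_BACK on individual edges.  */
#if 0
static void
dump_back_edges (FILE *file)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  if (!mark_dfs_back_edges ())
    return;  /* No loop, natural or otherwise.  */

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (e->flags & EDGE_DFS_BACK)
        fprintf (file, "back edge %d -> %d\n",
                 e->src->index, e->dest->index);
}
#endif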

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          e->flags &= ~EDGE_CAN_FALLTHRU;

          /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
          if (e->flags & EDGE_FALLTHRU)
            e->flags |= EDGE_CAN_FALLTHRU;
        }

      /* If the BB ends with an invertible condjump, both of its edges
         are CAN_FALLTHRU edges.  The first invert_jump call below only
         proves that the jump is invertible; the second inverts it back
         to its original form.  */
      if (EDGE_COUNT (bb->succs) != 2)
        continue;
      if (!any_condjump_p (BB_END (bb)))
        continue;
      if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
        continue;
      invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
      EDGE_SUCC (bb, 0)->flags |= EDGE_CAN_FALLTHRU;
      EDGE_SUCC (bb, 1)->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Find unreachable blocks.  An unreachable block will have its
   BB_REACHABLE flag cleared in block->flags; a set flag indicates
   the block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
        {
          basic_block dest = e->dest;

          if (!(dest->flags & BB_REACHABLE))
            {
              *tos++ = dest;
              dest->flags |= BB_REACHABLE;
            }
        }
    }

  free (worklist);
}
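
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): after find_unreachable_blocks, a pass can simply test
   BB_REACHABLE; delete_unreachable_blocks in cfgcleanup.c follows this
   pattern.  */
#if 0
static int
count_unreachable_blocks (void)
{
  basic_block bb;
  int n = 0;

  find_unreachable_blocks ();
  FOR_EACH_BB (bb)
    if (!(bb->flags & BB_REACHABLE))
      n++;
  return n;
}
#endif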

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;
  edge_iterator ei;

  block_count = n_basic_blocks; /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
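
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): a typical edge-list lifetime.  NUM_EDGES,
   INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB are the accessors from
   basic-block.h; the function itself is hypothetical.  */
#if 0
static void
edge_list_demo (FILE *file)
{
  struct edge_list *elist = create_edge_list ();
  int i;

  for (i = 0; i < NUM_EDGES (elist); i++)
    fprintf (file, "edge %d: %d -> %d\n", i,
             INDEX_EDGE_PRED_BB (elist, i)->index,
             INDEX_EDGE_SUCC_BB (elist, i)->index);
  free_edge_list (elist);
}
#endif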

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        FOR_EACH_EDGE (e, ei, p->succs)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        FOR_EACH_EDGE (e, ei, s->preds)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}

/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  /* Scan whichever of the two edge lists is shorter.  */
  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
        if (e->dest == succ)
          return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
        if (e->src == pred)
          return e;
    }

  return NULL;
}

/* This routine returns the index of the edge connecting a specified
   predecessor and successor, or EDGE_INDEX_NO_EDGE if there is no
   such edge.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return EDGE_INDEX_NO_EDGE;
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
{
  unsigned int node = 0;
  sbitmap_iterator sbi;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, sbi)
    fprintf (file, "%d ", node);
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (e);
      else
        ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR);
}

/* This function will add a fake edge from any block which has no
   successors to the exit block.  Some data flow equations require
   these edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
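
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): profile.c brackets its work like this so that noreturn
   calls do not leave blocks without a path to the exit.  */
#if 0
static void
analyze_with_fake_exit_edges (void)
{
  add_noreturn_fake_exit_edges ();
  /* ... run an analysis that needs every block to reach
     EXIT_BLOCK_PTR ... */
  remove_fake_exit_edges ();
}
#endif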

/* This function adds a fake edge from each infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block = EXIT_BLOCK_PTR;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
                                                          unvisited_block);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
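
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): post-dominators are only well defined when every node
   reaches the exit, so callers typically wrap their computation in the
   fake-edge helpers like this.  */
#if 0
static void
compute_postdoms_safely (void)
{
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* ... use the post-dominator tree ... */
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
}
#endif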

/* Compute reverse topological sort order, i.e. a post-order numbering
   of the graph.  */

int
post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = ei_start (dest->succs);
          else
            post_order[post_order_num++] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
            post_order[post_order_num++] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
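
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): callers allocate the order array themselves;
   n_basic_blocks elements are enough when entry/exit are included.  */
#if 0
static void
walk_in_post_order (void)
{
  int *order = XNEWVEC (int, n_basic_blocks);
  int n = post_order_compute (order, true);
  int i;

  for (i = 0; i < n; i++)
    if (order[i] != ENTRY_BLOCK && order[i] != EXIT_BLOCK)
      {
        basic_block bb = BASIC_BLOCK (order[i]);
        /* ... visit bb; in a cycle-free region every successor of bb
           has already been visited ... */
      }
  free (order);
}
#endif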

/* Compute the depth first search order and store it in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, store the
   reverse completion number of each node there.  Returns the number
   of nodes visited.  A depth first search tries to get as far away
   from the starting point as quickly as possible.

   PRE_ORDER is really a preorder numbering of the graph.
   REV_POST_ORDER is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
                                bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (pre_order)
            pre_order[pre_order_num] = dest->index;

          pre_order_num++;

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = ei_start (dest->succs);
          else if (rev_post_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
      /* The number of nodes visited should be the number of blocks.  */
      gcc_assert (pre_order_num == n_basic_blocks);
    }
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);

  return pre_order_num;
}
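
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): reverse postorder is the canonical iteration order for
   forward dataflow problems.  */
#if 0
static void
iterate_in_rev_post_order (void)
{
  int *rpo = XNEWVEC (int, n_basic_blocks);
  int n = pre_and_rev_post_order_compute (NULL, rpo, false);
  int i;

  for (i = 0; i < n; i++)
    {
      basic_block bb = BASIC_BLOCK (rpo[i]);
      /* ... apply the transfer function to bb; all of its
         predecessors except those reached over back edges have
         already been processed ... */
    }
  free (rpo);
}
#endif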

/* Compute a depth first search order on the _reverse_ graph, i.e.
   walking predecessor rather than successor edges, marking the
   blocks visited along the way.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up:
   an empty basic block stack and a visited-blocks bitmap with no
   bits set.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = XNEWVEC (basic_block, n_basic_blocks);
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds data,
                                  basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!TEST_BIT (data->visited_blocks, e->src->index))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!TEST_BIT (data->visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Perform a DFS from BB over vertices satisfying PREDICATE; if REVERSE,
   walk against the direction of the edges.  Return the number of blocks
   found, storing them in RSLT, which can hold at most RSLT_MAX items.
   DATA is passed through to PREDICATE.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
                    bool (*predicate) (basic_block, void *),
                    basic_block *rslt, int rslt_max, void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;
  unsigned size;

  /* A bitmap to keep track of visited blocks.  Allocating it afresh on
     each call would be too costly, since dfs_enumerate_from is often
     used on small (almost) disjoint parts of the cfg (bodies of loops),
     and allocating a large sbitmap each time would lead to quadratic
     behavior.  */
  static sbitmap visited;
  static unsigned v_size;

#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))

  /* Resize the VISITED sbitmap if necessary.  */
  size = last_basic_block;
  if (size < 10)
    size = 10;

  if (!visited)
    {
      visited = sbitmap_alloc (size);
      sbitmap_zero (visited);
      v_size = size;
    }
  else if (v_size < size)
    {
      /* Ensure that we increase the size of the sbitmap exponentially.  */
      if (2 * v_size > size)
        size = 2 * v_size;

      visited = sbitmap_resize (visited, size, 0);
      v_size = size;
    }

  st = XCNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
        {
          FOR_EACH_EDGE (e, ei, lbb->preds)
            if (!VISITED_P (e->src) && predicate (e->src, data))
              {
                gcc_assert (tv != rslt_max);
                rslt[tv++] = st[sp++] = e->src;
                MARK_VISITED (e->src);
              }
        }
      else
        {
          FOR_EACH_EDGE (e, ei, lbb->succs)
            if (!VISITED_P (e->dest) && predicate (e->dest, data))
              {
                gcc_assert (tv != rslt_max);
                rslt[tv++] = st[sp++] = e->dest;
                MARK_VISITED (e->dest);
              }
        }
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
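
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): collecting the body of a natural loop by walking the
   reverse graph from the latch while staying inside blocks dominated
   by the header.  Both functions here are hypothetical; dominated_by_p
   comes from dominance.c and assumes dominators have been computed.  */
#if 0
static bool
dominated_by_header_p (basic_block bb, void *header)
{
  return dominated_by_p (CDI_DOMINATORS, bb, (basic_block) header);
}

static int
collect_loop_body (basic_block header, basic_block latch,
                   basic_block *body, int max_size)
{
  return dfs_enumerate_from (latch, 1, dominated_by_header_p,
                             body, max_size, header);
}
#endif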

/* Compute dominance frontiers, a la Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */

static void
compute_dominance_frontiers_1 (bitmap *frontiers)
{
  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB (b)
    {
      if (EDGE_COUNT (b->preds) >= 2)
        {
          FOR_EACH_EDGE (p, ei, b->preds)
            {
              basic_block runner = p->src;
              basic_block domsb;
              if (runner == ENTRY_BLOCK_PTR)
                continue;

              domsb = get_immediate_dominator (CDI_DOMINATORS, b);
              while (runner != domsb)
                {
                  if (bitmap_bit_p (frontiers[runner->index], b->index))
                    break;
                  bitmap_set_bit (frontiers[runner->index],
                                  b->index);
                  runner = get_immediate_dominator (CDI_DOMINATORS,
                                                    runner);
                }
            }
        }
    }
}

void
compute_dominance_frontiers (bitmap *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  compute_dominance_frontiers_1 (frontiers);

  timevar_pop (TV_DOM_FRONTIERS);
}
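
/* Illustrative usage sketch, not part of the original file (guarded out
   with #if 0): dominance frontiers drive PHI placement during SSA
   construction.  The caller supplies one bitmap per basic block and
   must have computed dominators first.  */
#if 0
static void
dominance_frontier_demo (FILE *file)
{
  bitmap *frontiers = XNEWVEC (bitmap, last_basic_block);
  basic_block bb;
  int i;

  calculate_dominance_info (CDI_DOMINATORS);
  for (i = 0; i < last_basic_block; i++)
    frontiers[i] = BITMAP_ALLOC (NULL);

  compute_dominance_frontiers (frontiers);

  FOR_EACH_BB (bb)
    {
      fprintf (file, "DF(%d):", bb->index);
      bitmap_print (file, frontiers[bb->index], " {", "}\n");
    }

  for (i = 0; i < last_basic_block; i++)
    BITMAP_FREE (frontiers[i]);
  free (frontiers);
  free_dominance_info (CDI_DOMINATORS);
}
#endif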