/* gcc-4.2.2/gcc/cfgbuild.c, from the OpenCores openrisc Subversion trunk:
   https://opencores.org/ocsvn/openrisc/openrisc/trunk  */
/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"

static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if insn is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jumptables.  */
      return (NEXT_INSN (insn) == 0
              || !JUMP_P (NEXT_INSN (insn))
              || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
                  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}
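
/* Editorial illustration (not part of the original file): the jumptable
   special case above matters because a tablejump is normally emitted as a
   dispatching JUMP_INSN followed by a CODE_LABEL and a table JUMP_INSN
   whose PATTERN is an ADDR_VEC or ADDR_DIFF_VEC, schematically:

       (jump_insn ... (set (pc) ...))       dispatch, uses (label_ref 23)
       (code_label 23)
       (jump_insn (addr_diff_vec ...))      the table itself

   Neither the table's label nor the table insn belongs inside a basic
   block, so both are rejected here.  */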

/* Return true if INSN may cause a control flow transfer, so it should be
   the last insn in a basic block.  */

bool
control_flow_insn_p (rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
         tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;
      /* Call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
               && (0 == (note = find_reg_note (insn, REG_EH_REGION,
                                               NULL_RTX))
                   || INTVAL (XEXP (note, 0)) >= 0))
              /* Or may trap.  */
              || can_throw_internal (insn));

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;

      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the
         end of a basic block, but before dead code is eliminated
         this may happen.  */
      return false;

    default:
      gcc_unreachable ();
    }
}
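
/* Editorial illustration (not part of the original file): a call to a
   noreturn function such as abort () normally carries a REG_NORETURN
   note; since it is not conditionally executed (no COND_EXEC wrapper),
   the CALL_INSN case above reports it as ending its basic block.  */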

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (rtx f)
{
  int count = NUM_FIXED_BLOCKS;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
         terminated at the previous real insn.  */
      if ((LABEL_P (insn) || BARRIER_P (insn))
          && saw_insn)
        count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
        saw_insn = true;

      /* A control flow insn causes the current basic block to be
         terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
        count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == NUM_FIXED_BLOCKS)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = NUM_FIXED_BLOCKS + 1;
    }

  return count;
}
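
/* Editorial walk-through (not part of the original file): for an insn
   stream of the shape

       insn         starts a block (saw_insn becomes true)
       jump_insn    control flow insn: count++, saw_insn cleared
       code_label   can head a block, so saw_insn becomes true again
       insn
       (end)        saw_insn still set: count++

   count_basic_blocks returns NUM_FIXED_BLOCKS + 2.  */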

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception-handling edges generated by INSN, one for each
   reachable handler.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
                     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP         32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
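
/* Editorial note (not part of the original file): the state is stored in
   the otherwise-unused bb->aux field as a plain integer.  The enum values
   occupy the low bits and BLOCK_USED_BY_TABLEJUMP (32) is a disjoint flag
   bit, so for example:

       SET_STATE (bb, BLOCK_TO_SPLIT);                  aux holds 2
       SET_STATE (bb, FULL_STATE (bb)
                      | BLOCK_USED_BY_TABLEJUMP);       aux holds 2 | 32 == 34

   purge_dead_tablejump_edges clears the flag bit again on the blocks it
   keeps, since STATE casts the whole word back to the enum.  */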

/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating CFG and zero if we
   are building CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By nature of the way these get numbered, the block
     ENTRY_BLOCK_PTR->next_bb is always the function entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          sbitmap_zero (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR)
                  SET_BIT (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* Recognize exception handling placeholders.  */
          if (GET_CODE (PATTERN (insn)) == RESX)
            rtl_make_eh_edge (edge_cache, bb, insn);

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &tmp))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g., ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* A return insn creates an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined call
         and return, and so we need an edge to the exit block.  No need to
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's possible
                 to tell that certain calls will not do a nonlocal goto.
                 For example, if the nested functions that do the nonlocal
                 gotos do not have their addresses taken, then only calls to
                 those functions or to other nested functions that use them
                 could possibly do nonlocal gotos.  */

              /* We do know that a REG_EH_REGION note with a value less
                 than 0 is guaranteed not to perform a non-local goto.  */
              rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

              if (!note || INTVAL (XEXP (note, 0)) >= 0)
                for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
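
/* Editorial illustration (not part of the original file): for a block
   ending in the usual conditional jump

       (jump_insn ... (set (pc) (if_then_else (cond)
                                              (label_ref 42)
                                              (pc))))

   the JUMP_INSN arm above adds an edge to the block headed by label 42
   through make_label_edge, and the drop-through scan at the bottom of
   the loop adds an EDGE_FALLTHRU edge to bb->next_bb, giving the block
   its two successors.  */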

/* Find all basic blocks of the function whose first insn is F.

   Labels whose addresses are taken are recorded on the forced_labels
   chain; make_edges uses that list for computed gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((LABEL_P (insn) || BARRIER_P (insn))
          && head)
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      if (inside_basic_block_p (insn))
        {
          if (head == NULL_RTX)
            head = insn;
          end = insn;
        }

      if (head && control_flow_insn_p (insn))
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      switch (code)
        {
        case NOTE:
          {
            int kind = NOTE_LINE_NUMBER (insn);

            /* Look for basic block notes with which to keep the
               basic_block_info pointers stable.  Unthread the note now;
               we'll put it back at the right place in create_basic_block.
               Or not at all if we've already found a note in this block.  */
            if (kind == NOTE_INSN_BASIC_BLOCK)
              {
                if (bb_note == NULL_RTX)
                  bb_note = insn;
                else
                  next = delete_insn (insn);
              }
            break;
          }

        case CODE_LABEL:
        case JUMP_INSN:
        case CALL_INSN:
        case INSN:
        case BARRIER:
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  gcc_assert (last_basic_block == n_basic_blocks);

  clear_aux_for_blocks ();
}


/* Find basic blocks of the current function.
   F is the first insn of the function.  */

void
find_basic_blocks (rtx f)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
         tag for reuse during create_basic_block, just in case some pass
         copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
        bb->aux = NULL;

      basic_block_info = NULL;
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = NUM_FIXED_BLOCKS;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;


  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  basic_block_info = VEC_alloc (basic_block, gc, n_basic_blocks);
  VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
  memset (VEC_address (basic_block, basic_block_info), 0,
          sizeof (basic_block) * n_basic_blocks);
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);

  find_basic_blocks_1 (f);

  profile_status = PROFILE_ABSENT;

  /* Tell make_edges to examine every block for outgoing edges.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, BLOCK_NEW);

  /* Discover the edges of our cfg.  */
  make_edges (ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
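
/* Editorial usage note (not part of the original file): a pass that has
   invalidated the CFG wholesale can rebuild it from the current
   function's insn chain with

       find_basic_blocks (get_insns ());

   where get_insns () yields the first insn of the chain.  */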

/* Mark the basic block containing LABEL as used by a tablejump.  Helper
   for purge_dead_tablejump_edges.  */

static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

/* Remove from BB the outgoing edges that the jump table TABLE no longer
   accounts for, keeping abnormal and EH edges.  */

static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
       && SET_DEST (tmp) == pc_rtx
       && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
       && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                            & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            BB_END (bb) = flow_transfer_insn;

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR, bb, 0);
        }

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          BB_END (bb) = flow_transfer_insn;
          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* If the expander replaced a normal insn by a sequence that ends in a
     return and a barrier, or by some other sequence that does not behave
     like an ordinary jump, we need to take care and move the basic block
     boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}
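
/* Editorial illustration (not part of the original file): if some pass
   inserted a label L in the middle of block B,

       B:  insn_1
           code_label L     <- new boundary
           insn_2

   the CODE_LABEL case above splits B before L, leaving B = { insn_1 }
   and a new block headed by L; make_edges is then expected to wire up
   the edges of the new block.  */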

/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = INTVAL (XEXP (note, 0));
          e = BRANCH_EDGE (b);
          e->probability = probability;
          e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
                      / REG_BR_PROB_BASE);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
          return;
        }
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
                  / REG_BR_PROB_BASE);
}
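
/* Editorial worked example (not part of the original file):
   REG_BR_PROB_BASE is 10000, so a REG_BR_PROB note of 6000 on a block
   with count 50 yields

       e->count = (50 * 6000 + 5000) / 10000 = 30
       f->probability = 10000 - 6000 = 4000
       f->count = 50 - 30 = 20

   a 60%/40% split of the block's execution count, with the rounding
   remainder pushed onto the fallthru edge.  */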

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
               TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created, with only forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bb->count = 0;
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                bb->count += e->count;
                bb->frequency += EDGE_FREQUENCY (e);
              }
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
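
/* Editorial usage sketch (not part of the original file): a caller marks
   the blocks it has disturbed in an sbitmap and hands it to
   find_many_sub_basic_blocks, for instance

       sbitmap blocks = sbitmap_alloc (last_basic_block);
       sbitmap_zero (blocks);
       SET_BIT (blocks, bb->index);      (BB was modified in place)
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);

   Unmarked blocks stay BLOCK_ORIGINAL and are left intact.  */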
