/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);

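/* Unlink the insns from FIRST to LAST, inclusive, from the insn chain,
   keeping the remaining chain (and the recorded first/last insn) consistent.
   The unlinked insns keep their internal links, and FIRST is returned so the
   subchain can later be spliced back in elsewhere.  */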
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
76
   associated with BB (e.g., barriers). If there are any such insns,
77
   we return the last one. Otherwise, we return the end of BB.  */
78
 
79
static rtx
80
skip_insns_after_block (basic_block bb)
81
{
82
  rtx insn, last_insn, next_head, prev;
83
 
84
  next_head = NULL_RTX;
85
  if (bb->next_bb != EXIT_BLOCK_PTR)
86
    next_head = BB_HEAD (bb->next_bb);
87
 
88
  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
89
    {
90
      if (insn == next_head)
91
        break;
92
 
93
      switch (GET_CODE (insn))
94
        {
95
        case BARRIER:
96
          last_insn = insn;
97
          continue;
98
 
99
        case NOTE:
100
          switch (NOTE_LINE_NUMBER (insn))
101
            {
102
            case NOTE_INSN_LOOP_END:
103
            case NOTE_INSN_BLOCK_END:
104
              last_insn = insn;
105
              continue;
106
            case NOTE_INSN_DELETED:
107
            case NOTE_INSN_DELETED_LABEL:
108
              continue;
109
 
110
            default:
111
              continue;
112
              break;
113
            }
114
          break;
115
 
116
        case CODE_LABEL:
117
          if (NEXT_INSN (insn)
118
              && JUMP_P (NEXT_INSN (insn))
119
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
120
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
121
            {
122
              insn = NEXT_INSN (insn);
123
              last_insn = insn;
124
              continue;
125
            }
126
          break;
127
 
128
        default:
129
          break;
130
        }
131
 
132
      break;
133
    }
134
 
135
  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This
     can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
146
    {
147
      prev = PREV_INSN (insn);
148
      if (NOTE_P (insn))
149
        switch (NOTE_LINE_NUMBER (insn))
150
          {
151
          case NOTE_INSN_LOOP_END:
152
          case NOTE_INSN_BLOCK_END:
153
          case NOTE_INSN_DELETED:
154
          case NOTE_INSN_DELETED_LABEL:
155
            continue;
156
          default:
157
            reorder_insns (insn, insn, last_insn);
158
          }
159
    }
160
 
161
  return last_insn;
162
}
163
 
164
/* Locate or create a label for a given basic block.  */
165
 
166
static rtx
167
label_for_bb (basic_block bb)
168
{
169
  rtx label = BB_HEAD (bb);
170
 
171
  if (!LABEL_P (label))
172
    {
173
      if (dump_file)
174
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);
175
 
176
      label = block_label (bb);
177
    }
178
 
179
  return label;
180
}
181
 
182
/* Locate the effective beginning and end of the insn chain for each
183
   block, as defined by skip_insns_after_block above.  */
184
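/* Nothing unlinked here is lost: insns in front of the first basic block end
   up in cfg_layout_function_header, insns between BB_END (bb) and the next
   block's head end up in bb->il.rtl->footer (and those in front of
   BB_HEAD (bb) in bb->il.rtl->header), and trailing insns end up in
   cfg_layout_function_footer.  fixup_reorder_chain later splices them back
   around the reordered blocks.  */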
 
185
static void
186
record_effective_endpoints (void)
187
{
188
  rtx next_insn;
189
  basic_block bb;
190
  rtx insn;
191
 
192
  for (insn = get_insns ();
193
       insn
194
       && NOTE_P (insn)
195
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
196
       insn = NEXT_INSN (insn))
197
    continue;
198
  /* No basic blocks at all?  */
199
  gcc_assert (insn);
200
 
201
  if (PREV_INSN (insn))
202
    cfg_layout_function_header =
203
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
204
  else
205
    cfg_layout_function_header = NULL_RTX;
206
 
207
  next_insn = get_insns ();
208
  FOR_EACH_BB (bb)
209
    {
210
      rtx end;
211
 
212
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
213
        bb->il.rtl->header = unlink_insn_chain (next_insn,
214
                                              PREV_INSN (BB_HEAD (bb)));
215
      end = skip_insns_after_block (bb);
216
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
217
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
218
      next_insn = NEXT_INSN (BB_END (bb));
219
    }
220
 
221
  cfg_layout_function_footer = next_insn;
222
  if (cfg_layout_function_footer)
223
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
224
}
225
 
226
DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* Data structures representing the mapping of INSN_LOCATORs into scope
   blocks, line numbers and files.  In order to be GGC friendly we need to
   use separate varrays.  This also slightly improves memory locality in the
   binary search.  The _locs arrays contain the locators at which the given
   property changes.  block_locators_blocks contains the scope block that is
   used for all insn locators greater than the corresponding
   block_locators_locs value and smaller than the following one.  Similarly
   for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *line_locators_locs;
static VEC(int,heap) *line_locators_lines;
static VEC(int,heap) *file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;

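/* For illustration: if insns with locators 1..3 lie in scope block A and
   insns with locators 4 and up lie in block B, then block_locators_locs
   holds {1, 4} and block_locators_blocks holds {A, B}; looking up a locator
   means binary-searching for the greatest recorded value not exceeding it
   (see insn_scope, locator_line and locator_file below).  The line and file
   tables are encoded the same way.  */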
/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by a representation using
   INSN_LOCATORs.  */
 
249
void
250
insn_locators_initialize (void)
251
{
252
  tree block = NULL;
253
  tree last_block = NULL;
254
  rtx insn, next;
255
  int loc = 0;
256
  int line_number = 0, last_line_number = 0;
257
  const char *file_name = NULL, *last_file_name = NULL;
258
 
259
  prologue_locator = epilogue_locator = 0;
260
 
261
  block_locators_locs = VEC_alloc (int, heap, 32);
262
  block_locators_blocks = VEC_alloc (tree, gc, 32);
263
  line_locators_locs = VEC_alloc (int, heap, 32);
264
  line_locators_lines = VEC_alloc (int, heap, 32);
265
  file_locators_locs = VEC_alloc (int, heap, 32);
266
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
267
 
268
  for (insn = get_insns (); insn; insn = next)
269
    {
270
      int active = 0;
271
 
272
      next = NEXT_INSN (insn);
273
 
274
      if (NOTE_P (insn))
275
        {
276
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
277
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
278
          if (NOTE_LINE_NUMBER (insn) > 0)
279
            {
280
              expanded_location xloc;
281
              NOTE_EXPANDED_LOCATION (xloc, insn);
282
              line_number = xloc.line;
283
              file_name = xloc.file;
284
            }
285
        }
286
      else
287
        active = (active_insn_p (insn)
288
                  && GET_CODE (PATTERN (insn)) != ADDR_VEC
289
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
290
 
291
      check_block_change (insn, &block);
292
 
293
      if (active
294
          || !next
295
          || (!prologue_locator && file_name))
296
        {
297
          if (last_block != block)
298
            {
299
              loc++;
300
              VEC_safe_push (int, heap, block_locators_locs, loc);
301
              VEC_safe_push (tree, gc, block_locators_blocks, block);
302
              last_block = block;
303
            }
304
          if (last_line_number != line_number)
305
            {
306
              loc++;
307
              VEC_safe_push (int, heap, line_locators_locs, loc);
308
              VEC_safe_push (int, heap, line_locators_lines, line_number);
309
              last_line_number = line_number;
310
            }
311
          if (last_file_name != file_name)
312
            {
313
              loc++;
314
              VEC_safe_push (int, heap, file_locators_locs, loc);
315
              VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
316
              last_file_name = file_name;
317
            }
318
          if (!prologue_locator && file_name)
319
            prologue_locator = loc;
320
          if (!next)
321
            epilogue_locator = loc;
322
          if (active)
323
            INSN_LOCATOR (insn) = loc;
324
        }
325
    }
326
 
327
  /* Tag the blocks with a depth number so that change_scope can find
328
     the common parent easily.  */
329
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
330
 
331
  free_block_changes ();
332
}
333
 
334
struct tree_opt_pass pass_insn_locators_initialize =
335
{
336
  "locators",                           /* name */
337
  NULL,                                 /* gate */
338
  insn_locators_initialize,             /* execute */
339
  NULL,                                 /* sub */
340
  NULL,                                 /* next */
341
  0,                                    /* static_pass_number */
342
  0,                                    /* tv_id */
343
  0,                                    /* properties_required */
344
  0,                                    /* properties_provided */
345
  0,                                    /* properties_destroyed */
346
  0,                                    /* todo_flags_start */
347
  TODO_dump_func,                       /* todo_flags_finish */
348
 
349
};
350
 
351
 
352
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
353
   found in the block tree.  */
354
 
355
static void
356
set_block_levels (tree block, int level)
357
{
358
  while (block)
359
    {
360
      BLOCK_NUMBER (block) = level;
361
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
362
      block = BLOCK_CHAIN (block);
363
    }
364
}
365
 
366
/* Return the scope resulting from the combination of S1 and S2.  */
static tree
368
choose_inner_scope (tree s1, tree s2)
369
{
370
   if (!s1)
371
     return s2;
372
   if (!s2)
373
     return s1;
374
   if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
375
     return s1;
376
   return s2;
377
}
378
 
379
/* Emit lexical block notes needed to change scope from S1 to S2.  */
380
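/* The common ancestor of S1 and S2 is found by repeatedly replacing the
   deeper block (per BLOCK_NUMBER, as assigned by set_block_levels) with its
   BLOCK_SUPERCONTEXT; NOTE_INSN_BLOCK_END notes are then emitted for the
   blocks between S1 and that ancestor, and NOTE_INSN_BLOCK_BEG notes for
   the blocks between S2 and the ancestor, all in front of ORIG_INSN.  */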
 
381
static void
382
change_scope (rtx orig_insn, tree s1, tree s2)
383
{
384
  rtx insn = orig_insn;
385
  tree com = NULL_TREE;
386
  tree ts1 = s1, ts2 = s2;
387
  tree s;
388
 
389
  while (ts1 != ts2)
390
    {
391
      gcc_assert (ts1 && ts2);
392
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
393
        ts1 = BLOCK_SUPERCONTEXT (ts1);
394
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
395
        ts2 = BLOCK_SUPERCONTEXT (ts2);
396
      else
397
        {
398
          ts1 = BLOCK_SUPERCONTEXT (ts1);
399
          ts2 = BLOCK_SUPERCONTEXT (ts2);
400
        }
401
    }
402
  com = ts1;
403
 
404
  /* Close scopes.  */
405
  s = s1;
406
  while (s != com)
407
    {
408
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
409
      NOTE_BLOCK (note) = s;
410
      s = BLOCK_SUPERCONTEXT (s);
411
    }
412
 
413
  /* Open scopes.  */
414
  s = s2;
415
  while (s != com)
416
    {
417
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
418
      NOTE_BLOCK (insn) = s;
419
      s = BLOCK_SUPERCONTEXT (s);
420
    }
421
}
422
 
423
/* Return the lexical scope block INSN belongs to.  */
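/* The three lookup routines below share one scheme: binary-search the
   matching _locs vector for the greatest recorded locator that is less than
   or equal to the one being looked up, and return the property recorded at
   that index.  */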
static tree
425
insn_scope (rtx insn)
426
{
427
  int max = VEC_length (int, block_locators_locs);
428
  int min = 0;
429
  int loc = INSN_LOCATOR (insn);
430
 
431
  /* When block_locators_locs was initialized, the prologue and epilogue
     insns didn't exist yet and therefore cannot be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function, and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
441
    return DECL_INITIAL (cfun->decl);
442
 
443
  if (!max || !loc)
444
    return NULL;
445
  while (1)
446
    {
447
      int pos = (min + max) / 2;
448
      int tmp = VEC_index (int, block_locators_locs, pos);
449
 
450
      if (tmp <= loc && min != pos)
451
        min = pos;
452
      else if (tmp > loc && max != pos)
453
        max = pos;
454
      else
455
        {
456
          min = pos;
457
          break;
458
        }
459
    }
460
  return VEC_index (tree, block_locators_blocks, min);
461
}
462
 
463
/* Return line number of the statement specified by the locator.  */
464
int
465
locator_line (int loc)
466
{
467
  int max = VEC_length (int, line_locators_locs);
468
  int min = 0;
469
 
470
  if (!max || !loc)
471
    return 0;
472
  while (1)
473
    {
474
      int pos = (min + max) / 2;
475
      int tmp = VEC_index (int, line_locators_locs, pos);
476
 
477
      if (tmp <= loc && min != pos)
478
        min = pos;
479
      else if (tmp > loc && max != pos)
480
        max = pos;
481
      else
482
        {
483
          min = pos;
484
          break;
485
        }
486
    }
487
  return VEC_index (int, line_locators_lines, min);
488
}
489
 
490
/* Return line number of the statement that produced this insn.  */
491
int
492
insn_line (rtx insn)
493
{
494
  return locator_line (INSN_LOCATOR (insn));
495
}
496
 
497
/* Return source file of the statement specified by LOC.  */
498
const char *
499
locator_file (int loc)
500
{
501
  int max = VEC_length (int, file_locators_locs);
502
  int min = 0;
503
 
504
  if (!max || !loc)
505
    return NULL;
506
  while (1)
507
    {
508
      int pos = (min + max) / 2;
509
      int tmp = VEC_index (int, file_locators_locs, pos);
510
 
511
      if (tmp <= loc && min != pos)
512
        min = pos;
513
      else if (tmp > loc && max != pos)
514
        max = pos;
515
      else
516
        {
517
          min = pos;
518
          break;
519
        }
520
    }
521
   return VARRAY_CHAR_PTR (file_locators_files, min);
522
}
523
 
524
/* Return source file of the statement that produced this insn.  */
525
const char *
526
insn_file (rtx insn)
527
{
528
  return locator_file (INSN_LOCATOR (insn));
529
}
530
 
531
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
532
   on the scope tree and the newly reordered instructions.  */
533
 
534
void
535
reemit_insn_block_notes (void)
536
{
537
  tree cur_block = DECL_INITIAL (cfun->decl);
538
  rtx insn, note;
539
 
540
  insn = get_insns ();
541
  if (!active_insn_p (insn))
542
    insn = next_active_insn (insn);
543
  for (; insn; insn = next_active_insn (insn))
544
    {
545
      tree this_block;
546
 
547
      /* Avoid putting scope notes between jump table and its label.  */
548
      if (JUMP_P (insn)
549
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
550
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
551
        continue;
552
 
553
      this_block = insn_scope (insn);
554
      /* For sequences compute scope resulting from merging all scopes
555
         of instructions nested inside.  */
556
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
557
        {
558
          int i;
559
          rtx body = PATTERN (insn);
560
 
561
          this_block = NULL;
562
          for (i = 0; i < XVECLEN (body, 0); i++)
563
            this_block = choose_inner_scope (this_block,
564
                                         insn_scope (XVECEXP (body, 0, i)));
565
        }
566
      if (! this_block)
567
        continue;
568
 
569
      if (this_block != cur_block)
570
        {
571
          change_scope (insn, cur_block, this_block);
572
          cur_block = this_block;
573
        }
574
    }
575
 
576
  /* change_scope emits before the insn, not after.  */
577
  note = emit_note (NOTE_INSN_DELETED);
578
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
579
  delete_insn (note);
580
 
581
  reorder_blocks ();
582
}
583
 
584
/* Given a reorder chain, rearrange the code to match.  */
585
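/* The desired block order is encoded in the bb->aux pointers: the chain
   starts at ENTRY_BLOCK_PTR->next_bb and each block's aux field names the
   block that should follow it (NULL ends the chain).  The code below first
   splices headers, block bodies and footers together in that order, then
   patches or adds jumps so that every fallthru edge really does reach the
   next block in the new sequence.  */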
 
586
static void
587
fixup_reorder_chain (void)
588
{
589
  basic_block bb, prev_bb;
590
  int index;
591
  rtx insn = NULL;
592
 
593
  if (cfg_layout_function_header)
594
    {
595
      set_first_insn (cfg_layout_function_header);
596
      insn = cfg_layout_function_header;
597
      while (NEXT_INSN (insn))
598
        insn = NEXT_INSN (insn);
599
    }
600
 
601
  /* First do the bulk reordering -- rechain the blocks without regard to
602
     the needed changes to jumps and labels.  */
603
 
604
  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
605
       bb != 0;
606
       bb = bb->aux, index++)
607
    {
608
      if (bb->il.rtl->header)
609
        {
610
          if (insn)
611
            NEXT_INSN (insn) = bb->il.rtl->header;
612
          else
613
            set_first_insn (bb->il.rtl->header);
614
          PREV_INSN (bb->il.rtl->header) = insn;
615
          insn = bb->il.rtl->header;
616
          while (NEXT_INSN (insn))
617
            insn = NEXT_INSN (insn);
618
        }
619
      if (insn)
620
        NEXT_INSN (insn) = BB_HEAD (bb);
621
      else
622
        set_first_insn (BB_HEAD (bb));
623
      PREV_INSN (BB_HEAD (bb)) = insn;
624
      insn = BB_END (bb);
625
      if (bb->il.rtl->footer)
626
        {
627
          NEXT_INSN (insn) = bb->il.rtl->footer;
628
          PREV_INSN (bb->il.rtl->footer) = insn;
629
          while (NEXT_INSN (insn))
630
            insn = NEXT_INSN (insn);
631
        }
632
    }
633
 
634
  gcc_assert (index == n_basic_blocks);
635
 
636
  NEXT_INSN (insn) = cfg_layout_function_footer;
637
  if (cfg_layout_function_footer)
638
    PREV_INSN (cfg_layout_function_footer) = insn;
639
 
640
  while (NEXT_INSN (insn))
641
    insn = NEXT_INSN (insn);
642
 
643
  set_last_insn (insn);
644
#ifdef ENABLE_CHECKING
645
  verify_insn_chain ();
646
#endif
647
  delete_dead_jumptables ();
648
 
649
  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */
 
652
  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
653
    {
654
      edge e_fall, e_taken, e;
655
      rtx bb_end_insn;
656
      basic_block nb;
657
      edge_iterator ei;
658
 
659
      if (EDGE_COUNT (bb->succs) == 0)
660
        continue;
661
 
662
      /* Find the old fallthru edge, and another non-EH edge for
663
         a taken jump.  */
664
      e_taken = e_fall = NULL;
665
 
666
      FOR_EACH_EDGE (e, ei, bb->succs)
667
        if (e->flags & EDGE_FALLTHRU)
668
          e_fall = e;
669
        else if (! (e->flags & EDGE_EH))
670
          e_taken = e;
671
 
672
      bb_end_insn = BB_END (bb);
673
      if (JUMP_P (bb_end_insn))
674
        {
675
          if (any_condjump_p (bb_end_insn))
676
            {
677
              /* If the old fallthru is still next, nothing to do.  */
678
              if (bb->aux == e_fall->dest
679
                  || e_fall->dest == EXIT_BLOCK_PTR)
680
                continue;
681
 
682
              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets that have jumps with
                 side effects.

                 Temporarily create a duplicated edge representing the
                 branch.  It will get unidentified by
                 force_nonfallthru_and_redirect, which would otherwise get
                 confused by a fallthru edge not pointing to the next basic
                 block.  */
              if (!e_taken)
691
                {
692
                  rtx note;
693
                  edge e_fake;
694
                  bool redirected;
695
 
696
                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
697
 
698
                  redirected = redirect_jump (BB_END (bb),
699
                                              block_label (bb), 0);
700
                  gcc_assert (redirected);
701
 
702
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
703
                  if (note)
704
                    {
705
                      int prob = INTVAL (XEXP (note, 0));
706
 
707
                      e_fake->probability = prob;
708
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
709
                      e_fall->probability -= e_fall->probability;
710
                      e_fall->count -= e_fake->count;
711
                      if (e_fall->probability < 0)
712
                        e_fall->probability = 0;
713
                      if (e_fall->count < 0)
714
                        e_fall->count = 0;
715
                    }
716
                }
717
              /* There is one special case: if *neither* block is next,
718
                 such as happens at the very end of a function, then we'll
719
                 need to add a new unconditional jump.  Choose the taken
720
                 edge based on known or assumed probability.  */
721
              else if (bb->aux != e_taken->dest)
722
                {
723
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
724
 
725
                  if (note
726
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
727
                      && invert_jump (bb_end_insn,
728
                                      (e_fall->dest == EXIT_BLOCK_PTR
729
                                       ? NULL_RTX
730
                                       : label_for_bb (e_fall->dest)), 0))
731
                    {
732
                      e_fall->flags &= ~EDGE_FALLTHRU;
733
#ifdef ENABLE_CHECKING
734
                      gcc_assert (could_fall_through
735
                                  (e_taken->src, e_taken->dest));
736
#endif
737
                      e_taken->flags |= EDGE_FALLTHRU;
738
                      update_br_prob_note (bb);
739
                      e = e_fall, e_fall = e_taken, e_taken = e;
740
                    }
741
                }
742
 
743
              /* If the "jumping" edge is a crossing edge, and the fall
744
                 through edge is non-crossing, leave things as they are.  */
745
              else if ((e_taken->flags & EDGE_CROSSING)
746
                       && !(e_fall->flags & EDGE_CROSSING))
747
                continue;
748
 
749
              /* Otherwise we can try to invert the jump.  This will
750
                 basically never fail, however, keep up the pretense.  */
751
              else if (invert_jump (bb_end_insn,
752
                                    (e_fall->dest == EXIT_BLOCK_PTR
753
                                     ? NULL_RTX
754
                                     : label_for_bb (e_fall->dest)), 0))
755
                {
756
                  e_fall->flags &= ~EDGE_FALLTHRU;
757
#ifdef ENABLE_CHECKING
758
                  gcc_assert (could_fall_through
759
                              (e_taken->src, e_taken->dest));
760
#endif
761
                  e_taken->flags |= EDGE_FALLTHRU;
762
                  update_br_prob_note (bb);
763
                  continue;
764
                }
765
            }
766
          else
767
            {
768
              /* Otherwise we have some return, switch or computed
769
                 jump.  In the 99% case, there should not have been a
770
                 fallthru edge.  */
771
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
772
              continue;
773
            }
774
        }
775
      else
776
        {
777
          /* No fallthru implies a noreturn function with EH edges, or
778
             something similarly bizarre.  In any case, we don't need to
779
             do anything.  */
780
          if (! e_fall)
781
            continue;
782
 
783
          /* If the fallthru block is still next, nothing to do.  */
784
          if (bb->aux == e_fall->dest)
785
            continue;
786
 
787
          /* A fallthru to exit block.  */
788
          if (e_fall->dest == EXIT_BLOCK_PTR)
789
            continue;
790
        }
791
 
792
      /* We got here if we need to add a new jump insn.  */
793
      nb = force_nonfallthru (e_fall);
794
      if (nb)
795
        {
796
          nb->il.rtl->visited = 1;
797
          nb->aux = bb->aux;
798
          bb->aux = nb;
799
          /* Don't process this new block.  */
800
          bb = nb;
801
 
802
          /* Make sure the new bb is tagged for the correct section (same as
             the fall-thru source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, single_pred (bb));
806
          if (flag_reorder_blocks_and_partition
807
              && targetm.have_named_sections
808
              && JUMP_P (BB_END (bb))
809
              && !any_condjump_p (BB_END (bb))
810
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
811
            REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
812
              (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
813
        }
814
    }
815
 
816
  /* Put basic_block_info in the new order.  */
817
 
818
  if (dump_file)
819
    {
820
      fprintf (dump_file, "Reordered sequence:\n");
821
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
822
           bb;
823
           bb = bb->aux, index++)
824
        {
825
          fprintf (dump_file, " %i ", index);
826
          if (get_bb_original (bb))
827
            fprintf (dump_file, "duplicate of %i ",
828
                     get_bb_original (bb)->index);
829
          else if (forwarder_block_p (bb)
830
                   && !LABEL_P (BB_HEAD (bb)))
831
            fprintf (dump_file, "compensation ");
832
          else
833
            fprintf (dump_file, "bb %i ", bb->index);
834
          fprintf (dump_file, " [%i]\n", bb->frequency);
835
        }
836
    }
837
 
838
  prev_bb = ENTRY_BLOCK_PTR;
839
  bb = ENTRY_BLOCK_PTR->next_bb;
840
  index = 0;
841
 
842
  for (; bb; prev_bb = bb, bb = bb->aux, index ++)
843
    {
844
      bb->index = index;
845
      BASIC_BLOCK (index) = bb;
846
 
847
      bb->prev_bb = prev_bb;
848
      prev_bb->next_bb = bb;
849
    }
850
  prev_bb->next_bb = EXIT_BLOCK_PTR;
851
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
852
 
853
  /* Annoying special case - jump around dead jumptables left in the code.  */
854
  FOR_EACH_BB (bb)
855
    {
856
      edge e;
857
      edge_iterator ei;
858
 
859
      FOR_EACH_EDGE (e, ei, bb->succs)
860
        if (e->flags & EDGE_FALLTHRU)
861
          break;
862
 
863
      if (e && !can_fallthru (e->src, e->dest))
864
        force_nonfallthru (e);
865
    }
866
}
867
 
868
/* Perform sanity checks on the insn chain.
869
   1. Check that next/prev pointers are consistent in both the forward and
870
      reverse direction.
871
   2. Count insns in chain, going both directions, and check if equal.
872
   3. Check that get_last_insn () returns the actual end of chain.  */
873
 
874
void
875
verify_insn_chain (void)
876
{
877
  rtx x, prevx, nextx;
878
  int insn_cnt1, insn_cnt2;
879
 
880
  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
881
       x != 0;
882
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
883
    gcc_assert (PREV_INSN (x) == prevx);
884
 
885
  gcc_assert (prevx == get_last_insn ());
886
 
887
  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
888
       x != 0;
889
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
890
    gcc_assert (NEXT_INSN (x) == nextx);
891
 
892
  gcc_assert (insn_cnt1 == insn_cnt2);
893
}
894
 
895
/* If we have assembler epilogues, the block falling through to exit must
896
   be the last one in the reordered chain when we reach final.  Ensure
897
   that this condition is met.  */
898
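/* The fix is performed purely on the bb->aux chain prepared for
   fixup_reorder_chain: the fallthru predecessor of the exit block is
   unlinked from its current position and appended at the end of the chain.
   If that block happens to be the first one after the entry block, it is
   split first so that the function entry stays at the head of the chain.  */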
static void
899
fixup_fallthru_exit_predecessor (void)
900
{
901
  edge e;
902
  edge_iterator ei;
903
  basic_block bb = NULL;
904
 
905
  /* This transformation is not valid before reload, because we might
906
     separate a call from the instruction that copies the return
907
     value.  */
908
  gcc_assert (reload_completed);
909
 
910
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
911
    if (e->flags & EDGE_FALLTHRU)
912
      bb = e->src;
913
 
914
  if (bb && bb->aux)
915
    {
916
      basic_block c = ENTRY_BLOCK_PTR->next_bb;
917
 
918
      /* If the very first block is the one with the fall-through exit
919
         edge, we have to split that block.  */
920
      if (c == bb)
921
        {
922
          bb = split_block (bb, NULL)->dest;
923
          bb->aux = c->aux;
924
          c->aux = bb;
925
          bb->il.rtl->footer = c->il.rtl->footer;
926
          c->il.rtl->footer = NULL;
927
        }
928
 
929
      while (c->aux != bb)
930
        c = c->aux;
931
 
932
      c->aux = bb->aux;
933
      while (c->aux)
934
        c = c->aux;
935
 
936
      c->aux = bb;
937
      bb->aux = NULL;
938
    }
939
}
940
 
941
/* Return true in case it is possible to duplicate the basic block BB.  */
942
 
943
/* We do not want to declare the function in a header file, since it should
944
   only be used through the cfghooks interface, and we do not want to move
945
   it to cfgrtl.c since it would require also moving quite a lot of related
946
   code.  */
947
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
948
 
949
bool
950
cfg_layout_can_duplicate_bb_p (basic_block bb)
951
{
952
  /* Do not attempt to duplicate tablejumps, as we need to unshare
953
     the dispatch table.  This is difficult to do, as the instructions
954
     computing jump destination may be hoisted outside the basic block.  */
955
  if (tablejump_p (BB_END (bb), NULL, NULL))
956
    return false;
957
 
958
  /* Do not duplicate blocks containing insns that can't be copied.  */
959
  if (targetm.cannot_copy_insn_p)
960
    {
961
      rtx insn = BB_HEAD (bb);
962
      while (1)
963
        {
964
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
965
            return false;
966
          if (insn == BB_END (bb))
967
            break;
968
          insn = NEXT_INSN (insn);
969
        }
970
    }
971
 
972
  return true;
973
}
974
 
975
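/* Copy the insns from FROM to TO, inclusive, to the end of the insn chain
   and return the first insn of the copy.  Labels, dispatch tables and most
   notes are deliberately not duplicated; see the switch below.  */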
rtx
976
duplicate_insn_chain (rtx from, rtx to)
977
{
978
  rtx insn, last;
979
 
980
  /* Avoid updating of boundaries of previous basic block.  The
981
     note will get removed from insn stream in fixup.  */
982
  last = emit_note (NOTE_INSN_DELETED);
983
 
984
  /* Create copy at the end of INSN chain.  The chain will
985
     be reordered later.  */
986
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
987
    {
988
      switch (GET_CODE (insn))
989
        {
990
        case INSN:
991
        case CALL_INSN:
992
        case JUMP_INSN:
993
          /* Avoid copying of dispatch tables.  We never duplicate
994
             tablejumps, so this can hit only in case the table got
995
             moved far from original jump.  */
996
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
997
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
998
            break;
999
          emit_copy_of_insn_after (insn, get_last_insn ());
1000
          break;
1001
 
1002
        case CODE_LABEL:
1003
          break;
1004
 
1005
        case BARRIER:
1006
          emit_barrier ();
1007
          break;
1008
 
1009
        case NOTE:
1010
          switch (NOTE_LINE_NUMBER (insn))
1011
            {
1012
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:
1015
 
1016
            case NOTE_INSN_LOOP_BEG:
1017
            case NOTE_INSN_LOOP_END:
1018
              /* Strip down the loop notes - we don't really want to keep
1019
                 them consistent in loop copies.  */
1020
            case NOTE_INSN_DELETED:
1021
            case NOTE_INSN_DELETED_LABEL:
1022
              /* No problem to strip these.  */
1023
            case NOTE_INSN_EPILOGUE_BEG:
1024
            case NOTE_INSN_FUNCTION_END:
1025
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
1032
              break;
1033
 
1034
            case NOTE_INSN_REPEATED_LINE_NUMBER:
1035
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1036
              emit_note_copy (insn);
1037
              break;
1038
 
1039
            default:
1040
              /* All other notes should have already been eliminated.
1041
               */
1042
              gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1043
 
1044
              /* It is possible that no_line_number is set and the note
1045
                 won't be emitted.  */
1046
              emit_note_copy (insn);
1047
            }
1048
          break;
1049
        default:
1050
          gcc_unreachable ();
1051
        }
1052
    }
1053
  insn = NEXT_INSN (last);
1054
  delete_insn (last);
1055
  return insn;
1056
}
1057
/* Create a duplicate of the basic block BB.  */
1058
 
1059
/* We do not want to declare the function in a header file, since it should
1060
   only be used through the cfghooks interface, and we do not want to move
1061
   it to cfgrtl.c since it would require also moving quite a lot of related
1062
   code.  */
1063
extern basic_block cfg_layout_duplicate_bb (basic_block);
1064
 
1065
basic_block
1066
cfg_layout_duplicate_bb (basic_block bb)
1067
{
1068
  rtx insn;
1069
  basic_block new_bb;
1070
 
1071
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1072
  new_bb = create_basic_block (insn,
1073
                               insn ? get_last_insn () : NULL,
1074
                               EXIT_BLOCK_PTR->prev_bb);
1075
 
1076
  BB_COPY_PARTITION (new_bb, bb);
1077
  if (bb->il.rtl->header)
1078
    {
1079
      insn = bb->il.rtl->header;
1080
      while (NEXT_INSN (insn))
1081
        insn = NEXT_INSN (insn);
1082
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1083
      if (insn)
1084
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1085
    }
1086
 
1087
  if (bb->il.rtl->footer)
1088
    {
1089
      insn = bb->il.rtl->footer;
1090
      while (NEXT_INSN (insn))
1091
        insn = NEXT_INSN (insn);
1092
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1093
      if (insn)
1094
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1095
    }
1096
 
1097
  if (bb->il.rtl->global_live_at_start)
1098
    {
1099
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1100
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1101
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
1102
                    bb->il.rtl->global_live_at_start);
1103
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
1104
                    bb->il.rtl->global_live_at_end);
1105
    }
1106
 
1107
  return new_bb;
1108
}
1109
 
1110
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */
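/* Roughly, a pass that wants to work on the cfglayout representation calls
   cfg_layout_initialize (), rearranges blocks by rewriting the bb->aux
   chain (and may duplicate or delete blocks), and then calls
   cfg_layout_finalize () to linearize the insn stream again.  */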
 
1117
void
1118
cfg_layout_initialize (unsigned int flags)
1119
{
1120
  initialize_original_copy_tables ();
1121
 
1122
  cfg_layout_rtl_register_cfg_hooks ();
1123
 
1124
  record_effective_endpoints ();
1125
 
1126
  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1127
}
1128
 
1129
/* Splits superblocks.  */
1130
void
1131
break_superblocks (void)
1132
{
1133
  sbitmap superblocks;
1134
  bool need = false;
1135
  basic_block bb;
1136
 
1137
  superblocks = sbitmap_alloc (last_basic_block);
1138
  sbitmap_zero (superblocks);
1139
 
1140
  FOR_EACH_BB (bb)
1141
    if (bb->flags & BB_SUPERBLOCK)
1142
      {
1143
        bb->flags &= ~BB_SUPERBLOCK;
1144
        SET_BIT (superblocks, bb->index);
1145
        need = true;
1146
      }
1147
 
1148
  if (need)
1149
    {
1150
      rebuild_jump_labels (get_insns ());
1151
      find_many_sub_basic_blocks (superblocks);
1152
    }
1153
 
1154
  free (superblocks);
1155
}
1156
 
1157
/* Finalize the changes: reorder insn list according to the sequence specified
1158
   by aux pointers, enter compensation code, rebuild scope forest.  */
1159
 
1160
void
1161
cfg_layout_finalize (void)
1162
{
1163
  basic_block bb;
1164
 
1165
#ifdef ENABLE_CHECKING
1166
  verify_flow_info ();
1167
#endif
1168
  rtl_register_cfg_hooks ();
1169
  if (reload_completed
1170
#ifdef HAVE_epilogue
1171
      && !HAVE_epilogue
1172
#endif
1173
      )
1174
    fixup_fallthru_exit_predecessor ();
1175
  fixup_reorder_chain ();
1176
 
1177
#ifdef ENABLE_CHECKING
1178
  verify_insn_chain ();
1179
#endif
1180
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1181
  {
1182
    bb->il.rtl->header = bb->il.rtl->footer = NULL;
1183
    bb->aux = NULL;
1184
    bb->il.rtl->visited = 0;
1185
  }
1186
 
1187
  break_superblocks ();
1188
 
1189
#ifdef ENABLE_CHECKING
1190
  verify_flow_info ();
1191
#endif
1192
 
1193
  free_original_copy_tables ();
1194
}
1195
 
1196
/* Checks whether all N blocks in BBS array can be copied.  */
1197
bool
1198
can_copy_bbs_p (basic_block *bbs, unsigned n)
1199
{
1200
  unsigned i;
1201
  edge e;
1202
  int ret = true;
1203
 
1204
  for (i = 0; i < n; i++)
1205
    bbs[i]->flags |= BB_DUPLICATED;
1206
 
1207
  for (i = 0; i < n; i++)
1208
    {
1209
      /* In case we should redirect abnormal edge during duplication, fail.  */
1210
      edge_iterator ei;
1211
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1212
        if ((e->flags & EDGE_ABNORMAL)
1213
            && (e->dest->flags & BB_DUPLICATED))
1214
          {
1215
            ret = false;
1216
            goto end;
1217
          }
1218
 
1219
      if (!can_duplicate_block_p (bbs[i]))
1220
        {
1221
          ret = false;
1222
          break;
1223
        }
1224
    }
1225
 
1226
end:
1227
  for (i = 0; i < n; i++)
1228
    bbs[i]->flags &= ~BB_DUPLICATED;
1229
 
1230
  return ret;
1231
}
1232
 
1233
/* Duplicates the N basic blocks stored in array BBS.  The newly created
   basic blocks are placed into array NEW_BBS in the same order.  Edges from
   the basic blocks in BBS are also duplicated, and those copies that lead
   into BBS are redirected to the appropriate newly created block.  The
   function assigns the new blocks to loops (the copy of basic block bb is
   assigned to the bb->loop_father->copy loop, so this must be set up
   correctly in advance) and updates dominators locally (dominance
   information must be available for this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the NUM_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */
 
1251
void
1252
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1253
          edge *edges, unsigned num_edges, edge *new_edges,
1254
          struct loop *base, basic_block after)
1255
{
1256
  unsigned i, j;
1257
  basic_block bb, new_bb, dom_bb;
1258
  edge e;
1259
 
1260
  /* Duplicate bbs, update dominators, assign bbs to loops.  */
1261
  for (i = 0; i < n; i++)
1262
    {
1263
      /* Duplicate.  */
1264
      bb = bbs[i];
1265
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1266
      after = new_bb;
1267
      bb->flags |= BB_DUPLICATED;
1268
      /* Add to loop.  */
1269
      add_bb_to_loop (new_bb, bb->loop_father->copy);
1270
      /* Possibly set header.  */
1271
      if (bb->loop_father->header == bb && bb->loop_father != base)
1272
        new_bb->loop_father->header = new_bb;
1273
      /* Or latch.  */
1274
      if (bb->loop_father->latch == bb && bb->loop_father != base)
1275
        new_bb->loop_father->latch = new_bb;
1276
    }
1277
 
1278
  /* Set dominators.  */
1279
  for (i = 0; i < n; i++)
1280
    {
1281
      bb = bbs[i];
1282
      new_bb = new_bbs[i];
1283
 
1284
      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1285
      if (dom_bb->flags & BB_DUPLICATED)
1286
        {
1287
          dom_bb = get_bb_copy (dom_bb);
1288
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1289
        }
1290
    }
1291
 
1292
  /* Redirect edges.  */
1293
  for (j = 0; j < num_edges; j++)
1294
    new_edges[j] = NULL;
1295
  for (i = 0; i < n; i++)
1296
    {
1297
      edge_iterator ei;
1298
      new_bb = new_bbs[i];
1299
      bb = bbs[i];
1300
 
1301
      FOR_EACH_EDGE (e, ei, new_bb->succs)
1302
        {
1303
          for (j = 0; j < num_edges; j++)
1304
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1305
              new_edges[j] = e;
1306
 
1307
          if (!(e->dest->flags & BB_DUPLICATED))
1308
            continue;
1309
          redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1310
        }
1311
    }
1312
 
1313
  /* Clear information about duplicates.  */
1314
  for (i = 0; i < n; i++)
1315
    bbs[i]->flags &= ~BB_DUPLICATED;
1316
}
1317
 
1318
#include "gt-cfglayout.h"
