/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "vecprim.h"

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);

/* Unlink the chain of insns between FIRST and LAST from the insn stream,
   reconnecting the neighbours around the gap, and return FIRST.  The
   caller becomes responsible for the detached subchain.  */

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_P (NEXT_INSN (insn))
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This
     can be created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->il.rtl->header = unlink_insn_chain (next_insn,
                                              PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATOR into scope blocks,
   line numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs arrays contain locators where the given property changes.  The
   block_locators_blocks array contains the scope block that is used for all
   insn locators greater than the corresponding block_locators_locs value and
   smaller than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *line_locators_locs;
static VEC(int,heap) *line_locators_lines;
static VEC(int,heap) *file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

unsigned int
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  line_locators_locs = VEC_alloc (int, heap, 32);
  line_locators_lines = VEC_alloc (int, heap, 32);
  file_locators_locs = VEC_alloc (int, heap, 32);
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
        {
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
          if (NOTE_LINE_NUMBER (insn) > 0)
            {
              expanded_location xloc;
              NOTE_EXPANDED_LOCATION (xloc, insn);
              line_number = xloc.line;
              file_name = xloc.file;
            }
        }
      else
        active = (active_insn_p (insn)
                  && GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      if (active
          || !next
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VEC_safe_push (int, heap, block_locators_locs, loc);
              VEC_safe_push (tree, gc, block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VEC_safe_push (int, heap, line_locators_locs, loc);
              VEC_safe_push (int, heap, line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VEC_safe_push (int, heap, file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
              last_file_name = file_name;
            }
          if (!prologue_locator && file_name)
            prologue_locator = loc;
          if (!next)
            epilogue_locator = loc;
          if (active)
            INSN_LOCATOR (insn) = loc;
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
  return 0;
}

struct tree_opt_pass pass_insn_locators_initialize =
{
  "locators",                           /* name */
  NULL,                                 /* gate */
  insn_locators_initialize,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};


/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from the combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
   if (!s1)
     return s2;
   if (!s2)
     return s1;
   if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
     return s1;
   return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VEC_length (int, line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (int, line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VEC_length (int, file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
   return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                         insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
       bb != 0;
       bb = bb->aux, index++)
    {
      if (bb->il.rtl->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->il.rtl->header;
          else
            set_first_insn (bb->il.rtl->header);
          PREV_INSN (bb->il.rtl->header) = insn;
          insn = bb->il.rtl->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
        {
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerate case of a conditional jump jumping to the next
                 instruction can happen for jumps with side effects.  We need
                 to construct a forwarder block and this will be done just
                 fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          nb->il.rtl->visited = 1;
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;

          /* Make sure the new bb is tagged for the correct section (same as
             the fall-thru source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, single_pred (bb));
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && JUMP_P (BB_END (bb))
              && !any_condjump_p (BB_END (bb))
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
            REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
              (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
        }
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
           bb;
           bb = bb->aux, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (get_bb_original (bb))
            fprintf (dump_file, "duplicate of %i ",
                     get_bb_original (bb)->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = NUM_FIXED_BLOCKS;

  for (; bb; prev_bb = bb, bb = bb->aux, index ++)
    {
      bb->index = index;
      SET_BASIC_BLOCK (index, bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          bb->il.rtl->footer = c->il.rtl->footer;
          c->il.rtl->footer = NULL;
        }

      while (c->aux != bb)
        c = c->aux;

      c->aux = bb->aux;
      while (c->aux)
        c = c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* These are safe to strip.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_REPEATED_LINE_NUMBER:
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
                    bb->il.rtl->global_live_at_start);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
                    bb->il.rtl->global_live_at_end);
    }

  return new_bb;
}

/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
  {
    bb->il.rtl->header = bb->il.rtl->footer = NULL;
    bb->aux = NULL;
    bb->il.rtl->visited = 0;
  }

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  free_original_copy_tables ();
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if ((e->flags & EDGE_ABNORMAL)
            && (e->dest->flags & BB_DUPLICATED))
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated, and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   bbs into loops (the copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (the LOOPS structure that contains the
   information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned num_edges, edge *new_edges,
          struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
        {
          dom_bb = get_bb_copy (dom_bb);
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
        {
          for (j = 0; j < num_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!(e->dest->flags & BB_DUPLICATED))
            continue;
          redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}

#include "gt-cfglayout.h"
