openrisc/trunk/gnu-src/gcc-4.5.1/gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
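
/* Editor's illustrative sketch, not part of GCC (kept under #if 0 so the
   file still compiles): the detach logic of unlink_insn_chain above,
   restated on a minimal doubly linked list so it can be compiled and
   tested standalone.  The type `node' and the variables `head' and
   `tail' are hypothetical stand-ins for the insn chain and the
   first/last insn pointers.  */
#if 0
#include <stddef.h>

struct node { struct node *prev, *next; };

static struct node *head, *tail;

/* Detach the inclusive range [FIRST, LAST] and return FIRST, fixing up
   HEAD and TAIL exactly as unlink_insn_chain fixes up the first and
   last insn of the function.  */
static struct node *
detach_range (struct node *first, struct node *last)
{
  struct node *prevfirst = first->prev;
  struct node *nextlast = last->next;

  first->prev = NULL;
  last->next = NULL;
  if (prevfirst)
    prevfirst->next = nextlast;
  if (nextlast)
    nextlast->prev = prevfirst;
  else
    tail = prevfirst;   /* The range ended the list.  */
  if (!prevfirst)
    head = nextlast;    /* The range started the list.  */
  return first;
}
#endif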

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
            case NOTE_INSN_BLOCK_END:
              gcc_unreachable ();
              continue;
            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.
     This can be created by removing the basic block that originally
     followed NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_KIND (insn))
          {
          case NOTE_INSN_BLOCK_END:
            gcc_unreachable ();
            break;
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->il.rtl->header = unlink_insn_chain (next_insn,
                                                PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATOR into scope
   blocks, line numbers and files.  In order to be GGC friendly we need
   to use separate varrays.  This also slightly improves the memory
   locality in the binary search.  The _locs arrays contain the locators
   where the given property changes.  The block_locators_blocks array
   contains the scope block that is used for all insn locators greater
   than the corresponding block_locators_locs value and smaller than the
   following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
int prologue_locator;
int epilogue_locator;

/* Hold the current and the last location information, so that the
   datastructures are built lazily, only when some instructions in a
   given place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
static int curr_rtl_loc = -1;

/* Allocate insn locator datastructure.  */
void
insn_locators_alloc (void)
{
  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  locations_locators_locs = VEC_alloc (int, heap, 32);
  locations_locators_vals = VEC_alloc (location_t, heap, 32);

  last_location = -1;
  curr_location = -1;
  curr_block = NULL;
  last_block = NULL;
  curr_rtl_loc = 0;
}

/* At the end of emit stage, clear current location.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}

/* Free insn locator datastructure.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree, gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}


/* Set current location.  */
void
set_curr_insn_source_location (location_t location)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At
     this time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  curr_location = location;
}

/* Get current location.  */
location_t
get_curr_insn_source_location (void)
{
  return curr_location;
}

/* Set current scope block.  */
void
set_curr_insn_block (tree b)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At
     this time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (b)
    curr_block = b;
}

/* Get current scope block.  */
tree
get_curr_insn_block (void)
{
  return curr_block;
}

/* Return current insn locator.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals,
                     &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
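
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   lazy recording scheme of curr_insn_locator above, reduced to plain
   arrays.  A new (locator, value) pair is pushed only when the tracked
   value actually changes, so a run of insns sharing one location shares
   one table entry.  All names here are hypothetical.  */
#if 0
#define MAX_ENTRIES 1024

static int rec_locs[MAX_ENTRIES]; /* Locators where the value changes.  */
static int rec_vals[MAX_ENTRIES]; /* Value in effect from that locator on.  */
static int rec_n;
static int rec_loc;               /* Analogue of curr_rtl_loc.  */
static int rec_last = -1;         /* Analogue of last_location.  */

/* Return the locator for an insn emitted while VAL is the current
   value of the tracked property.  */
static int
next_locator (int val)
{
  if (rec_last != val && rec_n < MAX_ENTRIES)
    {
      rec_loc++;
      rec_locs[rec_n] = rec_loc;
      rec_vals[rec_n] = val;
      rec_n++;
      rec_last = val;
    }
  return rec_loc;
}
#endif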

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
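
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   common-ancestor walk at the top of change_scope above, on a minimal
   scope tree.  It assumes, as change_scope does for BLOCK_NUMBER, that
   every node's number is greater than its parent's, so the deeper node
   can always be stepped up first.  The type `scope' is hypothetical.  */
#if 0
#include <assert.h>

struct scope { struct scope *parent; int number; };

static struct scope *
common_ancestor (struct scope *ts1, struct scope *ts2)
{
  while (ts1 != ts2)
    {
      assert (ts1 && ts2);
      if (ts1->number > ts2->number)
        ts1 = ts1->parent;
      else if (ts1->number < ts2->number)
        ts2 = ts2->parent;
      else
        {
          /* Equal numbers on distinct nodes: step both up.  */
          ts1 = ts1->parent;
          ts2 = ts2->parent;
        }
    }
  return ts1;
}
#endif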

/* Return the lexical scope block that the locator LOC belongs to.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function, and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}

/* Return the source location of the statement specified by LOC.  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
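
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   binary search shared by locator_scope and locator_location above,
   over a plain sorted int array.  It returns the index of the greatest
   recorded breakpoint that is <= LOC, i.e. the entry whose property is
   in effect at LOC (assumes N > 0 and LOCS[0] <= LOC, as the callers
   guarantee for valid locators).  The function name is hypothetical.  */
#if 0
static int
find_locator_entry (const int *locs, int n, int loc)
{
  int max = n;
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = locs[pos];

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        return pos;
    }
}
#endif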

/* Return the source line of the statement specified by LOC.  */
int
locator_line (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.line;
}

/* Return the source line of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return the source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.file;
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Return true if the locators LOC1 and LOC2 have the same location
   and scope.  */
bool
locator_eq (int loc1, int loc2)
{
  if (loc1 == loc2)
    return true;
  if (locator_location (loc1) != locator_location (loc2))
    return false;
  return locator_scope (loc1) == locator_scope (loc2);
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between a jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      this_block = insn_scope (insn);
      /* For sequences compute the scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}


/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
           bb;
           bb = (basic_block) bb->aux, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (get_bb_original (bb))
            fprintf (dump_file, "duplicate of %i ",
                     get_bb_original (bb)->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
        bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables; they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
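
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   relinking step of relink_block_chain above.  The desired order
   arrives as a singly linked list threaded through the aux pointers;
   the doubly linked prev/next chain is rebuilt to match, between two
   sentinels standing in for the entry and exit blocks.  All names are
   hypothetical.  */
#if 0
#include <stddef.h>

struct blk { struct blk *prev, *next; void *aux; };

/* FIRST is the first real block in the desired order (the analogue of
   ENTRY_BLOCK_PTR->next_bb at the top of the aux chain).  */
static void
relink (struct blk *entry_blk, struct blk *first, struct blk *exit_blk)
{
  struct blk *b, *prev_b;

  prev_b = entry_blk;
  for (b = first; b; prev_b = b, b = (struct blk *) b->aux)
    {
      b->prev = prev_b;
      prev_b->next = b;
    }
  prev_b->next = exit_blk;
  exit_blk->prev = prev_b;
}
#endif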


/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (bb->il.rtl->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->il.rtl->header;
          else
            set_first_insn (bb->il.rtl->header);
          PREV_INSN (bb->il.rtl->header) = insn;
          insn = bb->il.rtl->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
        {
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* This might happen if the conditional jump has side
                 effects and could therefore not be optimized away.
                 Make the basic block end with a barrier in order
                 to prevent rtl_verify_flow_info from complaining.  */
              if (!e_fall)
                {
                  gcc_assert (!onlyjump_p (bb_end_insn)
                              || returnjump_p (bb_end_insn));
                  bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
                  continue;
                }

              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen for jumps with side effects.
                 We need to construct a forwarder block, and this will be
                 done just fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
            {
              /* If the old fallthru is still next, or if the
                 asm goto doesn't have a fallthru (e.g. when followed by
                 __builtin_unreachable ()), nothing to do.  */
              if (! e_fall
                  || bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* Otherwise we'll have to use the fallthru fixup below.  */
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to the exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          nb->il.rtl->visited = 1;
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;

          /* Make sure the new bb is tagged for the correct section (same
             as the fall-thru source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, single_pred (bb));
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && JUMP_P (BB_END (bb))
              && !any_condjump_p (BB_END (bb))
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
            add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
        }
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }

  /* Ensure that the goto_locus of each edge is represented by some
     instruction with that locus in the RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
          if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
            {
              basic_block nb;
              rtx end;

              insn = BB_END (e->src);
              end = PREV_INSN (BB_HEAD (e->src));
              while (insn != end
                     && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
                insn = PREV_INSN (insn);
              if (insn != end
                  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
                continue;
              if (simplejump_p (BB_END (e->src))
                  && INSN_LOCATOR (BB_END (e->src)) == 0)
                {
                  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
                  continue;
                }
              if (e->dest != EXIT_BLOCK_PTR)
                {
                  insn = BB_HEAD (e->dest);
                  end = NEXT_INSN (BB_END (e->dest));
                  while (insn != end && !INSN_P (insn))
                    insn = NEXT_INSN (insn);
                  if (insn != end && INSN_LOCATOR (insn)
                      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
                    continue;
                }
              nb = split_edge (e);
              if (!INSN_P (BB_END (nb)))
                BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
                                                     nb);
              INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
            }
      }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          bb->il.rtl->footer = c->il.rtl->footer;
          c->il.rtl->footer = NULL;
        }

      while (c->aux != bb)
        c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
        c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
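
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   aux-chain splice performed by fixup_fallthru_exit_predecessor above.
   TARGET is unlinked from the singly linked aux order starting at FIRST
   and re-appended at the end, which is how the fallthru predecessor is
   moved to the end of the reorder chain.  Assumes TARGET is reachable
   from FIRST and is not FIRST itself.  All names are hypothetical.  */
#if 0
#include <stddef.h>

struct bnode { void *aux; };

static void
move_to_end (struct bnode *first, struct bnode *target)
{
  struct bnode *c = first;

  while (c->aux != target)
    c = (struct bnode *) c->aux;

  c->aux = target->aux;           /* Unlink TARGET.  */
  while (c->aux)
    c = (struct bnode *) c->aux;  /* Walk to the current tail.  */

  c->aux = target;                /* Re-append TARGET at the end.  */
  target->aux = NULL;
}
#endif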

/* If the exit block has more than one fallthru predecessor, force it to
   have just one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
        if (predecessor == NULL)
          predecessor = e;
        else
          {
            more = true;
            break;
          }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
        ei_next (&ei);
      else
        redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
        {
          bb->aux = forwarder;
          break;
        }
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare this function in a header file, since it
   should only be used through the cfghooks interface, and we do not want
   to move it to cfgrtl.c since it would require also moving quite a lot
   of related code.  */
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);

bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case DEBUG_INSN:
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          copy = emit_copy_of_insn_after (insn, get_last_insn ());
          maybe_copy_epilogue_insn (insn, copy);
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in its first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare this function in a header file, since it
   should only be used through the cfghooks interface, and we do not want
   to move it to cfgrtl.c since it would require also moving quite a lot
   of related code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}


/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder the insn list according to the sequence
   specified by the aux pointers, enter compensation code, and rebuild
   the scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  bool ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* If we would need to redirect an abnormal edge during the
         duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if ((e->flags & EDGE_ABNORMAL)
            && (e->dest->flags & BB_DUPLICATED))
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}
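
/* Editor's illustrative sketch, not part of GCC (kept under #if 0): the
   marking trick used by can_copy_bbs_p above.  By flagging the whole
   region first, an edge can be classified as staying inside the region
   with a single flag test on its destination, instead of a search over
   the BBS array.  All types and names here are hypothetical.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

#define FLAG_DUPLICATED 1

struct vertex
{
  int flags;
  struct vertex **succs;  /* NULL-terminated successor array.  */
  bool *succ_abnormal;    /* Parallel array: "abnormal" edges.  */
};

static bool
can_copy_region_p (struct vertex **vs, unsigned n)
{
  unsigned i, j;
  bool ret = true;

  for (i = 0; i < n; i++)
    vs[i]->flags |= FLAG_DUPLICATED;

  /* As in can_copy_bbs_p: an "abnormal" edge whose destination is
     flagged would have to be redirected during duplication, so fail.  */
  for (i = 0; i < n && ret; i++)
    for (j = 0; vs[i]->succs[j]; j++)
      if (vs[i]->succ_abnormal[j]
          && (vs[i]->succs[j]->flags & FLAG_DUPLICATED))
        ret = false;

  for (i = 0; i < n; i++)
    vs[i]->flags &= ~FLAG_DUPLICATED;

  return ret;
}
#endif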

/* Duplicates N basic blocks stored in array BBS.  Newly created basic
   blocks are placed into array NEW_BBS in the same order.  Edges from
   basic blocks in BBS are also duplicated, and copies of those that lead
   into BBS are redirected to the appropriate newly created block.  The
   function assigns bbs into loops (the copy of basic block bb is assigned
   to the bb->loop_father->copy loop, so this must be set up correctly in
   advance) and updates dominators locally (the LOOPS structure that
   contains the information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header
   or latch is copied, we do not set the new blocks as header or latch.

   Created copies of the NUM_EDGES edges in array EDGES are stored in
   array NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned num_edges, edge *new_edges,
          struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
        {
          dom_bb = get_bb_copy (dom_bb);
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
        {
          for (j = 0; j < num_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!(e->dest->flags & BB_DUPLICATED))
            continue;
          redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}

#include "gt-cfglayout.h"
