OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-stable/] [gcc-4.5.1/] [gcc/] [cfgrtl.c] - Blame information for rev 843

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */
39
 
40
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
#include "target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"
65
 
66
static int can_delete_note_p (const_rtx);
67
static int can_delete_label_p (const_rtx);
68
static basic_block rtl_split_edge (edge);
69
static bool rtl_move_block_after (basic_block, basic_block);
70
static int rtl_verify_flow_info (void);
71
static basic_block cfg_layout_split_block (basic_block, void *);
72
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
73
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
74
static void cfg_layout_delete_block (basic_block);
75
static void rtl_delete_block (basic_block);
76
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
77
static edge rtl_redirect_edge_and_branch (edge, basic_block);
78
static basic_block rtl_split_block (basic_block, void *);
79
static void rtl_dump_bb (basic_block, FILE *, int, int);
80
static int rtl_verify_flow_info_1 (void);
81
static void rtl_make_forwarder_block (edge);
82
 
83
/* Return true if NOTE is not one of the ones that must be kept paired,
84
   so that we may simply delete it.  */
85
 
86
static int
87
can_delete_note_p (const_rtx note)
88
{
89
  switch (NOTE_KIND (note))
90
    {
91
    case NOTE_INSN_DELETED:
92
    case NOTE_INSN_BASIC_BLOCK:
93
    case NOTE_INSN_EPILOGUE_BEG:
94
      return true;
95
 
96
    default:
97
      return false;
98
    }
99
}
100
 
101
/* True if a given label can be deleted.  */
102
 
103
static int
104
can_delete_label_p (const_rtx label)
105
{
106
  return (!LABEL_PRESERVE_P (label)
107
          /* User declared labels must be preserved.  */
108
          && LABEL_NAME (label) == 0
109
          && !in_expr_list_p (forced_labels, label));
110
}
111
 
112
/* Delete INSN by patching it out.  Return the next insn.  */
113
 
114
rtx
115
delete_insn (rtx insn)
116
{
117
  rtx next = NEXT_INSN (insn);
118
  rtx note;
119
  bool really_delete = true;
120
 
121
  if (LABEL_P (insn))
122
    {
123
      /* Some labels can't be directly removed from the INSN chain, as they
124
         might be references via variables, constant pool etc.
125
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
126
      if (! can_delete_label_p (insn))
127
        {
128
          const char *name = LABEL_NAME (insn);
129
 
130
          really_delete = false;
131
          PUT_CODE (insn, NOTE);
132
          NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
133
          NOTE_DELETED_LABEL_NAME (insn) = name;
134
        }
135
 
136
      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
137
    }
138
 
139
  if (really_delete)
140
    {
141
      /* If this insn has already been deleted, something is very wrong.  */
142
      gcc_assert (!INSN_DELETED_P (insn));
143
      remove_insn (insn);
144
      INSN_DELETED_P (insn) = 1;
145
    }
146
 
147
  /* If deleting a jump, decrement the use count of the label.  Deleting
148
     the label itself should happen in the normal course of block merging.  */
149
  if (JUMP_P (insn))
150
    {
151
      if (JUMP_LABEL (insn)
152
          && LABEL_P (JUMP_LABEL (insn)))
153
        LABEL_NUSES (JUMP_LABEL (insn))--;
154
 
155
      /* If there are more targets, remove them too.  */
156
      while ((note
157
              = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
158
             && LABEL_P (XEXP (note, 0)))
159
        {
160
          LABEL_NUSES (XEXP (note, 0))--;
161
          remove_note (insn, note);
162
        }
163
    }
164
 
165
  /* Also if deleting any insn that references a label as an operand.  */
166
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
167
         && LABEL_P (XEXP (note, 0)))
168
    {
169
      LABEL_NUSES (XEXP (note, 0))--;
170
      remove_note (insn, note);
171
    }
172
 
173
  if (JUMP_TABLE_DATA_P (insn))
174
    {
175
      rtx pat = PATTERN (insn);
176
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
177
      int len = XVECLEN (pat, diff_vec_p);
178
      int i;
179
 
180
      for (i = 0; i < len; i++)
181
        {
182
          rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
183
 
184
          /* When deleting code in bulk (e.g. removing many unreachable
185
             blocks) we can delete a label that's a target of the vector
186
             before deleting the vector itself.  */
187
          if (!NOTE_P (label))
188
            LABEL_NUSES (label)--;
189
        }
190
    }
191
 
192
  return next;
193
}
194
 
195
/* Like delete_insn but also purge dead edges from BB.  */
196
 
197
rtx
198
delete_insn_and_edges (rtx insn)
199
{
200
  rtx x;
201
  bool purge = false;
202
 
203
  if (INSN_P (insn)
204
      && BLOCK_FOR_INSN (insn)
205
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
206
    purge = true;
207
  x = delete_insn (insn);
208
  if (purge)
209
    purge_dead_edges (BLOCK_FOR_INSN (insn));
210
  return x;
211
}
212
 
213
/* Unlink a chain of insns between START and FINISH, leaving notes
214
   that must be paired.  If CLEAR_BB is true, we set bb field for
215
   insns that cannot be removed to NULL.  */
216
 
217
void
218
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
219
{
220
  rtx next;
221
 
222
  /* Unchain the insns one by one.  It would be quicker to delete all of these
223
     with a single unchaining, rather than one at a time, but we need to keep
224
     the NOTE's.  */
225
  while (1)
226
    {
227
      next = NEXT_INSN (start);
228
      if (NOTE_P (start) && !can_delete_note_p (start))
229
        ;
230
      else
231
        next = delete_insn (start);
232
 
233
      if (clear_bb && !INSN_DELETED_P (start))
234
        set_block_for_insn (start, NULL);
235
 
236
      if (start == finish)
237
        break;
238
      start = next;
239
    }
240
}
241
 
242
/* Create a new basic block consisting of the instructions between HEAD and END
243
   inclusive.  This function is designed to allow fast BB construction - reuses
244
   the note and basic block struct in BB_NOTE, if any and do not grow
245
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
246
   END can be NULL in to create new empty basic block before HEAD.  Both END
247
   and HEAD can be NULL to create basic block at the end of INSN chain.
248
   AFTER is the basic block we should be put after.  */
249
 
250
basic_block
251
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
252
{
253
  basic_block bb;
254
 
255
  if (bb_note
256
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
257
      && bb->aux == NULL)
258
    {
259
      /* If we found an existing note, thread it back onto the chain.  */
260
 
261
      rtx after;
262
 
263
      if (LABEL_P (head))
264
        after = head;
265
      else
266
        {
267
          after = PREV_INSN (head);
268
          head = bb_note;
269
        }
270
 
271
      if (after != bb_note && NEXT_INSN (after) != bb_note)
272
        reorder_insns_nobb (bb_note, bb_note, after);
273
    }
274
  else
275
    {
276
      /* Otherwise we must create a note and a basic block structure.  */
277
 
278
      bb = alloc_block ();
279
 
280
      init_rtl_bb_info (bb);
281
      if (!head && !end)
282
        head = end = bb_note
283
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
284
      else if (LABEL_P (head) && end)
285
        {
286
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
287
          if (head == end)
288
            end = bb_note;
289
        }
290
      else
291
        {
292
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
293
          head = bb_note;
294
          if (!end)
295
            end = head;
296
        }
297
 
298
      NOTE_BASIC_BLOCK (bb_note) = bb;
299
    }
300
 
301
  /* Always include the bb note in the block.  */
302
  if (NEXT_INSN (end) == bb_note)
303
    end = bb_note;
304
 
305
  BB_HEAD (bb) = head;
306
  BB_END (bb) = end;
307
  bb->index = last_basic_block++;
308
  bb->flags = BB_NEW | BB_RTL;
309
  link_block (bb, after);
310
  SET_BASIC_BLOCK (bb->index, bb);
311
  df_bb_refs_record (bb->index, false);
312
  update_bb_for_insn (bb);
313
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);
314
 
315
  /* Tag the block so that we know it has been used when considering
316
     other basic block notes.  */
317
  bb->aux = bb;
318
 
319
  return bb;
320
}
321
 
322
/* Create new basic block consisting of instructions in between HEAD and END
323
   and place it to the BB chain after block AFTER.  END can be NULL in to
324
   create new empty basic block before HEAD.  Both END and HEAD can be NULL to
325
   create basic block at the end of INSN chain.  */
326
 
327
static basic_block
328
rtl_create_basic_block (void *headp, void *endp, basic_block after)
329
{
330
  rtx head = (rtx) headp, end = (rtx) endp;
331
  basic_block bb;
332
 
333
  /* Grow the basic block array if needed.  */
334
  if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
335
    {
336
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
337
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
338
    }
339
 
340
  n_basic_blocks++;
341
 
342
  bb = create_basic_block_structure (head, end, NULL, after);
343
  bb->aux = NULL;
344
  return bb;
345
}
346
 
347
static basic_block
348
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
349
{
350
  basic_block newbb = rtl_create_basic_block (head, end, after);
351
 
352
  return newbb;
353
}
354
 
355
/* Delete the insns in a (non-live) block.  We physically delete every
356
   non-deleted-note insn, and update the flow graph appropriately.
357
 
358
   Return nonzero if we deleted an exception handler.  */
359
 
360
/* ??? Preserving all such notes strikes me as wrong.  It would be nice
361
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */
362
 
363
static void
364
rtl_delete_block (basic_block b)
365
{
366
  rtx insn, end;
367
 
368
  /* If the head of this block is a CODE_LABEL, then it might be the
369
     label for an exception handler which can't be reached.  We need
370
     to remove the label from the exception_handler_label list.  */
371
  insn = BB_HEAD (b);
372
 
373
  end = get_last_bb_insn (b);
374
 
375
  /* Selectively delete the entire chain.  */
376
  BB_HEAD (b) = NULL;
377
  delete_insn_chain (insn, end, true);
378
 
379
 
380
  if (dump_file)
381
    fprintf (dump_file, "deleting block %d\n", b->index);
382
  df_bb_delete (b->index);
383
}
384
 
385
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
386
 
387
void
388
compute_bb_for_insn (void)
389
{
390
  basic_block bb;
391
 
392
  FOR_EACH_BB (bb)
393
    {
394
      rtx end = BB_END (bb);
395
      rtx insn;
396
 
397
      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
398
        {
399
          BLOCK_FOR_INSN (insn) = bb;
400
          if (insn == end)
401
            break;
402
        }
403
    }
404
}
405
 
406
/* Release the basic_block_for_insn array.  */
407
 
408
unsigned int
409
free_bb_for_insn (void)
410
{
411
  rtx insn;
412
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
413
    if (!BARRIER_P (insn))
414
      BLOCK_FOR_INSN (insn) = NULL;
415
  return 0;
416
}
417
 
418
/* Execute function for pass_free_cfg: optionally run a final dataflow
   analysis for delay-slot scheduling, then drop BLOCK_FOR_INSN info.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    df_analyze ();
#endif

  free_bb_for_insn ();
  return 0;
}
431
 
432
struct rtl_opt_pass pass_free_cfg =
433
{
434
 {
435
  RTL_PASS,
436
  "*free_cfg",                          /* name */
437
  NULL,                                 /* gate */
438
  rest_of_pass_free_cfg,                /* execute */
439
  NULL,                                 /* sub */
440
  NULL,                                 /* next */
441
  0,                                    /* static_pass_number */
442
  TV_NONE,                              /* tv_id */
443
  0,                                    /* properties_required */
444
  0,                                    /* properties_provided */
445
  PROP_cfg,                             /* properties_destroyed */
446
  0,                                    /* todo_flags_start */
447
  0,                                    /* todo_flags_finish */
448
 }
449
};
450
 
451
/* Return RTX to emit after when we want to emit code on the entry of function.  */
452
rtx
453
entry_of_function (void)
454
{
455
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
456
          BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
457
}
458
 
459
/* Emit INSN at the entry point of the function, ensuring that it is only
460
   executed once per function.  */
461
void
462
emit_insn_at_entry (rtx insn)
463
{
464
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
465
  edge e = ei_safe_edge (ei);
466
  gcc_assert (e->flags & EDGE_FALLTHRU);
467
 
468
  insert_insn_on_edge (insn, e);
469
  commit_edge_insertions ();
470
}
471
 
472
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
473
   (or BARRIER if found) and notify df of the bb change.
474
   The insn chain range is inclusive
475
   (i.e. both BEGIN and END will be updated. */
476
 
477
static void
478
update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
479
{
480
  rtx insn;
481
 
482
  end = NEXT_INSN (end);
483
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
484
    if (!BARRIER_P (insn))
485
      df_insn_change_bb (insn, bb);
486
}
487
 
488
/* Update BLOCK_FOR_INSN of insns in BB to BB,
489
   and notify df of the change.  */
490
 
491
void
492
update_bb_for_insn (basic_block bb)
493
{
494
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
495
}
496
 
497
 
498
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
499
   note associated with the BLOCK.  */
500
 
501
static rtx
502
first_insn_after_basic_block_note (basic_block block)
503
{
504
  rtx insn;
505
 
506
  /* Get the first instruction in the block.  */
507
  insn = BB_HEAD (block);
508
 
509
  if (insn == NULL_RTX)
510
    return NULL_RTX;
511
  if (LABEL_P (insn))
512
    insn = NEXT_INSN (insn);
513
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
514
 
515
  return NEXT_INSN (insn);
516
}
517
 
518
/* Creates a new basic block just after basic block B by splitting
519
   everything after specified instruction I.  */
520
 
521
static basic_block
522
rtl_split_block (basic_block bb, void *insnp)
523
{
524
  basic_block new_bb;
525
  rtx insn = (rtx) insnp;
526
  edge e;
527
  edge_iterator ei;
528
 
529
  if (!insn)
530
    {
531
      insn = first_insn_after_basic_block_note (bb);
532
 
533
      if (insn)
534
        {
535
          rtx next = insn;
536
 
537
          insn = PREV_INSN (insn);
538
 
539
          /* If the block contains only debug insns, insn would have
540
             been NULL in a non-debug compilation, and then we'd end
541
             up emitting a DELETED note.  For -fcompare-debug
542
             stability, emit the note too.  */
543
          if (insn != BB_END (bb)
544
              && DEBUG_INSN_P (next)
545
              && DEBUG_INSN_P (BB_END (bb)))
546
            {
547
              while (next != BB_END (bb) && DEBUG_INSN_P (next))
548
                next = NEXT_INSN (next);
549
 
550
              if (next == BB_END (bb))
551
                emit_note_after (NOTE_INSN_DELETED, next);
552
            }
553
        }
554
      else
555
        insn = get_last_insn ();
556
    }
557
 
558
  /* We probably should check type of the insn so that we do not create
559
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
560
     bother.  */
561
  if (insn == BB_END (bb))
562
    emit_note_after (NOTE_INSN_DELETED, insn);
563
 
564
  /* Create the new basic block.  */
565
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
566
  BB_COPY_PARTITION (new_bb, bb);
567
  BB_END (bb) = insn;
568
 
569
  /* Redirect the outgoing edges.  */
570
  new_bb->succs = bb->succs;
571
  bb->succs = NULL;
572
  FOR_EACH_EDGE (e, ei, new_bb->succs)
573
    e->src = new_bb;
574
 
575
  /* The new block starts off being dirty.  */
576
  df_set_bb_dirty (bb);
577
  return new_bb;
578
}
579
 
580
/* Blocks A and B are to be merged into a single block A.  The insns
581
   are already contiguous.  */
582
 
583
static void
584
rtl_merge_blocks (basic_block a, basic_block b)
585
{
586
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
587
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
588
  rtx b_debug_start = b_end, b_debug_end = b_end;
589
  int b_empty = 0;
590
 
591
  if (dump_file)
592
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
593
 
594
  while (DEBUG_INSN_P (b_end))
595
    b_end = PREV_INSN (b_debug_start = b_end);
596
 
597
  /* If there was a CODE_LABEL beginning B, delete it.  */
598
  if (LABEL_P (b_head))
599
    {
600
      /* Detect basic blocks with nothing but a label.  This can happen
601
         in particular at the end of a function.  */
602
      if (b_head == b_end)
603
        b_empty = 1;
604
 
605
      del_first = del_last = b_head;
606
      b_head = NEXT_INSN (b_head);
607
    }
608
 
609
  /* Delete the basic block note and handle blocks containing just that
610
     note.  */
611
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
612
    {
613
      if (b_head == b_end)
614
        b_empty = 1;
615
      if (! del_last)
616
        del_first = b_head;
617
 
618
      del_last = b_head;
619
      b_head = NEXT_INSN (b_head);
620
    }
621
 
622
  /* If there was a jump out of A, delete it.  */
623
  if (JUMP_P (a_end))
624
    {
625
      rtx prev;
626
 
627
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
628
        if (!NOTE_P (prev)
629
            || NOTE_INSN_BASIC_BLOCK_P (prev)
630
            || prev == BB_HEAD (a))
631
          break;
632
 
633
      del_first = a_end;
634
 
635
#ifdef HAVE_cc0
636
      /* If this was a conditional jump, we need to also delete
637
         the insn that set cc0.  */
638
      if (only_sets_cc0_p (prev))
639
        {
640
          rtx tmp = prev;
641
 
642
          prev = prev_nonnote_insn (prev);
643
          if (!prev)
644
            prev = BB_HEAD (a);
645
          del_first = tmp;
646
        }
647
#endif
648
 
649
      a_end = PREV_INSN (del_first);
650
    }
651
  else if (BARRIER_P (NEXT_INSN (a_end)))
652
    del_first = NEXT_INSN (a_end);
653
 
654
  /* Delete everything marked above as well as crap that might be
655
     hanging out between the two blocks.  */
656
  BB_HEAD (b) = NULL;
657
  delete_insn_chain (del_first, del_last, true);
658
 
659
  /* Reassociate the insns of B with A.  */
660
  if (!b_empty)
661
    {
662
      update_bb_for_insn_chain (a_end, b_debug_end, a);
663
 
664
      a_end = b_debug_end;
665
    }
666
  else if (b_end != b_debug_end)
667
    {
668
      /* Move any deleted labels and other notes between the end of A
669
         and the debug insns that make up B after the debug insns,
670
         bringing the debug insns into A while keeping the notes after
671
         the end of A.  */
672
      if (NEXT_INSN (a_end) != b_debug_start)
673
        reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
674
                            b_debug_end);
675
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
676
      a_end = b_debug_end;
677
    }
678
 
679
  df_bb_delete (b->index);
680
  BB_END (a) = a_end;
681
}
682
 
683
 
684
/* Return true when block A and B can be merged.  */
685
 
686
static bool
687
rtl_can_merge_blocks (basic_block a, basic_block b)
688
{
689
  /* If we are partitioning hot/cold basic blocks, we don't want to
690
     mess up unconditional or indirect jumps that cross between hot
691
     and cold sections.
692
 
693
     Basic block partitioning may result in some jumps that appear to
694
     be optimizable (or blocks that appear to be mergeable), but which really
695
     must be left untouched (they are required to make it safely across
696
     partition boundaries).  See  the comments at the top of
697
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
698
 
699
  if (BB_PARTITION (a) != BB_PARTITION (b))
700
    return false;
701
 
702
  /* There must be exactly one edge in between the blocks.  */
703
  return (single_succ_p (a)
704
          && single_succ (a) == b
705
          && single_pred_p (b)
706
          && a != b
707
          /* Must be simple edge.  */
708
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
709
          && a->next_bb == b
710
          && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
711
          /* If the jump insn has side effects,
712
             we can't kill the edge.  */
713
          && (!JUMP_P (BB_END (a))
714
              || (reload_completed
715
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
716
}
717
 
718
/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
719
   exist.  */
720
 
721
rtx
722
block_label (basic_block block)
723
{
724
  if (block == EXIT_BLOCK_PTR)
725
    return NULL_RTX;
726
 
727
  if (!LABEL_P (BB_HEAD (block)))
728
    {
729
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
730
    }
731
 
732
  return BB_HEAD (block);
733
}
734
 
735
/* Attempt to perform edge redirection by replacing possibly complex jump
736
   instruction by unconditional jump or removing jump completely.  This can
737
   apply only if all edges now point to the same block.  The parameters and
738
   return values are equivalent to redirect_edge_and_branch.  */
739
 
740
edge
741
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
742
{
743
  basic_block src = e->src;
744
  rtx insn = BB_END (src), kill_from;
745
  rtx set;
746
  int fallthru = 0;
747
 
748
  /* If we are partitioning hot/cold basic blocks, we don't want to
749
     mess up unconditional or indirect jumps that cross between hot
750
     and cold sections.
751
 
752
     Basic block partitioning may result in some jumps that appear to
753
     be optimizable (or blocks that appear to be mergeable), but which really
754
     must be left untouched (they are required to make it safely across
755
     partition boundaries).  See  the comments at the top of
756
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
757
 
758
  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
759
      || BB_PARTITION (src) != BB_PARTITION (target))
760
    return NULL;
761
 
762
  /* We can replace or remove a complex jump only when we have exactly
763
     two edges.  Also, if we have exactly one outgoing edge, we can
764
     redirect that.  */
765
  if (EDGE_COUNT (src->succs) >= 3
766
      /* Verify that all targets will be TARGET.  Specifically, the
767
         edge that is not E must also go to TARGET.  */
768
      || (EDGE_COUNT (src->succs) == 2
769
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
770
    return NULL;
771
 
772
  if (!onlyjump_p (insn))
773
    return NULL;
774
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
775
    return NULL;
776
 
777
  /* Avoid removing branch with side effects.  */
778
  set = single_set (insn);
779
  if (!set || side_effects_p (set))
780
    return NULL;
781
 
782
  /* In case we zap a conditional jump, we'll need to kill
783
     the cc0 setter too.  */
784
  kill_from = insn;
785
#ifdef HAVE_cc0
786
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
787
      && only_sets_cc0_p (PREV_INSN (insn)))
788
    kill_from = PREV_INSN (insn);
789
#endif
790
 
791
  /* See if we can create the fallthru edge.  */
792
  if (in_cfglayout || can_fallthru (src, target))
793
    {
794
      if (dump_file)
795
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
796
      fallthru = 1;
797
 
798
      /* Selectively unlink whole insn chain.  */
799
      if (in_cfglayout)
800
        {
801
          rtx insn = src->il.rtl->footer;
802
 
803
          delete_insn_chain (kill_from, BB_END (src), false);
804
 
805
          /* Remove barriers but keep jumptables.  */
806
          while (insn)
807
            {
808
              if (BARRIER_P (insn))
809
                {
810
                  if (PREV_INSN (insn))
811
                    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
812
                  else
813
                    src->il.rtl->footer = NEXT_INSN (insn);
814
                  if (NEXT_INSN (insn))
815
                    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
816
                }
817
              if (LABEL_P (insn))
818
                break;
819
              insn = NEXT_INSN (insn);
820
            }
821
        }
822
      else
823
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
824
                           false);
825
    }
826
 
827
  /* If this already is simplejump, redirect it.  */
828
  else if (simplejump_p (insn))
829
    {
830
      if (e->dest == target)
831
        return NULL;
832
      if (dump_file)
833
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
834
                 INSN_UID (insn), e->dest->index, target->index);
835
      if (!redirect_jump (insn, block_label (target), 0))
836
        {
837
          gcc_assert (target == EXIT_BLOCK_PTR);
838
          return NULL;
839
        }
840
    }
841
 
842
  /* Cannot do anything for target exit block.  */
843
  else if (target == EXIT_BLOCK_PTR)
844
    return NULL;
845
 
846
  /* Or replace possibly complicated jump insn by simple jump insn.  */
847
  else
848
    {
849
      rtx target_label = block_label (target);
850
      rtx barrier, label, table;
851
 
852
      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
853
      JUMP_LABEL (BB_END (src)) = target_label;
854
      LABEL_NUSES (target_label)++;
855
      if (dump_file)
856
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
857
                 INSN_UID (insn), INSN_UID (BB_END (src)));
858
 
859
 
860
      delete_insn_chain (kill_from, insn, false);
861
 
862
      /* Recognize a tablejump that we are converting to a
863
         simple jump and remove its associated CODE_LABEL
864
         and ADDR_VEC or ADDR_DIFF_VEC.  */
865
      if (tablejump_p (insn, &label, &table))
866
        delete_insn_chain (label, table, false);
867
 
868
      barrier = next_nonnote_insn (BB_END (src));
869
      if (!barrier || !BARRIER_P (barrier))
870
        emit_barrier_after (BB_END (src));
871
      else
872
        {
873
          if (barrier != NEXT_INSN (BB_END (src)))
874
            {
875
              /* Move the jump before barrier so that the notes
876
                 which originally were or were created before jump table are
877
                 inside the basic block.  */
878
              rtx new_insn = BB_END (src);
879
 
880
              update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
881
                                        PREV_INSN (barrier), src);
882
 
883
              NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
884
              PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
885
 
886
              NEXT_INSN (new_insn) = barrier;
887
              NEXT_INSN (PREV_INSN (barrier)) = new_insn;
888
 
889
              PREV_INSN (new_insn) = PREV_INSN (barrier);
890
              PREV_INSN (barrier) = new_insn;
891
            }
892
        }
893
    }
894
 
895
  /* Keep only one edge out and set proper flags.  */
896
  if (!single_succ_p (src))
897
    remove_edge (e);
898
  gcc_assert (single_succ_p (src));
899
 
900
  e = single_succ_edge (src);
901
  if (fallthru)
902
    e->flags = EDGE_FALLTHRU;
903
  else
904
    e->flags = 0;
905
 
906
  e->probability = REG_BR_PROB_BASE;
907
  e->count = src->count;
908
 
909
  if (e->dest != target)
910
    redirect_edge_succ (e, target);
911
  return e;
912
}
913
 
914
/* Subroutine of redirect_branch_edge that tries to patch the jump
915
   instruction INSN so that it reaches block NEW.  Do this
916
   only when it originally reached block OLD.  Return true if this
917
   worked or the original target wasn't OLD, return false if redirection
918
   doesn't work.  */
919
 
920
static bool
921
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
922
{
923
  rtx tmp;
924
  /* Recognize a tablejump and adjust all matching cases.  */
925
  if (tablejump_p (insn, NULL, &tmp))
926
    {
927
      rtvec vec;
928
      int j;
929
      rtx new_label = block_label (new_bb);
930
 
931
      if (new_bb == EXIT_BLOCK_PTR)
932
        return false;
933
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
934
        vec = XVEC (PATTERN (tmp), 0);
935
      else
936
        vec = XVEC (PATTERN (tmp), 1);
937
 
938
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
939
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
940
          {
941
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
942
            --LABEL_NUSES (old_label);
943
            ++LABEL_NUSES (new_label);
944
          }
945
 
946
      /* Handle casesi dispatch insns.  */
947
      if ((tmp = single_set (insn)) != NULL
948
          && SET_DEST (tmp) == pc_rtx
949
          && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
950
          && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
951
          && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
952
        {
953
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
954
                                                       new_label);
955
          --LABEL_NUSES (old_label);
956
          ++LABEL_NUSES (new_label);
957
        }
958
    }
959
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
960
    {
961
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
962
      rtx new_label, note;
963
 
964
      if (new_bb == EXIT_BLOCK_PTR)
965
        return false;
966
      new_label = block_label (new_bb);
967
 
968
      for (i = 0; i < n; ++i)
969
        {
970
          rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
971
          gcc_assert (GET_CODE (old_ref) == LABEL_REF);
972
          if (XEXP (old_ref, 0) == old_label)
973
            {
974
              ASM_OPERANDS_LABEL (tmp, i)
975
                = gen_rtx_LABEL_REF (Pmode, new_label);
976
              --LABEL_NUSES (old_label);
977
              ++LABEL_NUSES (new_label);
978
            }
979
        }
980
 
981
      if (JUMP_LABEL (insn) == old_label)
982
        {
983
          JUMP_LABEL (insn) = new_label;
984
          note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
985
          if (note)
986
            remove_note (insn, note);
987
        }
988
      else
989
        {
990
          note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
991
          if (note)
992
            remove_note (insn, note);
993
          if (JUMP_LABEL (insn) != new_label
994
              && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
995
            add_reg_note (insn, REG_LABEL_TARGET, new_label);
996
        }
997
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
998
             != NULL_RTX)
999
        XEXP (note, 0) = new_label;
1000
    }
1001
  else
1002
    {
1003
      /* ?? We may play the games with moving the named labels from
1004
         one basic block to the other in case only one computed_jump is
1005
         available.  */
1006
      if (computed_jump_p (insn)
1007
          /* A return instruction can't be redirected.  */
1008
          || returnjump_p (insn))
1009
        return false;
1010
 
1011
      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1012
        {
1013
          /* If the insn doesn't go where we think, we're confused.  */
1014
          gcc_assert (JUMP_LABEL (insn) == old_label);
1015
 
1016
          /* If the substitution doesn't succeed, die.  This can happen
1017
             if the back end emitted unrecognizable instructions or if
1018
             target is exit block on some arches.  */
1019
          if (!redirect_jump (insn, block_label (new_bb), 0))
1020
            {
1021
              gcc_assert (new_bb == EXIT_BLOCK_PTR);
1022
              return false;
1023
            }
1024
        }
1025
    }
1026
  return true;
1027
}
1028
 
1029
 
1030
/* Redirect edge E, which represents the branch of an (un)conditional
   jump or tablejump in its source block, so that it points to TARGET.
   Return the (possibly replaced) edge on success, NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* Finished RTL has one jump per block: patch just the last insn.  */
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    for (insn = BB_HEAD (src); insn != NEXT_INSN (BB_END (src));
	 insn = NEXT_INSN (insn))
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Bring the CFG edge in line with the patched instructions.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1068
 
1069
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
1070
   expense of adding new instructions or reordering basic blocks.
1071
 
1072
   Function can be also called with edge destination equivalent to the TARGET.
1073
   Then it should try the simplifications and do nothing if none is possible.
1074
 
1075
   Return edge representing the branch if transformation succeeded.  Return NULL
1076
   on failure.
1077
   We still return NULL in case E already destinated TARGET and we didn't
1078
   managed to simplify instruction stream.  */
1079
 
1080
static edge
1081
rtl_redirect_edge_and_branch (edge e, basic_block target)
1082
{
1083
  edge ret;
1084
  basic_block src = e->src;
1085
 
1086
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1087
    return NULL;
1088
 
1089
  if (e->dest == target)
1090
    return e;
1091
 
1092
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1093
    {
1094
      df_set_bb_dirty (src);
1095
      return ret;
1096
    }
1097
 
1098
  ret = redirect_branch_edge (e, target);
1099
  if (!ret)
1100
    return NULL;
1101
 
1102
  df_set_bb_dirty (src);
1103
  return ret;
1104
}
1105
 
1106
/* Like force_nonfallthru below, but additionally performs redirection
   of E to TARGET.  Used by redirect_edge_and_branch_force.
   Returns the newly created basic block, or NULL if none was needed.  */

static basic_block
force_nonfallthru_and_redirect (edge e, basic_block target)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Split the profile between the new branch edge B and the
	 remaining edge E, per the REG_BR_PROB note if present.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  /* NOTE(review): this zeroes E's probability, which makes the
	     '< 0' clamp below dead; possibly 'b->probability' was
	     intended — matches the upstream source, verify before
	     changing.  */
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  VEC_safe_push (edge, gc, bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
	}
    }

  /* A second successor (or a preserved abnormal edge) forces a fresh
     block to hold the new jump.  */
  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
	  && targetm.have_named_sections
	  && JUMP_P (BB_END (jump_block))
	  && !any_condjump_p (BB_END (jump_block))
	  && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
	add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  /* Carry the goto locus onto the emitted jump when it isn't tied to a
     source block.  */
  if (e->goto_locus && e->goto_block == NULL)
    loc = e->goto_locus;
  else
    loc = 0;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
#ifdef HAVE_return
	emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	gcc_unreachable ();
#endif
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  /* Restore the abnormal edge we stripped off above, if any.  */
  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  return new_bb;
}
1265
 
1266
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

basic_block
force_nonfallthru (edge e)
{
  /* Same destination, just forced out of fallthru form.  */
  return force_nonfallthru_and_redirect (e, e->dest);
}
1275
 
1276
/* Redirect edge even at the expense of creating new jump insn or
1277
   basic block.  Return new basic block if created, NULL otherwise.
1278
   Conversion must be possible.  */
1279
 
1280
static basic_block
1281
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1282
{
1283
  if (redirect_edge_and_branch (e, target)
1284
      || e->dest == target)
1285
    return NULL;
1286
 
1287
  /* In case the edge redirection failed, try to force it to be non-fallthru
1288
     and redirect newly created simplejump.  */
1289
  df_set_bb_dirty (e->src);
1290
  return force_nonfallthru_and_redirect (e, target);
1291
}
1292
 
1293
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back before the jump so the deletion below removes the
	 jump itself as well.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
1341
 
1342
/* Should move basic block BB after basic block AFTER.  Not implemented
   yet for plain RTL mode (NIY); always reports failure so callers fall
   back to other strategies.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
1350
 
1351
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* E is the fallthru predecessor edge found above, or NULL.  */
      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is the after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR)
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1432
 
1433
/* Queue instructions for insertion on an edge between two basic blocks.
1434
   The new instructions and basic blocks (if any) will not appear in the
1435
   CFG until commit_edge_insertions is called.  */
1436
 
1437
void
1438
insert_insn_on_edge (rtx pattern, edge e)
1439
{
1440
  /* We cannot insert instructions on an abnormal critical edge.
1441
     It will be easier to find the culprit if we die now.  */
1442
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1443
 
1444
  if (e->insns.r == NULL_RTX)
1445
    start_sequence ();
1446
  else
1447
    push_to_sequence (e->insns.r);
1448
 
1449
  emit_insn (pattern);
1450
 
1451
  e->insns.r = get_insns ();
1452
  end_sequence ();
1453
}
1454
 
1455
/* Update the CFG for the instructions queued on edge E: find a spot to
   place them (destination head, source tail, or a freshly split block)
   and emit them there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* NOTE(review): BEFORE and AFTER are both initialized to NULL_RTX
     just above, so this condition is always true — apparently
     vestigial from an older version; verify before removing.  */
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (LABEL_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  /* Keep the jump in the right hot/cold partition when blocks
	     are being partitioned.  */
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING)
	      && JUMP_P (after)
	      && !any_condjump_p (after)
	      && (single_succ_edge (bb)->flags & EDGE_CROSSING))
	    add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  if (current_ir_type () != IR_RTL_CFGLAYOUT)
    bb->aux = &bb->aux;
}
1567
 
1568
/* Update the CFG for all queued instructions: commit every edge that
   has pending insns, then re-discover sub-basic-blocks in the blocks
   that received control-flow-relevant insertions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Walk every block (entry included) and flush pending insns on each
     outgoing edge.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e);
	  }
    }

  if (!changed)
    return;

  /* In the old rtl CFG API, it was OK to insert control flow on an
     edge, apparently?  In cfglayout mode, this will *not* work, and
     the caller is responsible for making sure that control flow is
     valid at all times.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    return;

  /* Blocks marked via bb->aux by commit_one_edge_insertion need their
     sub-basic-block structure recomputed.  */
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
1618
 
1619
 
1620
/* Print out RTL-specific basic block information (live information
1621
   at start and end).  */
1622
 
1623
static void
1624
rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1625
{
1626
  rtx insn;
1627
  rtx last;
1628
  char *s_indent;
1629
 
1630
  s_indent = (char *) alloca ((size_t) indent + 1);
1631
  memset (s_indent, ' ', (size_t) indent);
1632
  s_indent[indent] = '\0';
1633
 
1634
  if (df)
1635
    {
1636
      df_dump_top (bb, outf);
1637
      putc ('\n', outf);
1638
    }
1639
 
1640
  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1641
       insn = NEXT_INSN (insn))
1642
    print_rtl_single (outf, insn);
1643
 
1644
  if (df)
1645
    {
1646
      df_dump_bottom (bb, outf);
1647
      putc ('\n', outf);
1648
    }
1649
 
1650
}
1651
 
1652
/* Like print_rtl, but also print out live information for the start of each
   basic block.  Dumps the whole insn chain starting at RTX_FIRST to
   OUTF, annotating basic block boundaries, edges, and insns that fall
   outside (or inside multiple) basic blocks.  */

void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: block starting/ending at the insn, and block
	 membership state.  XCNEWVEC zero-fills, so the defaults are
	 NULL / NOT_IN_BB.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      if (df)
	df_dump_start (outf);

      /* First pass: record, for every insn, which block (if any) it
	 begins, ends, or belongs to.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      /* First block claiming the insn marks it IN_ONE_BB; a
		 second claim upgrades it to IN_MULTIPLE_BB.  */
	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      /* Second pass: walk the insn chain, printing block annotations
	 around each insn.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; Start of basic block (");
	      FOR_EACH_EDGE (e, ei, bb->preds)
		fprintf (outf, " %d", e->src->index);
	      fprintf (outf, ") -> %d\n", bb->index);

	      if (df)
		{
		  df_dump_top (bb, outf);
		  putc ('\n', outf);
		}
	      FOR_EACH_EDGE (e, ei, bb->preds)
		{
		  fputs (";; Pred edge ", outf);
		  dump_edge_info (outf, e, 0);
		  fputc ('\n', outf);
		}
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; End of basic block %d -> (", bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		fprintf (outf, " %d", e->dest->index);
	      fprintf (outf, ")\n");

	      if (df)
		{
		  df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      putc ('\n', outf);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  fputs (";; Succ edge ", outf);
		  dump_edge_info (outf, e, 1);
		  fputc ('\n', outf);
		}
	    }
	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  /* Epilogue delay-slot insns live outside the main chain; dump them
     separately.  */
  if (crtl->epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1768
 
1769
void
1770
update_br_prob_note (basic_block bb)
1771
{
1772
  rtx note;
1773
  if (!JUMP_P (BB_END (bb)))
1774
    return;
1775
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1776
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1777
    return;
1778
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1779
}
1780
 
1781
/* Get the last insn associated with block BB (that includes barriers and
1782
   tablejumps after BB).  */
1783
rtx
1784
get_last_bb_insn (basic_block bb)
1785
{
1786
  rtx tmp;
1787
  rtx end = BB_END (bb);
1788
 
1789
  /* Include any jump table following the basic block.  */
1790
  if (tablejump_p (end, NULL, &tmp))
1791
    end = tmp;
1792
 
1793
  /* Include any barriers that may follow the basic block.  */
1794
  tmp = next_nonnote_insn_bb (end);
1795
  while (tmp && BARRIER_P (tmp))
1796
    {
1797
      end = tmp;
1798
      tmp = next_nonnote_insn_bb (end);
1799
    }
1800
 
1801
  return end;
1802
}
1803
 
1804
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).

   Returns nonzero if any inconsistency (other than one triggering
   fatal_insn) was diagnosed.  Errors are reported via error () and
   accumulated in ERR rather than aborting at the first problem.  */

static int
rtl_verify_flow_info_1 (void)
{
  rtx x;
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn;

      if (!(bb->flags & BB_RTL))
        {
          error ("BB_RTL flag not set for block %d", bb->index);
          err = 1;
        }

      /* Every insn inside the block body must point back at the block.  */
      FOR_BB_INSNS (bb, insn)
        if (BLOCK_FOR_INSN (insn) != bb)
          {
            error ("insn %d basic block pointer is %d, should be %d",
                   INSN_UID (insn),
                   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
                   bb->index);
            err = 1;
          }

      /* cfglayout header/footer insns live outside any block, so their
         BLOCK_FOR_INSN must be NULL (barriers have no such field).  */
      for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
        if (!BARRIER_P (insn)
            && BLOCK_FOR_INSN (insn) != NULL)
          {
            error ("insn %d in header of bb %d has non-NULL basic block",
                   INSN_UID (insn), bb->index);
            err = 1;
          }
      for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
        if (!BARRIER_P (insn)
            && BLOCK_FOR_INSN (insn) != NULL)
          {
            error ("insn %d in footer of bb %d has non-NULL basic block",
                   INSN_UID (insn), bb->index);
            err = 1;
          }
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      /* A conditional jump's REG_BR_PROB note must agree with the
         probability on the branch edge (unless profile info is absent).  */
      if (JUMP_P (BB_END (bb))
          && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
          && EDGE_COUNT (bb->succs) >= 2
          && any_condjump_p (BB_END (bb)))
        {
          if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
              && profile_status != PROFILE_ABSENT)
            {
              error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
                     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
              err = 1;
            }
        }
      /* Classify and count the outgoing edges.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->flags & EDGE_FALLTHRU)
            {
              n_fallthru++, fallthru = e;
              /* A fallthru edge may never cross a hot/cold section
                 boundary.  */
              if ((e->flags & EDGE_CROSSING)
                  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
                      && e->src != ENTRY_BLOCK_PTR
                      && e->dest != EXIT_BLOCK_PTR))
            {
                  error ("fallthru edge crosses section boundary (bb %i)",
                         e->src->index);
                  err = 1;
                }
            }

          /* An edge with none but purely informational flags set is an
             ordinary branch edge.  */
          if ((e->flags & ~(EDGE_DFS_BACK
                            | EDGE_CAN_FALLTHRU
                            | EDGE_IRREDUCIBLE_LOOP
                            | EDGE_LOOP_EXIT
                            | EDGE_CROSSING)) == 0)
            n_branch++;

          if (e->flags & EDGE_ABNORMAL_CALL)
            n_call++;

          if (e->flags & EDGE_EH)
            n_eh++;
          else if (e->flags & EDGE_ABNORMAL)
            n_abnormal++;
        }

      /* Check the edge counts against what the block-ending insn allows.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
        {
          error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
          err = 1;
        }
      if (n_eh > 1)
        {
          error ("too many eh edges %i", bb->index);
          err = 1;
        }
      if (n_branch
          && (!JUMP_P (BB_END (bb))
              || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
                                   || any_condjump_p (BB_END (bb))))))
        {
          error ("too many outgoing branch edges from bb %i", bb->index);
          err = 1;
        }
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
        {
          error ("fallthru edge after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
        {
          error ("wrong number of branch edges after unconditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
          && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
        {
          error ("wrong amount of branch edges after conditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_call && !CALL_P (BB_END (bb)))
        {
          error ("call edges for non-call insn in bb %i", bb->index);
          err = 1;
        }
      if (n_abnormal
          && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
          && (!JUMP_P (BB_END (bb))
              || any_condjump_p (BB_END (bb))
              || any_uncondjump_p (BB_END (bb))))
        {
          error ("abnormal edges for no purpose in bb %i", bb->index);
          err = 1;
        }

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
        /* We may have a barrier inside a basic block before dead code
           elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
        if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
          {
            debug_rtx (x);
            if (! BLOCK_FOR_INSN (x))
              error
                ("insn %d inside basic block %d but block_for_insn is NULL",
                 INSN_UID (x), bb->index);
            else
              error
                ("insn %d inside basic block %d but block_for_insn is %i",
                 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

            err = 1;
          }

      /* OK pointers are correct.  Now check the header of basic
         block.  It ought to contain optional CODE_LABEL followed
         by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
        {
          if (BB_END (bb) == x)
            {
              error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                     bb->index);
              err = 1;
            }

          x = NEXT_INSN (x);
        }

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
        {
          error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                 bb->index);
          err = 1;
        }

      if (BB_END (bb) == x)
        /* Do checks for empty blocks here.  */
        ;
      else
        /* Scan the block body: no stray block notes, and no control flow
           insn anywhere but at the very end.  */
        for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
          {
            if (NOTE_INSN_BASIC_BLOCK_P (x))
              {
                error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
                       INSN_UID (x), bb->index);
                err = 1;
              }

            if (x == BB_END (bb))
              break;

            if (control_flow_insn_p (x))
              {
                error ("in basic block %d:", bb->index);
                fatal_insn ("flow control insn inside a basic block", x);
              }
          }
    }

  /* Clean up.  */
  return err;
}
2039
 
2040
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks.

   Returns nonzero if any non-fatal inconsistency was diagnosed.  */

static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  rtx last_head = get_last_insn ();
  basic_block *bb_info;          /* Maps INSN_UID -> owning block, to
                                    detect insns in two blocks.  */
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Walk the blocks from last to first while simultaneously walking the
     insn chain backwards from LAST_HEAD, so each block's [head, end]
     span can be located in the chain exactly once.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
        {
          /* Verify the end of the basic block is in the INSN chain.  */
          if (x == end)
            break;

          /* And that the code outside of basic blocks has NULL bb field.  */
        if (!BARRIER_P (x)
            && BLOCK_FOR_INSN (x) != NULL)
          {
            error ("insn %d outside of basic blocks has non-NULL bb field",
                   INSN_UID (x));
            err = 1;
          }
        }

      if (!x)
        {
          error ("end insn %d for block %d not found in the insn stream",
                 INSN_UID (end), bb->index);
          err = 1;
        }

      /* Work backwards from the end to the head of the basic block
         to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
        {
          /* While walking over the insn chain, verify insns appear
             in only one basic block.  */
          if (bb_info[INSN_UID (x)] != NULL)
            {
              error ("insn %d is in multiple basic blocks (%d and %d)",
                     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
              err = 1;
            }

          bb_info[INSN_UID (x)] = bb;

          if (x == head)
            break;
        }
      if (!x)
        {
          error ("head insn %d for block %d not found in the insn stream",
                 INSN_UID (head), bb->index);
          err = 1;
        }

      /* Resume the backwards chain walk just before this block's head.  */
      last_head = PREV_INSN (x);

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (!e)
        {
          rtx insn;

          /* Ensure existence of barrier in BB with no fallthru edges.  */
          for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
            {
              if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
                {
                  error ("missing barrier after block %i", bb->index);
                  err = 1;
                  break;
                }
              if (BARRIER_P (insn))
                break;
            }
        }
      else if (e->src != ENTRY_BLOCK_PTR
               && e->dest != EXIT_BLOCK_PTR)
        {
          rtx insn;

          /* A fallthru edge must go to the immediately following block,
             and nothing executable may sit in between.  */
          if (e->src->next_bb != e->dest)
            {
              error
                ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
                 e->src->index, e->dest->index);
              err = 1;
            }
          else
            for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
                 insn = NEXT_INSN (insn))
              if (BARRIER_P (insn) || INSN_P (insn))
                {
                  error ("verify_flow_info: Incorrect fallthru %i->%i",
                         e->src->index, e->dest->index);
                  fatal_insn ("wrong insn in the fallthru edge", insn);
                  err = 1;
                }
        }
    }

  /* LAST_HEAD now points before the first block's head; everything left
     must be outside all blocks.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
         bb field.  */
      if (!BARRIER_P (x)
          && BLOCK_FOR_INSN (x) != NULL)
        {
          error ("insn %d outside of basic blocks has non-NULL bb field",
                 INSN_UID (x));
          err = 1;
        }
    }
  free (bb_info);

  /* Second pass, forward over the whole chain: check block-note layout,
     that stray insns only occur where permitted, and that every return
     is followed by a barrier.  */
  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        {
          bb = NOTE_BASIC_BLOCK (x);

          num_bb_notes++;
          if (bb != last_bb_seen->next_bb)
            internal_error ("basic blocks not laid down consecutively");

          curr_bb = last_bb_seen = bb;
        }

      if (!curr_bb)
        {
          switch (GET_CODE (x))
            {
            case BARRIER:
            case NOTE:
              break;

            case CODE_LABEL:
              /* An addr_vec is placed outside any basic block.  */
              if (NEXT_INSN (x)
                  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
                x = NEXT_INSN (x);

              /* But in any case, non-deletable labels can appear anywhere.  */
              break;

            default:
              fatal_insn ("insn outside basic block", x);
            }
        }

      if (JUMP_P (x)
          && returnjump_p (x) && ! condjump_p (x)
          && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
            fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
        curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
2235
 
2236
/* Assume that the preceding pass has possibly eliminated jump instructions
2237
   or converted the unconditional jumps.  Eliminate the edges from CFG.
2238
   Return true if any edges are eliminated.  */
2239
 
2240
bool
2241
purge_dead_edges (basic_block bb)
2242
{
2243
  edge e;
2244
  rtx insn = BB_END (bb), note;
2245
  bool purged = false;
2246
  bool found;
2247
  edge_iterator ei;
2248
 
2249
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2250
    do
2251
      insn = PREV_INSN (insn);
2252
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2253
 
2254
  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
2255
  if (NONJUMP_INSN_P (insn)
2256
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2257
    {
2258
      rtx eqnote;
2259
 
2260
      if (! may_trap_p (PATTERN (insn))
2261
          || ((eqnote = find_reg_equal_equiv_note (insn))
2262
              && ! may_trap_p (XEXP (eqnote, 0))))
2263
        remove_note (insn, note);
2264
    }
2265
 
2266
  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
2267
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2268
    {
2269
      bool remove = false;
2270
 
2271
      /* There are three types of edges we need to handle correctly here: EH
2272
         edges, abnormal call EH edges, and abnormal call non-EH edges.  The
2273
         latter can appear when nonlocal gotos are used.  */
2274
      if (e->flags & EDGE_ABNORMAL_CALL)
2275
        {
2276
          if (!CALL_P (insn))
2277
            remove = true;
2278
          else if (can_nonlocal_goto (insn))
2279
            ;
2280
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2281
            ;
2282
          else
2283
            remove = true;
2284
        }
2285
      else if (e->flags & EDGE_EH)
2286
        remove = !can_throw_internal (insn);
2287
 
2288
      if (remove)
2289
        {
2290
          remove_edge (e);
2291
          df_set_bb_dirty (bb);
2292
          purged = true;
2293
        }
2294
      else
2295
        ei_next (&ei);
2296
    }
2297
 
2298
  if (JUMP_P (insn))
2299
    {
2300
      rtx note;
2301
      edge b,f;
2302
      edge_iterator ei;
2303
 
2304
      /* We do care only about conditional jumps and simplejumps.  */
2305
      if (!any_condjump_p (insn)
2306
          && !returnjump_p (insn)
2307
          && !simplejump_p (insn))
2308
        return purged;
2309
 
2310
      /* Branch probability/prediction notes are defined only for
2311
         condjumps.  We've possibly turned condjump into simplejump.  */
2312
      if (simplejump_p (insn))
2313
        {
2314
          note = find_reg_note (insn, REG_BR_PROB, NULL);
2315
          if (note)
2316
            remove_note (insn, note);
2317
          while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2318
            remove_note (insn, note);
2319
        }
2320
 
2321
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2322
        {
2323
          /* Avoid abnormal flags to leak from computed jumps turned
2324
             into simplejumps.  */
2325
 
2326
          e->flags &= ~EDGE_ABNORMAL;
2327
 
2328
          /* See if this edge is one we should keep.  */
2329
          if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2330
            /* A conditional jump can fall through into the next
2331
               block, so we should keep the edge.  */
2332
            {
2333
              ei_next (&ei);
2334
              continue;
2335
            }
2336
          else if (e->dest != EXIT_BLOCK_PTR
2337
                   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2338
            /* If the destination block is the target of the jump,
2339
               keep the edge.  */
2340
            {
2341
              ei_next (&ei);
2342
              continue;
2343
            }
2344
          else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2345
            /* If the destination block is the exit block, and this
2346
               instruction is a return, then keep the edge.  */
2347
            {
2348
              ei_next (&ei);
2349
              continue;
2350
            }
2351
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2352
            /* Keep the edges that correspond to exceptions thrown by
2353
               this instruction and rematerialize the EDGE_ABNORMAL
2354
               flag we just cleared above.  */
2355
            {
2356
              e->flags |= EDGE_ABNORMAL;
2357
              ei_next (&ei);
2358
              continue;
2359
            }
2360
 
2361
          /* We do not need this edge.  */
2362
          df_set_bb_dirty (bb);
2363
          purged = true;
2364
          remove_edge (e);
2365
        }
2366
 
2367
      if (EDGE_COUNT (bb->succs) == 0 || !purged)
2368
        return purged;
2369
 
2370
      if (dump_file)
2371
        fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2372
 
2373
      if (!optimize)
2374
        return purged;
2375
 
2376
      /* Redistribute probabilities.  */
2377
      if (single_succ_p (bb))
2378
        {
2379
          single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2380
          single_succ_edge (bb)->count = bb->count;
2381
        }
2382
      else
2383
        {
2384
          note = find_reg_note (insn, REG_BR_PROB, NULL);
2385
          if (!note)
2386
            return purged;
2387
 
2388
          b = BRANCH_EDGE (bb);
2389
          f = FALLTHRU_EDGE (bb);
2390
          b->probability = INTVAL (XEXP (note, 0));
2391
          f->probability = REG_BR_PROB_BASE - b->probability;
2392
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2393
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2394
        }
2395
 
2396
      return purged;
2397
    }
2398
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2399
    {
2400
      /* First, there should not be any EH or ABCALL edges resulting
2401
         from non-local gotos and the like.  If there were, we shouldn't
2402
         have created the sibcall in the first place.  Second, there
2403
         should of course never have been a fallthru edge.  */
2404
      gcc_assert (single_succ_p (bb));
2405
      gcc_assert (single_succ_edge (bb)->flags
2406
                  == (EDGE_SIBCALL | EDGE_ABNORMAL));
2407
 
2408
      return 0;
2409
    }
2410
 
2411
  /* If we don't see a jump insn, we don't know exactly why the block would
2412
     have been broken at this point.  Look for a simple, non-fallthru edge,
2413
     as these are only created by conditional branches.  If we find such an
2414
     edge we know that there used to be a jump here and can then safely
2415
     remove all non-fallthru edges.  */
2416
  found = false;
2417
  FOR_EACH_EDGE (e, ei, bb->succs)
2418
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2419
      {
2420
        found = true;
2421
        break;
2422
      }
2423
 
2424
  if (!found)
2425
    return purged;
2426
 
2427
  /* Remove all but the fake and fallthru edges.  The fake edge may be
2428
     the only successor for this block in the case of noreturn
2429
     calls.  */
2430
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2431
    {
2432
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2433
        {
2434
          df_set_bb_dirty (bb);
2435
          remove_edge (e);
2436
          purged = true;
2437
        }
2438
      else
2439
        ei_next (&ei);
2440
    }
2441
 
2442
  gcc_assert (single_succ_p (bb));
2443
 
2444
  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2445
  single_succ_edge (bb)->count = bb->count;
2446
 
2447
  if (dump_file)
2448
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2449
             bb->index);
2450
  return purged;
2451
}
2452
 
2453
/* Search all basic blocks for potentially dead edges and purge them.  Return
2454
   true if some edge has been eliminated.  */
2455
 
2456
bool
2457
purge_all_dead_edges (void)
2458
{
2459
  int purged = false;
2460
  basic_block bb;
2461
 
2462
  FOR_EACH_BB (bb)
2463
    {
2464
      bool purged_here = purge_dead_edges (bb);
2465
 
2466
      purged |= purged_here;
2467
    }
2468
 
2469
  return purged;
2470
}
2471
 
2472
/* Same as split_block but update cfg_layout structures.  */
2473
 
2474
static basic_block
2475
cfg_layout_split_block (basic_block bb, void *insnp)
2476
{
2477
  rtx insn = (rtx) insnp;
2478
  basic_block new_bb = rtl_split_block (bb, insn);
2479
 
2480
  new_bb->il.rtl->footer = bb->il.rtl->footer;
2481
  bb->il.rtl->footer = NULL;
2482
 
2483
  return new_bb;
2484
}
2485
 
2486
/* Redirect Edge to DEST.  Returns the (possibly new) edge on success,
   or NULL when the redirection cannot be performed (abnormal call/EH
   edges).  Cfglayout variant: fallthru edges can be redirected freely
   since block order is not tied to the insn chain.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal-call and EH edges are not redirectable.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going to DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* The entry fallthru edge carries no jump insn; just repoint it.  */
  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
        fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          edge redirected;

          if (dump_file)
            fprintf (dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          /* Temporarily drop the fallthru flag so redirect_branch_edge
             treats E as an ordinary branch edge.  */
          e->flags &= ~EDGE_FALLTHRU;
          redirected = redirect_branch_edge (e, dest);
          gcc_assert (redirected);
          e->flags |= EDGE_FALLTHRU;
          df_set_bb_dirty (e->src);
          return e;
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
        {
          /* Find the edge that is different from E.  */
          edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
        fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
                 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
2568
 
2569
/* Simple wrapper as we always can redirect fallthru edges.  */
2570
static basic_block
2571
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2572
{
2573
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2574
 
2575
  gcc_assert (redirected);
2576
  return NULL;
2577
}
2578
 
2579
/* Same as delete_basic_block but update cfg_layout structures.
   Splices BB's cfglayout header insns back into the global insn chain
   before the block, drops barriers from the footer and splices what
   remains after the block, deletes the block itself, and finally moves
   any leftover insns into the next block's header (or the function
   footer when BB was the last block).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Re-link the header insns into the chain just before BB_HEAD.  */
  if (bb->il.rtl->header)
    {
      next = BB_HEAD (bb);
      if (prev)
        NEXT_INSN (prev) = bb->il.rtl->header;
      else
        set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      /* Unlink barriers from the footer (they would be stale after the
         block is gone), stopping at the first label.  */
      insn = bb->il.rtl->footer;
      while (insn)
        {
          if (BARRIER_P (insn))
            {
              if (PREV_INSN (insn))
                NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                bb->il.rtl->footer = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (LABEL_P (insn))
            break;
          insn = NEXT_INSN (insn);
        }
      /* Splice whatever footer remains into the chain after BB_END.  */
      if (bb->il.rtl->footer)
        {
          insn = BB_END (bb);
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          NEXT_INSN (insn) = next;
          if (next)
            PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  /* Leftover insns will be appended to the next block's header, or to
     the function footer when BB was the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything survived between PREV and NEXT, unlink it and append it
     to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
        PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
2661
 
2662
/* Return true when blocks A and B can be safely merged.  */
2663
 
2664
static bool
2665
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2666
{
2667
  /* If we are partitioning hot/cold basic blocks, we don't want to
2668
     mess up unconditional or indirect jumps that cross between hot
2669
     and cold sections.
2670
 
2671
     Basic block partitioning may result in some jumps that appear to
2672
     be optimizable (or blocks that appear to be mergeable), but which really
2673
     must be left untouched (they are required to make it safely across
2674
     partition boundaries).  See  the comments at the top of
2675
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
2676
 
2677
  if (BB_PARTITION (a) != BB_PARTITION (b))
2678
    return false;
2679
 
2680
  /* There must be exactly one edge in between the blocks.  */
2681
  return (single_succ_p (a)
2682
          && single_succ (a) == b
2683
          && single_pred_p (b) == 1
2684
          && a != b
2685
          /* Must be simple edge.  */
2686
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2687
          && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2688
          /* If the jump insn has side effects, we can't kill the edge.
2689
             When not optimizing, try_redirect_by_replacing_jump will
2690
             not allow us to redirect an edge by replacing a table jump.  */
2691
          && (!JUMP_P (BB_END (a))
2692
              || ((!optimize || reload_completed)
2693
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2694
}
2695
 
2696
/* Merge block B into block A.  The blocks must satisfy
   cfg_layout_can_merge_blocks_p (asserted below when checking is
   enabled).  B's insns are appended to A, B's header/footer chains are
   transferred to A, and B is removed from the dataflow framework.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
    {
      /* END is the insn just before A's head; it acts as a sentinel
	 for the backward walk below.  */
      rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
      int goto_locus = EDGE_SUCC (a, 0)->goto_locus;

      /* Find the last real insn of A that carries a locator.  */
      while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
	insn = PREV_INSN (insn);
      if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
	goto_locus = 0;
      else
	{
	  /* Otherwise check whether the first real insn of B already
	     carries the same locus.  */
	  insn = BB_HEAD (b);
	  end = NEXT_INSN (BB_END (b));
	  while (insn != end && !INSN_P (insn))
	    insn = NEXT_INSN (insn);
	  if (insn != end && INSN_LOCATOR (insn) != 0
	      && locator_eq (INSN_LOCATOR (insn), goto_locus))
	    goto_locus = 0;
	}
      /* Nothing nearby holds the locus: materialize it on a nop.  */
      if (goto_locus)
	{
	  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
	  INSN_LOCATOR (BB_END (a)) = goto_locus;
	}
    }

  /* Possible line number notes should appear in between.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
      /* The header insns were copied after A's end; delete the
	 originals that were just emitted in between.  */
      delete_insn_chain (NEXT_INSN (first), last, false);
      b->il.rtl->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a), a);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;

      /* emit_insn_after_noloc doesn't call df_insn_change_bb.
         We need to explicitly call. */
      update_bb_for_insn_chain (NEXT_INSN (first),
				BB_END (b),
				a);

      /* FIRST is B's basic-block note, no longer needed.  */
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);

      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      /* Delete B's basic-block note.  */
      delete_insn (insn);
    }

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.
     Append B's footer chain to the end of A's.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
	a->il.rtl->footer = b->il.rtl->footer;
      else
	{
	  rtx last = a->il.rtl->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->il.rtl->footer;
	  PREV_INSN (b->il.rtl->footer) = last;
	}
      b->il.rtl->footer = NULL;
    }

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);
}
2818
 
2819
/* Split edge E.  */
2820
 
2821
static basic_block
2822
cfg_layout_split_edge (edge e)
2823
{
2824
  basic_block new_bb =
2825
    create_basic_block (e->src != ENTRY_BLOCK_PTR
2826
                        ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2827
                        NULL_RTX, e->src);
2828
 
2829
  if (e->dest == EXIT_BLOCK_PTR)
2830
    BB_COPY_PARTITION (new_bb, e->src);
2831
  else
2832
    BB_COPY_PARTITION (new_bb, e->dest);
2833
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2834
  redirect_edge_and_branch_force (e, new_bb);
2835
 
2836
  return new_bb;
2837
}
2838
 
2839
/* Do postprocessing after making a forwarder block joined by edge
   FALLTHRU.  Nothing needs fixing up at the RTL level, so this hook
   is an intentional no-op (it fills the make_forwarder_block slot of
   the cfg_hooks tables below).  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
2845
 
2846
/* Return 1 if BB ends with a call, possibly followed by some
2847
   instructions that must stay with the call, 0 otherwise.  */
2848
 
2849
static bool
2850
rtl_block_ends_with_call_p (basic_block bb)
2851
{
2852
  rtx insn = BB_END (bb);
2853
 
2854
  while (!CALL_P (insn)
2855
         && insn != BB_HEAD (bb)
2856
         && (keep_with_call_p (insn)
2857
             || NOTE_P (insn)
2858
             || DEBUG_INSN_P (insn)))
2859
    insn = PREV_INSN (insn);
2860
  return (CALL_P (insn));
2861
}
2862
 
2863
/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
2864
 
2865
static bool
2866
rtl_block_ends_with_condjump_p (const_basic_block bb)
2867
{
2868
  return any_condjump_p (BB_END (bb));
2869
}
2870
 
2871
/* Return true if we need to add fake edge to exit.
2872
   Helper function for rtl_flow_call_edges_add.  */
2873
 
2874
static bool
2875
need_fake_edge_p (const_rtx insn)
2876
{
2877
  if (!INSN_P (insn))
2878
    return false;
2879
 
2880
  if ((CALL_P (insn)
2881
       && !SIBLING_CALL_P (insn)
2882
       && !find_reg_note (insn, REG_NORETURN, NULL)
2883
       && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2884
    return true;
2885
 
2886
  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2887
           && MEM_VOLATILE_P (PATTERN (insn)))
2888
          || (GET_CODE (PATTERN (insn)) == PARALLEL
2889
              && asm_noperands (insn) != -1
2890
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2891
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2892
}
2893
 
2894
/* Add fake edges to the function exit for any non constant and non noreturn
2895
   calls, volatile inline assembly in the bitmap of blocks specified by
2896
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
2897
   that were split.
2898
 
2899
   The goal is to expose cases in which entering a basic block does not imply
2900
   that all subsequent instructions must be executed.  */
2901
 
2902
static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot of the block count taken before any splitting below.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do if the function has no real blocks.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          e = find_edge (bb, EXIT_BLOCK_PTR);
          if (e)
            {
              /* The dummy use forces a new last block, so the fake
                 edge added below cannot collide with the fallthru.  */
              insert_insn_on_edge (gen_use (const0_rtx), e);
              commit_edge_insertions ();
            }
        }
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      /* Skip holes in the basic block array.  */
      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      /* Walk the block's insns backwards.  PREV_INSN is sampled before
         any splitting so the walk continues correctly afterwards.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (CALL_P (insn))
                while (split_at_insn != BB_END (bb)
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in CFG already.  Calling make_edge in such case would
                 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == BB_END (bb))
                {
                  e = find_edge (bb, EXIT_BLOCK_PTR);
                  gcc_assert (e == NULL);
                }
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != BB_END (bb))
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == BB_HEAD (bb))
            break;
        }
    }

  /* Splitting may have invalidated cached CFG data; re-verify.  */
  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
3019
 
3020
/* Add COMP_RTX as a condition at end of COND_BB.  FIRST_HEAD is
3021
   the conditional branch target, SECOND_HEAD should be the fall-thru
3022
   there is no need to handle this here the loop versioning code handles
3023
   this.  the reason for SECON_HEAD is that it is needed for condition
3024
   in trees, and this should be of the same type since it is a hook.  */
3025
static void
3026
rtl_lv_add_condition_to_bb (basic_block first_head ,
3027
                            basic_block second_head ATTRIBUTE_UNUSED,
3028
                            basic_block cond_bb, void *comp_rtx)
3029
{
3030
  rtx label, seq, jump;
3031
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
3032
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
3033
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3034
  enum machine_mode mode;
3035
 
3036
 
3037
  label = block_label (first_head);
3038
  mode = GET_MODE (op0);
3039
  if (mode == VOIDmode)
3040
    mode = GET_MODE (op1);
3041
 
3042
  start_sequence ();
3043
  op0 = force_operand (op0, NULL_RTX);
3044
  op1 = force_operand (op1, NULL_RTX);
3045
  do_compare_rtx_and_jump (op0, op1, comp, 0,
3046
                           mode, NULL_RTX, NULL_RTX, label, -1);
3047
  jump = get_last_insn ();
3048
  JUMP_LABEL (jump) = label;
3049
  LABEL_NUSES (label)++;
3050
  seq = get_insns ();
3051
  end_sequence ();
3052
 
3053
  /* Add the new cond , in the new head.  */
3054
  emit_insn_after(seq, BB_END(cond_bb));
3055
}
3056
 
3057
 
3058
/* Given a block B with unconditional branch at its end, get the
3059
   store the return the branch edge and the fall-thru edge in
3060
   BRANCH_EDGE and FALLTHRU_EDGE respectively.  */
3061
static void
3062
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3063
                           edge *fallthru_edge)
3064
{
3065
  edge e = EDGE_SUCC (b, 0);
3066
 
3067
  if (e->flags & EDGE_FALLTHRU)
3068
    {
3069
      *fallthru_edge = e;
3070
      *branch_edge = EDGE_SUCC (b, 1);
3071
    }
3072
  else
3073
    {
3074
      *branch_edge = e;
3075
      *fallthru_edge = EDGE_SUCC (b, 1);
3076
    }
3077
}
3078
 
3079
/* Allocate the RTL-specific info structure for BB (bb->il.rtl),
   zero-initialized via GGC_CNEW.  BB must not already have one.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
}
3085
 
3086
 
3087
/* Add EXPR to the end of basic block BB.  */
3088
 
3089
rtx
3090
insert_insn_end_bb_new (rtx pat, basic_block bb)
3091
{
3092
  rtx insn = BB_END (bb);
3093
  rtx new_insn;
3094
  rtx pat_end = pat;
3095
 
3096
  while (NEXT_INSN (pat_end) != NULL_RTX)
3097
    pat_end = NEXT_INSN (pat_end);
3098
 
3099
  /* If the last insn is a jump, insert EXPR in front [taking care to
3100
     handle cc0, etc. properly].  Similarly we need to care trapping
3101
     instructions in presence of non-call exceptions.  */
3102
 
3103
  if (JUMP_P (insn)
3104
      || (NONJUMP_INSN_P (insn)
3105
          && (!single_succ_p (bb)
3106
              || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
3107
    {
3108
#ifdef HAVE_cc0
3109
      rtx note;
3110
#endif
3111
      /* If this is a jump table, then we can't insert stuff here.  Since
3112
         we know the previous real insn must be the tablejump, we insert
3113
         the new instruction just before the tablejump.  */
3114
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3115
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3116
        insn = prev_real_insn (insn);
3117
 
3118
#ifdef HAVE_cc0
3119
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
3120
         if cc0 isn't set.  */
3121
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3122
      if (note)
3123
        insn = XEXP (note, 0);
3124
      else
3125
        {
3126
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
3127
          if (maybe_cc0_setter
3128
              && INSN_P (maybe_cc0_setter)
3129
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
3130
            insn = maybe_cc0_setter;
3131
        }
3132
#endif
3133
      /* FIXME: What if something in cc0/jump uses value set in new
3134
         insn?  */
3135
      new_insn = emit_insn_before_noloc (pat, insn, bb);
3136
    }
3137
 
3138
  /* Likewise if the last insn is a call, as will happen in the presence
3139
     of exception handling.  */
3140
  else if (CALL_P (insn)
3141
           && (!single_succ_p (bb)
3142
               || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3143
    {
3144
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3145
         we search backward and place the instructions before the first
3146
         parameter is loaded.  Do this for everyone for consistency and a
3147
         presumption that we'll get better code elsewhere as well.  */
3148
 
3149
      /* Since different machines initialize their parameter registers
3150
         in different orders, assume nothing.  Collect the set of all
3151
         parameter registers.  */
3152
      insn = find_first_parameter_load (insn, BB_HEAD (bb));
3153
 
3154
      /* If we found all the parameter loads, then we want to insert
3155
         before the first parameter load.
3156
 
3157
         If we did not find all the parameter loads, then we might have
3158
         stopped on the head of the block, which could be a CODE_LABEL.
3159
         If we inserted before the CODE_LABEL, then we would be putting
3160
         the insn in the wrong basic block.  In that case, put the insn
3161
         after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
3162
      while (LABEL_P (insn)
3163
             || NOTE_INSN_BASIC_BLOCK_P (insn))
3164
        insn = NEXT_INSN (insn);
3165
 
3166
      new_insn = emit_insn_before_noloc (pat, insn, bb);
3167
    }
3168
  else
3169
    new_insn = emit_insn_after_noloc (pat, insn, bb);
3170
 
3171
  return new_insn;
3172
}
3173
 
3174
/* Returns true if it is possible to remove edge E by redirecting
3175
   it to the destination of the other edge from E->src.  */
3176
 
3177
static bool
3178
rtl_can_remove_branch_p (const_edge e)
3179
{
3180
  const_basic_block src = e->src;
3181
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3182
  const_rtx insn = BB_END (src), set;
3183
 
3184
  /* The conditions are taken from try_redirect_by_replacing_jump.  */
3185
  if (target == EXIT_BLOCK_PTR)
3186
    return false;
3187
 
3188
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3189
    return false;
3190
 
3191
  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3192
      || BB_PARTITION (src) != BB_PARTITION (target))
3193
    return false;
3194
 
3195
  if (!onlyjump_p (insn)
3196
      || tablejump_p (insn, NULL, NULL))
3197
    return false;
3198
 
3199
  set = single_set (insn);
3200
  if (!set || side_effects_p (set))
3201
    return false;
3202
 
3203
  return true;
3204
}
3205
 
3206
/* Implementation of CFG manipulation for linearized RTL.  Unfilled
   slots are labeled with their struct cfg_hooks field names.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  NULL, /* can_duplicate_block_p */
  NULL, /* duplicate_block */
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL          /* flush_pending_stmts */
};
3238
 
3239
/* Implementation of CFG manipulation for cfg layout RTL, where
3240
   basic block connected via fallthru edges does not have to be adjacent.
3241
   This representation will hopefully become the default one in future
3242
   version of the compiler.  */
3243
 
3244
/* We do not want to declare these functions in a header file, since they
3245
   should only be used through the cfghooks interface, and we do not want to
3246
   move them here since it would require also moving quite a lot of related
3247
   code.  They are in cfglayout.c.  */
3248
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3249
extern basic_block cfg_layout_duplicate_bb (basic_block);
3250
 
3251
/* CFG hooks for cfglayout mode; slots parallel rtl_cfg_hooks above.  */
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge: not needed, fallthru blocks need not be adjacent */
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL          /* flush_pending_stmts */
};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.