OpenCores Subversion repository: openrisc
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk

File: openrisc/trunk/gnu-old/gcc-4.2.2/gcc/tree-cfg.c (blame information for rev 856)


Line No. Rev Author Line
1 38 julius
/* Control flow functions for trees.
2
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3
   Free Software Foundation, Inc.
4
   Contributed by Diego Novillo <dnovillo@redhat.com>
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "tree.h"
27
#include "rtl.h"
28
#include "tm_p.h"
29
#include "hard-reg-set.h"
30
#include "basic-block.h"
31
#include "output.h"
32
#include "flags.h"
33
#include "function.h"
34
#include "expr.h"
35
#include "ggc.h"
36
#include "langhooks.h"
37
#include "diagnostic.h"
38
#include "tree-flow.h"
39
#include "timevar.h"
40
#include "tree-dump.h"
41
#include "tree-pass.h"
42
#include "toplev.h"
43
#include "except.h"
44
#include "cfgloop.h"
45
#include "cfglayout.h"
46
#include "hashtab.h"
47
#include "tree-ssa-propagate.h"
48
 
49
/* This file contains functions for building the Control Flow Graph (CFG)
50
   for a function tree.  */
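/* For example, a function body along the lines of

       if (a)
         x = 1;
       else
         x = 2;
       return x;

   yields a flowgraph in which the condition ends its block with true and
   false edges to the blocks holding "x = 1" and "x = 2", and both of those
   blocks reach the block holding "return x", which has an edge to EXIT.  */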
51
 
52
/* Local declarations.  */
53
 
54
/* Initial capacity for the basic block array.  */
55
static const int initial_cfg_capacity = 20;
56
 
57
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
59
   via their TREE_CHAIN field, which we clear after we're done with the
60
   hash table to prevent problems with duplication of SWITCH_EXPRs.
61
 
62
   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63
   update the case vector in response to edge redirections.
64
 
65
   Right now this table is set up and torn down at key points in the
66
   compilation process.  It would be nice if we could make the table
67
   more persistent.  The key is getting notification of changes to
68
   the CFG (particularly edge removal, creation and redirection).  */
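/* For example, in

       switch (x) { case 1: case 2: foo (); break; default: bar (); }

   the CASE_LABEL_EXPRs for 1 and 2 lead to the same block, so both are
   chained through their TREE_CHAIN fields on the single hash table entry
   for the edge from the switch block to that block.  */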
69
 
70
struct edge_to_cases_elt
71
{
72
  /* The edge itself.  Necessary for hashing and equality tests.  */
73
  edge e;
74
 
75
  /* The case labels associated with this edge.  We link these up via
76
     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77
     when we destroy the hash table.  This prevents problems when copying
78
     SWITCH_EXPRs.  */
79
  tree case_labels;
80
};
81
 
82
static htab_t edge_to_cases;
83
 
84
/* CFG statistics.  */
85
struct cfg_stats_d
86
{
87
  long num_merged_labels;
88
};
89
 
90
static struct cfg_stats_d cfg_stats;
91
 
92
/* Nonzero if we found a computed goto while building basic blocks.  */
93
static bool found_computed_goto;
94
 
95
/* Basic blocks and flowgraphs.  */
96
static basic_block create_bb (void *, void *, basic_block);
97
static void make_blocks (tree);
98
static void factor_computed_gotos (void);
99
 
100
/* Edges.  */
101
static void make_edges (void);
102
static void make_cond_expr_edges (basic_block);
103
static void make_switch_expr_edges (basic_block);
104
static void make_goto_expr_edges (basic_block);
105
static edge tree_redirect_edge_and_branch (edge, basic_block);
106
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
107
static unsigned int split_critical_edges (void);
108
 
109
/* Various helpers.  */
110
static inline bool stmt_starts_bb_p (tree, tree);
111
static int tree_verify_flow_info (void);
112
static void tree_make_forwarder_block (edge);
113
static void tree_cfg2vcg (FILE *);
114
static inline void change_bb_for_stmt (tree t, basic_block bb);
115
 
116
/* Flowgraph optimization and cleanup.  */
117
static void tree_merge_blocks (basic_block, basic_block);
118
static bool tree_can_merge_blocks_p (basic_block, basic_block);
119
static void remove_bb (basic_block);
120
static edge find_taken_edge_computed_goto (basic_block, tree);
121
static edge find_taken_edge_cond_expr (basic_block, tree);
122
static edge find_taken_edge_switch_expr (basic_block, tree);
123
static tree find_case_label_for_value (tree, tree);
124
 
125
void
126
init_empty_tree_cfg (void)
127
{
128
  /* Initialize the basic block array.  */
129
  init_flow ();
130
  profile_status = PROFILE_ABSENT;
131
  n_basic_blocks = NUM_FIXED_BLOCKS;
132
  last_basic_block = NUM_FIXED_BLOCKS;
133
  basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
134
  VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
135
  memset (VEC_address (basic_block, basic_block_info), 0,
136
          sizeof (basic_block) * initial_cfg_capacity);
137
 
138
  /* Build a mapping of labels to their associated blocks.  */
139
  label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
140
  VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
141
  memset (VEC_address (basic_block, label_to_block_map),
142
          0, sizeof (basic_block) * initial_cfg_capacity);
143
 
144
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
145
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
146
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
147
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
148
}
149
 
150
/*---------------------------------------------------------------------------
151
                              Create basic blocks
152
---------------------------------------------------------------------------*/
153
 
154
/* Entry point to the CFG builder for trees.  TP points to the list of
155
   statements to be added to the flowgraph.  */
156
 
157
static void
158
build_tree_cfg (tree *tp)
159
{
160
  /* Register specific tree functions.  */
161
  tree_register_cfg_hooks ();
162
 
163
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
164
 
165
  init_empty_tree_cfg ();
166
 
167
  found_computed_goto = 0;
168
  make_blocks (*tp);
169
 
170
  /* Computed gotos are hell to deal with, especially if there are
171
     lots of them with a large number of destinations.  So we factor
172
     them to a common computed goto location before we build the
173
     edge list.  After we convert back to normal form, we will un-factor
174
     the computed gotos since factoring introduces an unwanted jump.  */
175
  if (found_computed_goto)
176
    factor_computed_gotos ();
177
 
178
  /* Make sure there is always at least one block, even if it's empty.  */
179
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
180
    create_empty_bb (ENTRY_BLOCK_PTR);
181
 
182
  /* Adjust the size of the array.  */
183
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
184
    {
185
      size_t old_size = VEC_length (basic_block, basic_block_info);
186
      basic_block *p;
187
      VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
188
      p = VEC_address (basic_block, basic_block_info);
189
      memset (&p[old_size], 0,
190
              sizeof (basic_block) * (n_basic_blocks - old_size));
191
    }
192
 
193
  /* To speed up statement iterator walks, we first purge dead labels.  */
194
  cleanup_dead_labels ();
195
 
196
  /* Group case nodes to reduce the number of edges.
197
     We do this after cleaning up dead labels because otherwise we miss
198
     a lot of obvious case merging opportunities.  */
199
  group_case_labels ();
200
 
201
  /* Create the edges of the flowgraph.  */
202
  make_edges ();
203
 
204
  /* Debugging dumps.  */
205
 
206
  /* Write the flowgraph to a VCG file.  */
207
  {
208
    int local_dump_flags;
209
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
210
    if (vcg_file)
211
      {
212
        tree_cfg2vcg (vcg_file);
213
        dump_end (TDI_vcg, vcg_file);
214
      }
215
  }
216
 
217
#ifdef ENABLE_CHECKING
218
  verify_stmts ();
219
#endif
220
 
221
  /* Dump a textual representation of the flowgraph.  */
222
  if (dump_file)
223
    dump_tree_cfg (dump_file, dump_flags);
224
}
225
 
226
static unsigned int
227
execute_build_cfg (void)
228
{
229
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
230
  return 0;
231
}
232
 
233
struct tree_opt_pass pass_build_cfg =
234
{
235
  "cfg",                                /* name */
236
  NULL,                                 /* gate */
237
  execute_build_cfg,                    /* execute */
238
  NULL,                                 /* sub */
239
  NULL,                                 /* next */
240
  0,                                     /* static_pass_number */
241
  TV_TREE_CFG,                          /* tv_id */
242
  PROP_gimple_leh,                      /* properties_required */
243
  PROP_cfg,                             /* properties_provided */
244
  0,                                     /* properties_destroyed */
245
  0,                                     /* todo_flags_start */
246
  TODO_verify_stmts,                    /* todo_flags_finish */
247
 
248
};
249
 
250
/* Search the CFG for any computed gotos.  If found, factor them to a
251
   common computed goto site.  Also record the location of that site so
252
   that we can un-factor the gotos after we have converted back to
253
   normal form.  */
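/* Conceptually, each block that ends in

       goto *DEST;

   is rewritten to end in

       gotovar = DEST;
       goto <factored label>;

   and a single new block

       <factored label>:
       goto *gotovar;

   is added, so that only that one block needs edges to every possible
   destination of the computed gotos.  */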
254
 
255
static void
256
factor_computed_gotos (void)
257
{
258
  basic_block bb;
259
  tree factored_label_decl = NULL;
260
  tree var = NULL;
261
  tree factored_computed_goto_label = NULL;
262
  tree factored_computed_goto = NULL;
263
 
264
  /* We know there are one or more computed gotos in this function.
265
     Examine the last statement in each basic block to see if the block
266
     ends with a computed goto.  */
267
 
268
  FOR_EACH_BB (bb)
269
    {
270
      block_stmt_iterator bsi = bsi_last (bb);
271
      tree last;
272
 
273
      if (bsi_end_p (bsi))
274
        continue;
275
      last = bsi_stmt (bsi);
276
 
277
      /* Ignore the computed goto we create when we factor the original
278
         computed gotos.  */
279
      if (last == factored_computed_goto)
280
        continue;
281
 
282
      /* If the last statement is a computed goto, factor it.  */
283
      if (computed_goto_p (last))
284
        {
285
          tree assignment;
286
 
287
          /* The first time we find a computed goto we need to create
288
             the factored goto block and the variable each original
289
             computed goto will use for their goto destination.  */
290
          if (! factored_computed_goto)
291
            {
292
              basic_block new_bb = create_empty_bb (bb);
293
              block_stmt_iterator new_bsi = bsi_start (new_bb);
294
 
295
              /* Create the destination of the factored goto.  Each original
296
                 computed goto will put its desired destination into this
297
                 variable and jump to the label we create immediately
298
                 below.  */
299
              var = create_tmp_var (ptr_type_node, "gotovar");
300
 
301
              /* Build a label for the new block which will contain the
302
                 factored computed goto.  */
303
              factored_label_decl = create_artificial_label ();
304
              factored_computed_goto_label
305
                = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306
              bsi_insert_after (&new_bsi, factored_computed_goto_label,
307
                                BSI_NEW_STMT);
308
 
309
              /* Build our new computed goto.  */
310
              factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311
              bsi_insert_after (&new_bsi, factored_computed_goto,
312
                                BSI_NEW_STMT);
313
            }
314
 
315
          /* Copy the original computed goto's destination into VAR.  */
316
          assignment = build2 (MODIFY_EXPR, ptr_type_node,
317
                               var, GOTO_DESTINATION (last));
318
          bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
319
 
320
          /* And re-vector the computed goto to the new destination.  */
321
          GOTO_DESTINATION (last) = factored_label_decl;
322
        }
323
    }
324
}
325
 
326
 
327
/* Build a flowgraph for the statement_list STMT_LIST.  */
328
 
329
static void
330
make_blocks (tree stmt_list)
331
{
332
  tree_stmt_iterator i = tsi_start (stmt_list);
333
  tree stmt = NULL;
334
  bool start_new_block = true;
335
  bool first_stmt_of_list = true;
336
  basic_block bb = ENTRY_BLOCK_PTR;
337
 
338
  while (!tsi_end_p (i))
339
    {
340
      tree prev_stmt;
341
 
342
      prev_stmt = stmt;
343
      stmt = tsi_stmt (i);
344
 
345
      /* If the statement starts a new basic block or if we have determined
346
         in a previous pass that we need to create a new block for STMT, do
347
         so now.  */
348
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
349
        {
350
          if (!first_stmt_of_list)
351
            stmt_list = tsi_split_statement_list_before (&i);
352
          bb = create_basic_block (stmt_list, NULL, bb);
353
          start_new_block = false;
354
        }
355
 
356
      /* Now add STMT to BB and create the subgraphs for special statement
357
         codes.  */
358
      set_bb_for_stmt (stmt, bb);
359
 
360
      if (computed_goto_p (stmt))
361
        found_computed_goto = true;
362
 
363
      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364
         next iteration.  */
365
      if (stmt_ends_bb_p (stmt))
366
        start_new_block = true;
367
 
368
      tsi_next (&i);
369
      first_stmt_of_list = false;
370
    }
371
}
372
 
373
 
374
/* Create and return a new empty basic block after bb AFTER.  */
375
 
376
static basic_block
377
create_bb (void *h, void *e, basic_block after)
378
{
379
  basic_block bb;
380
 
381
  gcc_assert (!e);
382
 
383
  /* Create and initialize a new basic block.  Since alloc_block uses
384
     ggc_alloc_cleared to allocate a basic block, we do not have to
385
     clear the newly allocated basic block here.  */
386
  bb = alloc_block ();
387
 
388
  bb->index = last_basic_block;
389
  bb->flags = BB_NEW;
390
  bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
391
 
392
  /* Add the new block to the linked list of blocks.  */
393
  link_block (bb, after);
394
 
395
  /* Grow the basic block array if needed.  */
396
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
397
    {
398
      size_t old_size = VEC_length (basic_block, basic_block_info);
399
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
400
      basic_block *p;
401
      VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
402
      p = VEC_address (basic_block, basic_block_info);
403
      memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
404
    }
405
 
406
  /* Add the newly created block to the array.  */
407
  SET_BASIC_BLOCK (last_basic_block, bb);
408
 
409
  n_basic_blocks++;
410
  last_basic_block++;
411
 
412
  return bb;
413
}
414
 
415
 
416
/*---------------------------------------------------------------------------
417
                                 Edge creation
418
---------------------------------------------------------------------------*/
419
 
420
/* Fold COND_EXPR_COND of each COND_EXPR.  */
421
 
422
void
423
fold_cond_expr_cond (void)
424
{
425
  basic_block bb;
426
 
427
  FOR_EACH_BB (bb)
428
    {
429
      tree stmt = last_stmt (bb);
430
 
431
      if (stmt
432
          && TREE_CODE (stmt) == COND_EXPR)
433
        {
434
          tree cond;
435
          bool zerop, onep;
436
 
437
          fold_defer_overflow_warnings ();
438
          cond = fold (COND_EXPR_COND (stmt));
439
          zerop = integer_zerop (cond);
440
          onep = integer_onep (cond);
441
          fold_undefer_overflow_warnings (((zerop || onep)
442
                                           && !TREE_NO_WARNING (stmt)),
443
                                          stmt,
444
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
445
          if (zerop)
446
            COND_EXPR_COND (stmt) = boolean_false_node;
447
          else if (onep)
448
            COND_EXPR_COND (stmt) = boolean_true_node;
449
        }
450
    }
451
}
452
 
453
/* Join all the blocks in the flowgraph.  */
454
 
455
static void
456
make_edges (void)
457
{
458
  basic_block bb;
459
  struct omp_region *cur_region = NULL;
460
 
461
  /* Create an edge from entry to the first block with executable
462
     statements in it.  */
463
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
464
 
465
  /* Traverse the basic block array placing edges.  */
466
  FOR_EACH_BB (bb)
467
    {
468
      tree last = last_stmt (bb);
469
      bool fallthru;
470
 
471
      if (last)
472
        {
473
          enum tree_code code = TREE_CODE (last);
474
          switch (code)
475
            {
476
            case GOTO_EXPR:
477
              make_goto_expr_edges (bb);
478
              fallthru = false;
479
              break;
480
            case RETURN_EXPR:
481
              make_edge (bb, EXIT_BLOCK_PTR, 0);
482
              fallthru = false;
483
              break;
484
            case COND_EXPR:
485
              make_cond_expr_edges (bb);
486
              fallthru = false;
487
              break;
488
            case SWITCH_EXPR:
489
              make_switch_expr_edges (bb);
490
              fallthru = false;
491
              break;
492
            case RESX_EXPR:
493
              make_eh_edges (last);
494
              fallthru = false;
495
              break;
496
 
497
            case CALL_EXPR:
498
              /* If this function receives a nonlocal goto, then we need to
499
                 make edges from this call site to all the nonlocal goto
500
                 handlers.  */
501
              if (tree_can_make_abnormal_goto (last))
502
                make_abnormal_goto_edges (bb, true);
503
 
504
              /* If this statement has reachable exception handlers, then
505
                 create abnormal edges to them.  */
506
              make_eh_edges (last);
507
 
508
              /* Some calls are known not to return.  */
509
              fallthru = !(call_expr_flags (last) & ECF_NORETURN);
510
              break;
511
 
512
            case MODIFY_EXPR:
513
              if (is_ctrl_altering_stmt (last))
514
                {
515
                  /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
516
                     CALL_EXPR may have an abnormal edge.  Search the RHS for
517
                     this case and create any required edges.  */
518
                  if (tree_can_make_abnormal_goto (last))
519
                    make_abnormal_goto_edges (bb, true);
520
 
521
                  make_eh_edges (last);
522
                }
523
              fallthru = true;
524
              break;
525
 
526
            case OMP_PARALLEL:
527
            case OMP_FOR:
528
            case OMP_SINGLE:
529
            case OMP_MASTER:
530
            case OMP_ORDERED:
531
            case OMP_CRITICAL:
532
            case OMP_SECTION:
533
              cur_region = new_omp_region (bb, code, cur_region);
534
              fallthru = true;
535
              break;
536
 
537
            case OMP_SECTIONS:
538
              cur_region = new_omp_region (bb, code, cur_region);
539
              fallthru = false;
540
              break;
541
 
542
            case OMP_RETURN:
543
              /* In the case of an OMP_SECTION, the edge will go somewhere
544
                 other than the next block.  This will be created later.  */
545
              cur_region->exit = bb;
546
              fallthru = cur_region->type != OMP_SECTION;
547
              cur_region = cur_region->outer;
548
              break;
549
 
550
            case OMP_CONTINUE:
551
              cur_region->cont = bb;
552
              switch (cur_region->type)
553
                {
554
                case OMP_FOR:
555
                  /* ??? Technically there should be some sort of loopback
556
                     edge here, but it goes to a block that doesn't exist yet,
557
                     and without it, updating the ssa form would be a real
558
                     bear.  Fortunately, we don't yet do ssa before expanding
559
                     these nodes.  */
560
                  break;
561
 
562
                case OMP_SECTIONS:
563
                  /* Wire up the edges into and out of the nested sections.  */
564
                  /* ??? Similarly wrt loopback.  */
565
                  {
566
                    struct omp_region *i;
567
                    for (i = cur_region->inner; i ; i = i->next)
568
                      {
569
                        gcc_assert (i->type == OMP_SECTION);
570
                        make_edge (cur_region->entry, i->entry, 0);
571
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
572
                      }
573
                  }
574
                  break;
575
 
576
                default:
577
                  gcc_unreachable ();
578
                }
579
              fallthru = true;
580
              break;
581
 
582
            default:
583
              gcc_assert (!stmt_ends_bb_p (last));
584
              fallthru = true;
585
            }
586
        }
587
      else
588
        fallthru = true;
589
 
590
      if (fallthru)
591
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
592
    }
593
 
594
  if (root_omp_region)
595
    free_omp_regions ();
596
 
597
  /* Fold COND_EXPR_COND of each COND_EXPR.  */
598
  fold_cond_expr_cond ();
599
 
600
  /* Clean up the graph and warn for unreachable code.  */
601
  cleanup_tree_cfg ();
602
}
603
 
604
 
605
/* Create the edges for a COND_EXPR starting at block BB.
606
   At this point, both clauses must contain only simple gotos.  */
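/* That is, the COND_EXPR has the form

       if (cond) goto <then label>; else goto <else label>;

   and we create an EDGE_TRUE_VALUE edge to the block holding <then label>
   and an EDGE_FALSE_VALUE edge to the block holding <else label>.  */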
607
 
608
static void
609
make_cond_expr_edges (basic_block bb)
610
{
611
  tree entry = last_stmt (bb);
612
  basic_block then_bb, else_bb;
613
  tree then_label, else_label;
614
  edge e;
615
 
616
  gcc_assert (entry);
617
  gcc_assert (TREE_CODE (entry) == COND_EXPR);
618
 
619
  /* Entry basic blocks for each component.  */
620
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
621
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
622
  then_bb = label_to_block (then_label);
623
  else_bb = label_to_block (else_label);
624
 
625
  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
626
#ifdef USE_MAPPED_LOCATION
627
  e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
628
#else
629
  e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
630
#endif
631
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
632
  if (e)
633
    {
634
#ifdef USE_MAPPED_LOCATION
635
      e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
636
#else
637
      e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
638
#endif
639
    }
640
}
641
 
642
/* Hashing routine for EDGE_TO_CASES.  */
643
 
644
static hashval_t
645
edge_to_cases_hash (const void *p)
646
{
647
  edge e = ((struct edge_to_cases_elt *)p)->e;
648
 
649
  /* Hash on the edge itself (which is a pointer).  */
650
  return htab_hash_pointer (e);
651
}
652
 
653
/* Equality routine for EDGE_TO_CASES; edges are unique, so testing
654
   for equality is just a pointer comparison.  */
655
 
656
static int
657
edge_to_cases_eq (const void *p1, const void *p2)
658
{
659
  edge e1 = ((struct edge_to_cases_elt *)p1)->e;
660
  edge e2 = ((struct edge_to_cases_elt *)p2)->e;
661
 
662
  return e1 == e2;
663
}
664
 
665
/* Called for each element in the hash table (P) as we delete the
666
   edge to cases hash table.
667
 
668
   Clear all the TREE_CHAINs to prevent problems with copying of
669
   SWITCH_EXPRs and structure sharing rules, then free the hash table
670
   element.  */
671
 
672
static void
673
edge_to_cases_cleanup (void *p)
674
{
675
  struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
676
  tree t, next;
677
 
678
  for (t = elt->case_labels; t; t = next)
679
    {
680
      next = TREE_CHAIN (t);
681
      TREE_CHAIN (t) = NULL;
682
    }
683
  free (p);
684
}
685
 
686
/* Start recording information mapping edges to case labels.  */
687
 
688
void
689
start_recording_case_labels (void)
690
{
691
  gcc_assert (edge_to_cases == NULL);
692
 
693
  edge_to_cases = htab_create (37,
694
                               edge_to_cases_hash,
695
                               edge_to_cases_eq,
696
                               edge_to_cases_cleanup);
697
}
698
 
699
/* Return nonzero if we are recording information for case labels.  */
700
 
701
static bool
702
recording_case_labels_p (void)
703
{
704
  return (edge_to_cases != NULL);
705
}
706
 
707
/* Stop recording information mapping edges to case labels and
708
   remove any information we have recorded.  */
709
void
710
end_recording_case_labels (void)
711
{
712
  htab_delete (edge_to_cases);
713
  edge_to_cases = NULL;
714
}
715
 
716
/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.  */
717
 
718
static void
719
record_switch_edge (edge e, tree case_label)
720
{
721
  struct edge_to_cases_elt *elt;
722
  void **slot;
723
 
724
  /* Build a hash table element so we can see if E is already
725
     in the table.  */
726
  elt = XNEW (struct edge_to_cases_elt);
727
  elt->e = e;
728
  elt->case_labels = case_label;
729
 
730
  slot = htab_find_slot (edge_to_cases, elt, INSERT);
731
 
732
  if (*slot == NULL)
733
    {
734
      /* E was not in the hash table.  Install E into the hash table.  */
735
      *slot = (void *)elt;
736
    }
737
  else
738
    {
739
      /* E was already in the hash table.  Free ELT as we do not need it
740
         anymore.  */
741
      free (elt);
742
 
743
      /* Get the entry stored in the hash table.  */
744
      elt = (struct edge_to_cases_elt *) *slot;
745
 
746
      /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
747
      TREE_CHAIN (case_label) = elt->case_labels;
748
      elt->case_labels = case_label;
749
    }
750
}
751
 
752
/* If we are inside a {start,end}_recording_cases block, then return
753
   a chain of CASE_LABEL_EXPRs from T which reference E.
754
 
755
   Otherwise return NULL.  */
756
 
757
static tree
758
get_cases_for_edge (edge e, tree t)
759
{
760
  struct edge_to_cases_elt elt, *elt_p;
761
  void **slot;
762
  size_t i, n;
763
  tree vec;
764
 
765
  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
766
     chains available.  Return NULL so the caller can detect this case.  */
767
  if (!recording_case_labels_p ())
768
    return NULL;
769
 
770
restart:
771
  elt.e = e;
772
  elt.case_labels = NULL;
773
  slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
774
 
775
  if (slot)
776
    {
777
      elt_p = (struct edge_to_cases_elt *)*slot;
778
      return elt_p->case_labels;
779
    }
780
 
781
  /* If we did not find E in the hash table, then this must be the first
782
     time we have been queried for information about E & T.  Add all the
783
     elements from T to the hash table then perform the query again.  */
784
 
785
  vec = SWITCH_LABELS (t);
786
  n = TREE_VEC_LENGTH (vec);
787
  for (i = 0; i < n; i++)
788
    {
789
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
790
      basic_block label_bb = label_to_block (lab);
791
      record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
792
    }
793
  goto restart;
794
}
795
 
796
/* Create the edges for a SWITCH_EXPR starting at block BB.
797
   At this point, the switch body has been lowered and the
798
   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */
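/* For instance, if SWITCH_LABELS holds case 0, case 5 ... 7 and the
   default case, one edge is created from BB to the block holding each of
   those labels; make_edge does not create duplicates, so cases sharing a
   destination share a single edge.  */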
799
 
800
static void
801
make_switch_expr_edges (basic_block bb)
802
{
803
  tree entry = last_stmt (bb);
804
  size_t i, n;
805
  tree vec;
806
 
807
  vec = SWITCH_LABELS (entry);
808
  n = TREE_VEC_LENGTH (vec);
809
 
810
  for (i = 0; i < n; ++i)
811
    {
812
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
813
      basic_block label_bb = label_to_block (lab);
814
      make_edge (bb, label_bb, 0);
815
    }
816
}
817
 
818
 
819
/* Return the basic block holding label DEST.  */
820
 
821
basic_block
822
label_to_block_fn (struct function *ifun, tree dest)
823
{
824
  int uid = LABEL_DECL_UID (dest);
825
 
826
  /* We would die hard when faced with an undefined label.  Emit a label to
827
     the very first basic block.  This will hopefully make even the dataflow
828
     and undefined variable warnings quite right.  */
829
  if ((errorcount || sorrycount) && uid < 0)
830
    {
831
      block_stmt_iterator bsi =
832
        bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
833
      tree stmt;
834
 
835
      stmt = build1 (LABEL_EXPR, void_type_node, dest);
836
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
837
      uid = LABEL_DECL_UID (dest);
838
    }
839
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
840
      <= (unsigned int) uid)
841
    return NULL;
842
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
843
}
844
 
845
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
846
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
847
 
848
void
849
make_abnormal_goto_edges (basic_block bb, bool for_call)
850
{
851
  basic_block target_bb;
852
  block_stmt_iterator bsi;
853
 
854
  FOR_EACH_BB (target_bb)
855
    for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
856
      {
857
        tree target = bsi_stmt (bsi);
858
 
859
        if (TREE_CODE (target) != LABEL_EXPR)
860
          break;
861
 
862
        target = LABEL_EXPR_LABEL (target);
863
 
864
        /* Make an edge to every label block that has been marked as a
865
           potential target for a computed goto or a non-local goto.  */
866
        if ((FORCED_LABEL (target) && !for_call)
867
            || (DECL_NONLOCAL (target) && for_call))
868
          {
869
            make_edge (bb, target_bb, EDGE_ABNORMAL);
870
            break;
871
          }
872
      }
873
}
874
 
875
/* Create edges for a goto statement at block BB.  */
876
 
877
static void
878
make_goto_expr_edges (basic_block bb)
879
{
880
  block_stmt_iterator last = bsi_last (bb);
881
  tree goto_t = bsi_stmt (last);
882
 
883
  /* A simple GOTO creates normal edges.  */
884
  if (simple_goto_p (goto_t))
885
    {
886
      tree dest = GOTO_DESTINATION (goto_t);
887
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
888
#ifdef USE_MAPPED_LOCATION
889
      e->goto_locus = EXPR_LOCATION (goto_t);
890
#else
891
      e->goto_locus = EXPR_LOCUS (goto_t);
892
#endif
893
      bsi_remove (&last, true);
894
      return;
895
    }
896
 
897
  /* A computed GOTO creates abnormal edges.  */
898
  make_abnormal_goto_edges (bb, false);
899
}
900
 
901
 
902
/*---------------------------------------------------------------------------
903
                               Flowgraph analysis
904
---------------------------------------------------------------------------*/
905
 
906
/* Cleanup useless labels in basic blocks.  This is something we wish
907
   to do early because it allows us to group case labels before creating
908
   the edges for the CFG, and it speeds up block statement iterators in
909
   all passes later on.
910
   We only run this pass once; running it more than once is probably not
911
   profitable.  */
912
 
913
/* A map from basic block index to the leading label of that block.  */
914
static tree *label_for_bb;
915
 
916
/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
917
static void
918
update_eh_label (struct eh_region *region)
919
{
920
  tree old_label = get_eh_region_tree_label (region);
921
  if (old_label)
922
    {
923
      tree new_label;
924
      basic_block bb = label_to_block (old_label);
925
 
926
      /* ??? After optimizing, there may be EH regions with labels
927
         that have already been removed from the function body, so
928
         there is no basic block for them.  */
929
      if (! bb)
930
        return;
931
 
932
      new_label = label_for_bb[bb->index];
933
      set_eh_region_tree_label (region, new_label);
934
    }
935
}
936
 
937
/* Given LABEL return the first label in the same basic block.  */
938
static tree
939
main_block_label (tree label)
940
{
941
  basic_block bb = label_to_block (label);
942
 
943
  /* label_to_block possibly inserted an undefined label into the chain.  */
944
  if (!label_for_bb[bb->index])
945
    label_for_bb[bb->index] = label;
946
  return label_for_bb[bb->index];
947
}
948
 
949
/* Cleanup redundant labels.  This is a three-step process:
950
     1) Find the leading label for each block.
951
     2) Redirect all references to labels to the leading labels.
952
     3) Cleanup all useless labels.  */
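/* For example, if a block starts with

       L1:
       <D.1234>:
       x = x + 1;

   then the user label L1 is chosen as the leading label, every goto,
   conditional branch, switch case and EH region that referred to the
   artificial label <D.1234> is redirected to L1, and <D.1234>, now
   unreferenced, is deleted.  */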
953
 
954
void
955
cleanup_dead_labels (void)
956
{
957
  basic_block bb;
958
  label_for_bb = XCNEWVEC (tree, last_basic_block);
959
 
960
  /* Find a suitable label for each block.  We use the first user-defined
961
     label if there is one, or otherwise just the first label we see.  */
962
  FOR_EACH_BB (bb)
963
    {
964
      block_stmt_iterator i;
965
 
966
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
967
        {
968
          tree label, stmt = bsi_stmt (i);
969
 
970
          if (TREE_CODE (stmt) != LABEL_EXPR)
971
            break;
972
 
973
          label = LABEL_EXPR_LABEL (stmt);
974
 
975
          /* If we have not yet seen a label for the current block,
976
             remember this one and see if there are more labels.  */
977
          if (! label_for_bb[bb->index])
978
            {
979
              label_for_bb[bb->index] = label;
980
              continue;
981
            }
982
 
983
          /* If we did see a label for the current block already, but it
984
             is an artificially created label, replace it if the current
985
             label is a user defined label.  */
986
          if (! DECL_ARTIFICIAL (label)
987
              && DECL_ARTIFICIAL (label_for_bb[bb->index]))
988
            {
989
              label_for_bb[bb->index] = label;
990
              break;
991
            }
992
        }
993
    }
994
 
995
  /* Now redirect all jumps/branches to the selected label.
996
     First do so for each block ending in a control statement.  */
997
  FOR_EACH_BB (bb)
998
    {
999
      tree stmt = last_stmt (bb);
1000
      if (!stmt)
1001
        continue;
1002
 
1003
      switch (TREE_CODE (stmt))
1004
        {
1005
        case COND_EXPR:
1006
          {
1007
            tree true_branch, false_branch;
1008
 
1009
            true_branch = COND_EXPR_THEN (stmt);
1010
            false_branch = COND_EXPR_ELSE (stmt);
1011
 
1012
            GOTO_DESTINATION (true_branch)
1013
              = main_block_label (GOTO_DESTINATION (true_branch));
1014
            GOTO_DESTINATION (false_branch)
1015
              = main_block_label (GOTO_DESTINATION (false_branch));
1016
 
1017
            break;
1018
          }
1019
 
1020
        case SWITCH_EXPR:
1021
          {
1022
            size_t i;
1023
            tree vec = SWITCH_LABELS (stmt);
1024
            size_t n = TREE_VEC_LENGTH (vec);
1025
 
1026
            /* Replace all destination labels.  */
1027
            for (i = 0; i < n; ++i)
1028
              {
1029
                tree elt = TREE_VEC_ELT (vec, i);
1030
                tree label = main_block_label (CASE_LABEL (elt));
1031
                CASE_LABEL (elt) = label;
1032
              }
1033
            break;
1034
          }
1035
 
1036
        /* We have to handle GOTO_EXPRs until they're removed, and we don't
1037
           remove them until after we've created the CFG edges.  */
1038
        case GOTO_EXPR:
1039
          if (! computed_goto_p (stmt))
1040
            {
1041
              GOTO_DESTINATION (stmt)
1042
                = main_block_label (GOTO_DESTINATION (stmt));
1043
              break;
1044
            }
1045
 
1046
        default:
1047
          break;
1048
      }
1049
    }
1050
 
1051
  for_each_eh_region (update_eh_label);
1052
 
1053
  /* Finally, purge dead labels.  All user-defined labels and labels that
1054
     can be the target of non-local gotos and labels which have their
1055
     address taken are preserved.  */
1056
  FOR_EACH_BB (bb)
1057
    {
1058
      block_stmt_iterator i;
1059
      tree label_for_this_bb = label_for_bb[bb->index];
1060
 
1061
      if (! label_for_this_bb)
1062
        continue;
1063
 
1064
      for (i = bsi_start (bb); !bsi_end_p (i); )
1065
        {
1066
          tree label, stmt = bsi_stmt (i);
1067
 
1068
          if (TREE_CODE (stmt) != LABEL_EXPR)
1069
            break;
1070
 
1071
          label = LABEL_EXPR_LABEL (stmt);
1072
 
1073
          if (label == label_for_this_bb
1074
              || ! DECL_ARTIFICIAL (label)
1075
              || DECL_NONLOCAL (label)
1076
              || FORCED_LABEL (label))
1077
            bsi_next (&i);
1078
          else
1079
            bsi_remove (&i, true);
1080
        }
1081
    }
1082
 
1083
  free (label_for_bb);
1084
}
1085
 
1086
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1087
   and scan the sorted vector of cases.  Combine the ones jumping to the
1088
   same label.
1089
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1090
 
1091
void
1092
group_case_labels (void)
1093
{
1094
  basic_block bb;
1095
 
1096
  FOR_EACH_BB (bb)
1097
    {
1098
      tree stmt = last_stmt (bb);
1099
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1100
        {
1101
          tree labels = SWITCH_LABELS (stmt);
1102
          int old_size = TREE_VEC_LENGTH (labels);
1103
          int i, j, new_size = old_size;
1104
          tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1105
          tree default_label;
1106
 
1107
          /* The default label is always the last case in a switch
1108
             statement after gimplification.  */
1109
          default_label = CASE_LABEL (default_case);
1110
 
1111
          /* Look for possible opportunities to merge cases.
1112
             Ignore the last element of the label vector because it
1113
             must be the default case.  */
1114
          i = 0;
1115
          while (i < old_size - 1)
1116
            {
1117
              tree base_case, base_label, base_high;
1118
              base_case = TREE_VEC_ELT (labels, i);
1119
 
1120
              gcc_assert (base_case);
1121
              base_label = CASE_LABEL (base_case);
1122
 
1123
              /* Discard cases that have the same destination as the
1124
                 default case.  */
1125
              if (base_label == default_label)
1126
                {
1127
                  TREE_VEC_ELT (labels, i) = NULL_TREE;
1128
                  i++;
1129
                  new_size--;
1130
                  continue;
1131
                }
1132
 
1133
              base_high = CASE_HIGH (base_case) ?
1134
                CASE_HIGH (base_case) : CASE_LOW (base_case);
1135
              i++;
1136
              /* Try to merge case labels.  Break out when we reach the end
1137
                 of the label vector or when we cannot merge the next case
1138
                 label with the current one.  */
1139
              while (i < old_size - 1)
1140
                {
1141
                  tree merge_case = TREE_VEC_ELT (labels, i);
1142
                  tree merge_label = CASE_LABEL (merge_case);
1143
                  tree t = int_const_binop (PLUS_EXPR, base_high,
1144
                                            integer_one_node, 1);
1145
 
1146
                  /* Merge the cases if they jump to the same place,
1147
                     and their ranges are consecutive.  */
1148
                  if (merge_label == base_label
1149
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
1150
                    {
1151
                      base_high = CASE_HIGH (merge_case) ?
1152
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1153
                      CASE_HIGH (base_case) = base_high;
1154
                      TREE_VEC_ELT (labels, i) = NULL_TREE;
1155
                      new_size--;
1156
                      i++;
1157
                    }
1158
                  else
1159
                    break;
1160
                }
1161
            }
1162
 
1163
          /* Compress the case labels in the label vector, and adjust the
1164
             length of the vector.  */
1165
          for (i = 0, j = 0; i < new_size; i++)
1166
            {
1167
              while (! TREE_VEC_ELT (labels, j))
1168
                j++;
1169
              TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1170
            }
1171
          TREE_VEC_LENGTH (labels) = new_size;
1172
        }
1173
    }
1174
}
1175
 
1176
/* Checks whether we can merge block B into block A.  */
1177
 
1178
static bool
1179
tree_can_merge_blocks_p (basic_block a, basic_block b)
1180
{
1181
  tree stmt;
1182
  block_stmt_iterator bsi;
1183
  tree phi;
1184
 
1185
  if (!single_succ_p (a))
1186
    return false;
1187
 
1188
  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1189
    return false;
1190
 
1191
  if (single_succ (a) != b)
1192
    return false;
1193
 
1194
  if (!single_pred_p (b))
1195
    return false;
1196
 
1197
  if (b == EXIT_BLOCK_PTR)
1198
    return false;
1199
 
1200
  /* If A ends with a statement causing exceptions or something similar, we
1201
     cannot merge the blocks.  */
1202
  stmt = last_stmt (a);
1203
  if (stmt && stmt_ends_bb_p (stmt))
1204
    return false;
1205
 
1206
  /* Do not allow a block with only a non-local label to be merged.  */
1207
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1208
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1209
    return false;
1210
 
1211
  /* It must be possible to eliminate all phi nodes in B.  If ssa form
1212
     is not up-to-date, we cannot eliminate any phis.  */
1213
  phi = phi_nodes (b);
1214
  if (phi)
1215
    {
1216
      if (need_ssa_update_p ())
1217
        return false;
1218
 
1219
      for (; phi; phi = PHI_CHAIN (phi))
1220
        if (!is_gimple_reg (PHI_RESULT (phi))
1221
            && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1222
          return false;
1223
    }
1224
 
1225
  /* Do not remove user labels.  */
1226
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1227
    {
1228
      stmt = bsi_stmt (bsi);
1229
      if (TREE_CODE (stmt) != LABEL_EXPR)
1230
        break;
1231
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1232
        return false;
1233
    }
1234
 
1235
  /* Protect the loop latches.  */
1236
  if (current_loops
1237
      && b->loop_father->latch == b)
1238
    return false;
1239
 
1240
  return true;
1241
}
1242
 
1243
/* Replaces all uses of NAME by VAL.  */
1244
 
1245
void
1246
replace_uses_by (tree name, tree val)
1247
{
1248
  imm_use_iterator imm_iter;
1249
  use_operand_p use;
1250
  tree stmt;
1251
  edge e;
1252
  unsigned i;
1253
 
1254
 
1255
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1256
    {
1257
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1258
        {
1259
          replace_exp (use, val);
1260
 
1261
          if (TREE_CODE (stmt) == PHI_NODE)
1262
            {
1263
              e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1264
              if (e->flags & EDGE_ABNORMAL)
1265
                {
1266
                  /* This can only occur for virtual operands, since
1267
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1268
                     would prevent replacement.  */
1269
                  gcc_assert (!is_gimple_reg (name));
1270
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1271
                }
1272
            }
1273
        }
1274
      if (TREE_CODE (stmt) != PHI_NODE)
1275
        {
1276
          tree rhs;
1277
 
1278
          fold_stmt_inplace (stmt);
1279
          rhs = get_rhs (stmt);
1280
          if (TREE_CODE (rhs) == ADDR_EXPR)
1281
            recompute_tree_invariant_for_addr_expr (rhs);
1282
 
1283
          maybe_clean_or_replace_eh_stmt (stmt, stmt);
1284
          mark_new_vars_to_rename (stmt);
1285
        }
1286
    }
1287
 
1288
  gcc_assert (num_imm_uses (name) == 0);
1289
 
1290
  /* Also update the trees stored in loop structures.  */
1291
  if (current_loops)
1292
    {
1293
      struct loop *loop;
1294
 
1295
      for (i = 0; i < current_loops->num; i++)
1296
        {
1297
          loop = current_loops->parray[i];
1298
          if (loop)
1299
            substitute_in_loop_info (loop, name, val);
1300
        }
1301
    }
1302
}
1303
 
1304
/* Merge block B into block A.  */
1305
 
1306
static void
1307
tree_merge_blocks (basic_block a, basic_block b)
1308
{
1309
  block_stmt_iterator bsi;
1310
  tree_stmt_iterator last;
1311
  tree phi;
1312
 
1313
  if (dump_file)
1314
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1315
 
1316
  /* Remove all single-valued PHI nodes from block B of the form
1317
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1318
  bsi = bsi_last (a);
1319
  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1320
    {
1321
      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1322
      tree copy;
1323
      bool may_replace_uses = may_propagate_copy (def, use);
1324
 
1325
      /* In case we have loops to care about, do not propagate arguments of
1326
         loop closed ssa phi nodes.  */
1327
      if (current_loops
1328
          && is_gimple_reg (def)
1329
          && TREE_CODE (use) == SSA_NAME
1330
          && a->loop_father != b->loop_father)
1331
        may_replace_uses = false;
1332
 
1333
      if (!may_replace_uses)
1334
        {
1335
          gcc_assert (is_gimple_reg (def));
1336
 
1337
          /* Note that just emitting the copies is fine -- there is no problem
1338
             with ordering of phi nodes.  This is because A is the single
1339
             predecessor of B, therefore results of the phi nodes cannot
1340
             appear as arguments of the phi nodes.  */
1341
          copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1342
          bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1343
          SET_PHI_RESULT (phi, NULL_TREE);
1344
          SSA_NAME_DEF_STMT (def) = copy;
1345
        }
1346
      else
1347
        replace_uses_by (def, use);
1348
 
1349
      remove_phi_node (phi, NULL);
1350
    }
1351
 
1352
  /* Ensure that B follows A.  */
1353
  move_block_after (b, a);
1354
 
1355
  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1356
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1357
 
1358
  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
1359
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1360
    {
1361
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1362
        {
1363
          tree label = bsi_stmt (bsi);
1364
 
1365
          bsi_remove (&bsi, false);
1366
          /* Now that we can thread computed gotos, we might have
1367
             a situation where we have a forced label in block B.
1368
             However, the label at the start of block B might still be
1369
             used in other ways (think about the runtime checking for
1370
             Fortran assigned gotos).  So we cannot just delete the
1371
             label.  Instead we move the label to the start of block A.  */
1372
          if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1373
            {
1374
              block_stmt_iterator dest_bsi = bsi_start (a);
1375
              bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1376
            }
1377
        }
1378
      else
1379
        {
1380
          change_bb_for_stmt (bsi_stmt (bsi), a);
1381
          bsi_next (&bsi);
1382
        }
1383
    }
1384
 
1385
  /* Merge the chains.  */
1386
  last = tsi_last (a->stmt_list);
1387
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1388
  b->stmt_list = NULL;
1389
}
1390
 
1391
 
1392
/* Return the one of two successors of BB that is not reachable by a
1393
   complex edge, if there is one.  Else, return BB.  We use
1394
   this in optimizations that use post-dominators for their heuristics,
1395
   to catch the cases in C++ where function calls are involved.  */
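/* A typical case is a block ending in a call that may throw: it has a
   normal fall-through successor and an exception (EDGE_COMPLEX) successor,
   and this function returns the fall-through destination.  */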
1396
 
1397
basic_block
1398
single_noncomplex_succ (basic_block bb)
1399
{
1400
  edge e0, e1;
1401
  if (EDGE_COUNT (bb->succs) != 2)
1402
    return bb;
1403
 
1404
  e0 = EDGE_SUCC (bb, 0);
1405
  e1 = EDGE_SUCC (bb, 1);
1406
  if (e0->flags & EDGE_COMPLEX)
1407
    return e1->dest;
1408
  if (e1->flags & EDGE_COMPLEX)
1409
    return e0->dest;
1410
 
1411
  return bb;
1412
}
1413
 
1414
 
1415
/* Walk the function tree removing unnecessary statements.
1416
 
1417
     * Empty statement nodes are removed
1418
 
1419
     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1420
 
1421
     * Unnecessary COND_EXPRs are removed
1422
 
1423
     * Some unnecessary BIND_EXPRs are removed
1424
 
1425
   Clearly more work could be done.  The trick is doing the analysis
1426
   and removal fast enough to be a net improvement in compile times.
1427
 
1428
   Note that when we remove a control structure such as a COND_EXPR,
1429
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1430
   to ensure we eliminate all the useless code.  */
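/* For example, a TRY_FINALLY_EXPR whose body has no side effects is
   replaced by its cleanup sequence alone, and a COND_EXPR neither of whose
   arms has side effects is dropped entirely.  Each such change sets
   data->repeat so another iteration can pick up newly exposed
   opportunities.  */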
1431
 
1432
struct rus_data
1433
{
1434
  tree *last_goto;
1435
  bool repeat;
1436
  bool may_throw;
1437
  bool may_branch;
1438
  bool has_label;
1439
};
1440
 
1441
static void remove_useless_stmts_1 (tree *, struct rus_data *);
1442
 
1443
static bool
1444
remove_useless_stmts_warn_notreached (tree stmt)
1445
{
1446
  if (EXPR_HAS_LOCATION (stmt))
1447
    {
1448
      location_t loc = EXPR_LOCATION (stmt);
1449
      if (LOCATION_LINE (loc) > 0)
1450
        {
1451
          warning (0, "%Hwill never be executed", &loc);
1452
          return true;
1453
        }
1454
    }
1455
 
1456
  switch (TREE_CODE (stmt))
1457
    {
1458
    case STATEMENT_LIST:
1459
      {
1460
        tree_stmt_iterator i;
1461
        for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1462
          if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1463
            return true;
1464
      }
1465
      break;
1466
 
1467
    case COND_EXPR:
1468
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1469
        return true;
1470
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1471
        return true;
1472
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1473
        return true;
1474
      break;
1475
 
1476
    case TRY_FINALLY_EXPR:
1477
    case TRY_CATCH_EXPR:
1478
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1479
        return true;
1480
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1481
        return true;
1482
      break;
1483
 
1484
    case CATCH_EXPR:
1485
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1486
    case EH_FILTER_EXPR:
1487
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1488
    case BIND_EXPR:
1489
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1490
 
1491
    default:
1492
      /* Not a live container.  */
1493
      break;
1494
    }
1495
 
1496
  return false;
1497
}
1498
 
1499
static void
1500
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1501
{
1502
  tree then_clause, else_clause, cond;
1503
  bool save_has_label, then_has_label, else_has_label;
1504
 
1505
  save_has_label = data->has_label;
1506
  data->has_label = false;
1507
  data->last_goto = NULL;
1508
 
1509
  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1510
 
1511
  then_has_label = data->has_label;
1512
  data->has_label = false;
1513
  data->last_goto = NULL;
1514
 
1515
  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1516
 
1517
  else_has_label = data->has_label;
1518
  data->has_label = save_has_label | then_has_label | else_has_label;
1519
 
1520
  then_clause = COND_EXPR_THEN (*stmt_p);
1521
  else_clause = COND_EXPR_ELSE (*stmt_p);
1522
  cond = fold (COND_EXPR_COND (*stmt_p));
1523
 
1524
  /* If neither arm does anything at all, we can remove the whole IF.  */
1525
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1526
    {
1527
      *stmt_p = build_empty_stmt ();
1528
      data->repeat = true;
1529
    }
1530
 
1531
  /* If there are no reachable statements in an arm, then we can
1532
     zap the entire conditional.  */
1533
  else if (integer_nonzerop (cond) && !else_has_label)
1534
    {
1535
      if (warn_notreached)
1536
        remove_useless_stmts_warn_notreached (else_clause);
1537
      *stmt_p = then_clause;
1538
      data->repeat = true;
1539
    }
1540
  else if (integer_zerop (cond) && !then_has_label)
1541
    {
1542
      if (warn_notreached)
1543
        remove_useless_stmts_warn_notreached (then_clause);
1544
      *stmt_p = else_clause;
1545
      data->repeat = true;
1546
    }
1547
 
1548
  /* Check a couple of simple things on then/else with single stmts.  */
1549
  else
1550
    {
1551
      tree then_stmt = expr_only (then_clause);
1552
      tree else_stmt = expr_only (else_clause);
1553
 
1554
      /* Notice branches to a common destination.  */
1555
      if (then_stmt && else_stmt
1556
          && TREE_CODE (then_stmt) == GOTO_EXPR
1557
          && TREE_CODE (else_stmt) == GOTO_EXPR
1558
          && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1559
        {
1560
          *stmt_p = then_stmt;
1561
          data->repeat = true;
1562
        }
1563
 
1564
      /* If the THEN/ELSE clause merely assigns a value to a variable or
1565
         parameter which is already known to contain that value, then
1566
         remove the useless THEN/ELSE clause.  */
1567
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1568
        {
1569
          if (else_stmt
1570
              && TREE_CODE (else_stmt) == MODIFY_EXPR
1571
              && TREE_OPERAND (else_stmt, 0) == cond
1572
              && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1573
            COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1574
        }
1575
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1576
               && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1577
                   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1578
               && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1579
        {
1580
          tree stmt = (TREE_CODE (cond) == EQ_EXPR
1581
                       ? then_stmt : else_stmt);
1582
          tree *location = (TREE_CODE (cond) == EQ_EXPR
1583
                            ? &COND_EXPR_THEN (*stmt_p)
1584
                            : &COND_EXPR_ELSE (*stmt_p));
1585
 
1586
          if (stmt
1587
              && TREE_CODE (stmt) == MODIFY_EXPR
1588
              && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1589
              && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1590
            *location = alloc_stmt_list ();
1591
        }
1592
    }
1593
 
1594
  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
1595
     would be re-introduced during lowering.  */
1596
  data->last_goto = NULL;
1597
}
1598
 
1599
 
1600
static void
1601
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1602
{
1603
  bool save_may_branch, save_may_throw;
1604
  bool this_may_branch, this_may_throw;
1605
 
1606
  /* Collect may_branch and may_throw information for the body only.  */
1607
  save_may_branch = data->may_branch;
1608
  save_may_throw = data->may_throw;
1609
  data->may_branch = false;
1610
  data->may_throw = false;
1611
  data->last_goto = NULL;
1612
 
1613
  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1614
 
1615
  this_may_branch = data->may_branch;
1616
  this_may_throw = data->may_throw;
1617
  data->may_branch |= save_may_branch;
1618
  data->may_throw |= save_may_throw;
1619
  data->last_goto = NULL;
1620
 
1621
  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1622
 
1623
  /* If the body is empty, then we can emit the FINALLY block without
1624
     the enclosing TRY_FINALLY_EXPR.  */
1625
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1626
    {
1627
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
1628
      data->repeat = true;
1629
    }
1630
 
1631
  /* If the handler is empty, then we can emit the TRY block without
1632
     the enclosing TRY_FINALLY_EXPR.  */
1633
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1634
    {
1635
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
1636
      data->repeat = true;
1637
    }
1638
 
1639
  /* If the body neither throws, nor branches, then we can safely
1640
     string the TRY and FINALLY blocks together.  */
1641
  else if (!this_may_branch && !this_may_throw)
1642
    {
1643
      tree stmt = *stmt_p;
1644
      *stmt_p = TREE_OPERAND (stmt, 0);
1645
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1646
      data->repeat = true;
1647
    }
1648
}
1649
 
1650
 
1651
static void
1652
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1653
{
1654
  bool save_may_throw, this_may_throw;
1655
  tree_stmt_iterator i;
1656
  tree stmt;
1657
 
1658
  /* Collect may_throw information for the body only.  */
1659
  save_may_throw = data->may_throw;
1660
  data->may_throw = false;
1661
  data->last_goto = NULL;
1662
 
1663
  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1664
 
1665
  this_may_throw = data->may_throw;
1666
  data->may_throw = save_may_throw;
1667
 
1668
  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
1669
  if (!this_may_throw)
1670
    {
1671
      if (warn_notreached)
1672
        remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1673
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
1674
      data->repeat = true;
1675
      return;
1676
    }
1677
 
1678
  /* Process the catch clause specially.  We may be able to tell that
1679
     no exceptions propagate past this point.  */
1680
 
1681
  this_may_throw = true;
1682
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1683
  stmt = tsi_stmt (i);
1684
  data->last_goto = NULL;
1685
 
1686
  switch (TREE_CODE (stmt))
1687
    {
1688
    case CATCH_EXPR:
1689
      for (; !tsi_end_p (i); tsi_next (&i))
1690
        {
1691
          stmt = tsi_stmt (i);
1692
          /* If we catch all exceptions, then the body does not
1693
             propagate exceptions past this point.  */
1694
          if (CATCH_TYPES (stmt) == NULL)
1695
            this_may_throw = false;
1696
          data->last_goto = NULL;
1697
          remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1698
        }
1699
      break;
1700
 
1701
    case EH_FILTER_EXPR:
1702
      if (EH_FILTER_MUST_NOT_THROW (stmt))
1703
        this_may_throw = false;
1704
      else if (EH_FILTER_TYPES (stmt) == NULL)
1705
        this_may_throw = false;
1706
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1707
      break;
1708
 
1709
    default:
1710
      /* Otherwise this is a cleanup.  */
1711
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1712
 
1713
      /* If the cleanup is empty, then we can emit the TRY block without
1714
         the enclosing TRY_CATCH_EXPR.  */
1715
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1716
        {
1717
          *stmt_p = TREE_OPERAND (*stmt_p, 0);
1718
          data->repeat = true;
1719
        }
1720
      break;
1721
    }
1722
  data->may_throw |= this_may_throw;
1723
}
1724
 
1725
 
1726
static void
1727
remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1728
{
1729
  tree block;
1730
 
1731
  /* First remove anything underneath the BIND_EXPR.  */
1732
  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1733
 
1734
  /* If the BIND_EXPR has no variables, then we can pull everything
1735
     up one level and remove the BIND_EXPR, unless this is the toplevel
1736
     BIND_EXPR for the current function or an inlined function.
1737
 
1738
     When this situation occurs, we will want to apply this
1739
     optimization again.  */
1740
  block = BIND_EXPR_BLOCK (*stmt_p);
1741
  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1742
      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1743
      && (! block
1744
          || ! BLOCK_ABSTRACT_ORIGIN (block)
1745
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1746
              != FUNCTION_DECL)))
1747
    {
1748
      *stmt_p = BIND_EXPR_BODY (*stmt_p);
1749
      data->repeat = true;
1750
    }
1751
}
1752
 
1753
 
1754
static void
1755
remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1756
{
1757
  tree dest = GOTO_DESTINATION (*stmt_p);
1758
 
1759
  data->may_branch = true;
1760
  data->last_goto = NULL;
1761
 
1762
  /* Record the last goto expr, so that we can delete it if unnecessary.  */
1763
  if (TREE_CODE (dest) == LABEL_DECL)
1764
    data->last_goto = stmt_p;
1765
}
1766
 
1767
 
1768
static void
1769
remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1770
{
1771
  tree label = LABEL_EXPR_LABEL (*stmt_p);
1772
 
1773
  data->has_label = true;
1774
 
1775
  /* We do want to jump across non-local label receiver code.  */
1776
  if (DECL_NONLOCAL (label))
1777
    data->last_goto = NULL;
1778
 
1779
  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1780
    {
1781
      *data->last_goto = build_empty_stmt ();
1782
      data->repeat = true;
1783
    }
1784
 
1785
  /* ??? Add something here to delete unused labels.  */
1786
}
1787
 
1788
 
1789
/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1790
   CALL_EXPR.  This allows us to eliminate redundant or useless
1791
   calls to "const" functions.
1792
 
1793
   The gimplifier already does the same operation, but we may notice functions
1794
   being const or pure only after their calls have been gimplified, so we need
1795
   to update the flags.  */
1796
 
1797
static void
1798
update_call_expr_flags (tree call)
1799
{
1800
  tree decl = get_callee_fndecl (call);
1801
  if (!decl)
1802
    return;
1803
  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1804
    TREE_SIDE_EFFECTS (call) = 0;
1805
  if (TREE_NOTHROW (decl))
1806
    TREE_NOTHROW (call) = 1;
1807
}
1808
 
1809
 
1810
/* T is CALL_EXPR.  Set current_function_calls_* flags.  */
1811
 
1812
void
1813
notice_special_calls (tree t)
1814
{
1815
  int flags = call_expr_flags (t);
1816
 
1817
  if (flags & ECF_MAY_BE_ALLOCA)
1818
    current_function_calls_alloca = true;
1819
  if (flags & ECF_RETURNS_TWICE)
1820
    current_function_calls_setjmp = true;
1821
}
1822
 
1823
 
1824
/* Clear flags set by notice_special_calls.  Used by dead code removal
1825
   to update the flags.  */
1826
 
1827
void
1828
clear_special_calls (void)
1829
{
1830
  current_function_calls_alloca = false;
1831
  current_function_calls_setjmp = false;
1832
}
1833
 
1834
 
1835
static void
1836
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1837
{
1838
  tree t = *tp, op;
1839
 
1840
  switch (TREE_CODE (t))
1841
    {
1842
    case COND_EXPR:
1843
      remove_useless_stmts_cond (tp, data);
1844
      break;
1845
 
1846
    case TRY_FINALLY_EXPR:
1847
      remove_useless_stmts_tf (tp, data);
1848
      break;
1849
 
1850
    case TRY_CATCH_EXPR:
1851
      remove_useless_stmts_tc (tp, data);
1852
      break;
1853
 
1854
    case BIND_EXPR:
1855
      remove_useless_stmts_bind (tp, data);
1856
      break;
1857
 
1858
    case GOTO_EXPR:
1859
      remove_useless_stmts_goto (tp, data);
1860
      break;
1861
 
1862
    case LABEL_EXPR:
1863
      remove_useless_stmts_label (tp, data);
1864
      break;
1865
 
1866
    case RETURN_EXPR:
1867
      fold_stmt (tp);
1868
      data->last_goto = NULL;
1869
      data->may_branch = true;
1870
      break;
1871
 
1872
    case CALL_EXPR:
1873
      fold_stmt (tp);
1874
      data->last_goto = NULL;
1875
      notice_special_calls (t);
1876
      update_call_expr_flags (t);
1877
      if (tree_could_throw_p (t))
1878
        data->may_throw = true;
1879
      break;
1880
 
1881
    case MODIFY_EXPR:
1882
      data->last_goto = NULL;
1883
      fold_stmt (tp);
1884
      op = get_call_expr_in (t);
1885
      if (op)
1886
        {
1887
          update_call_expr_flags (op);
1888
          notice_special_calls (op);
1889
        }
1890
      if (tree_could_throw_p (t))
1891
        data->may_throw = true;
1892
      break;
1893
 
1894
    case STATEMENT_LIST:
1895
      {
1896
        tree_stmt_iterator i = tsi_start (t);
1897
        while (!tsi_end_p (i))
1898
          {
1899
            t = tsi_stmt (i);
1900
            if (IS_EMPTY_STMT (t))
1901
              {
1902
                tsi_delink (&i);
1903
                continue;
1904
              }
1905
 
1906
            remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1907
 
1908
            t = tsi_stmt (i);
1909
            if (TREE_CODE (t) == STATEMENT_LIST)
1910
              {
1911
                tsi_link_before (&i, t, TSI_SAME_STMT);
1912
                tsi_delink (&i);
1913
              }
1914
            else
1915
              tsi_next (&i);
1916
          }
1917
      }
1918
      break;
1919
    case ASM_EXPR:
1920
      fold_stmt (tp);
1921
      data->last_goto = NULL;
1922
      break;
1923
 
1924
    default:
1925
      data->last_goto = NULL;
1926
      break;
1927
    }
1928
}
1929
 
1930
static unsigned int
1931
remove_useless_stmts (void)
1932
{
1933
  struct rus_data data;
1934
 
1935
  clear_special_calls ();
1936
 
1937
  do
1938
    {
1939
      memset (&data, 0, sizeof (data));
1940
      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1941
    }
1942
  while (data.repeat);
1943
  return 0;
1944
}
1945
 
1946
 
1947
struct tree_opt_pass pass_remove_useless_stmts =
1948
{
1949
  "useless",                            /* name */
1950
  NULL,                                 /* gate */
1951
  remove_useless_stmts,                 /* execute */
1952
  NULL,                                 /* sub */
1953
  NULL,                                 /* next */
1954
  0,                                     /* static_pass_number */
1955
  0,                                     /* tv_id */
1956
  PROP_gimple_any,                      /* properties_required */
1957
  0,                                     /* properties_provided */
1958
  0,                                     /* properties_destroyed */
1959
  0,                                     /* todo_flags_start */
1960
  TODO_dump_func,                       /* todo_flags_finish */
1961
 
1962
};
1963
 
1964
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1965
 
1966
static void
1967
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1968
{
1969
  tree phi;
1970
 
1971
  /* Since this block is no longer reachable, we can just delete all
1972
     of its PHI nodes.  */
1973
  phi = phi_nodes (bb);
1974
  while (phi)
1975
    {
1976
      tree next = PHI_CHAIN (phi);
1977
      remove_phi_node (phi, NULL_TREE);
1978
      phi = next;
1979
    }
1980
 
1981
  /* Remove edges to BB's successors.  */
1982
  while (EDGE_COUNT (bb->succs) > 0)
1983
    remove_edge (EDGE_SUCC (bb, 0));
1984
}
1985
 
1986
 
1987
/* Remove statements of basic block BB.  */
1988
 
1989
static void
1990
remove_bb (basic_block bb)
1991
{
1992
  block_stmt_iterator i;
1993
#ifdef USE_MAPPED_LOCATION
1994
  source_location loc = UNKNOWN_LOCATION;
1995
#else
1996
  source_locus loc = 0;
1997
#endif
1998
 
1999
  if (dump_file)
2000
    {
2001
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
2002
      if (dump_flags & TDF_DETAILS)
2003
        {
2004
          dump_bb (bb, dump_file, 0);
2005
          fprintf (dump_file, "\n");
2006
        }
2007
    }
2008
 
2009
  /* If we remove the header or the latch of a loop, mark the loop for
2010
     removal by setting its header and latch to NULL.  */
2011
  if (current_loops)
2012
    {
2013
      struct loop *loop = bb->loop_father;
2014
 
2015
      if (loop->latch == bb
2016
          || loop->header == bb)
2017
        {
2018
          loop->latch = NULL;
2019
          loop->header = NULL;
2020
 
2021
          /* Also clean up the information associated with the loop.  Updating
2022
             it would waste time. More importantly, it may refer to ssa
2023
             names that were defined in other removed basic blocks -- these
2024
             ssa names are now removed and invalid.  */
2025
          free_numbers_of_iterations_estimates_loop (loop);
2026
        }
2027
    }
2028
 
2029
  /* Remove all the instructions in the block.  */
2030
  for (i = bsi_start (bb); !bsi_end_p (i);)
2031
    {
2032
      tree stmt = bsi_stmt (i);
2033
      if (TREE_CODE (stmt) == LABEL_EXPR
2034
          && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2035
              || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2036
        {
2037
          basic_block new_bb;
2038
          block_stmt_iterator new_bsi;
2039
 
2040
          /* A non-reachable non-local label may still be referenced.
2041
             But it no longer needs to carry the extra semantics of
2042
             non-locality.  */
2043
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2044
            {
2045
              DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2046
              FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2047
            }
2048
 
2049
          new_bb = bb->prev_bb;
2050
          new_bsi = bsi_start (new_bb);
2051
          bsi_remove (&i, false);
2052
          bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2053
        }
2054
      else
2055
        {
2056
          /* Release SSA definitions if we are in SSA.  Note that we
2057
             may be called when not in SSA.  For example,
2058
             final_cleanup calls this function via
2059
             cleanup_tree_cfg.  */
2060
          if (in_ssa_p)
2061
            release_defs (stmt);
2062
 
2063
          bsi_remove (&i, true);
2064
        }
2065
 
2066
      /* Don't warn for removed gotos.  Gotos are often removed due to
2067
         jump threading, thus resulting in bogus warnings.  Not great,
2068
         since this way we lose warnings for gotos in the original
2069
         program that are indeed unreachable.  */
2070
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2071
        {
2072
#ifdef USE_MAPPED_LOCATION
2073
          if (EXPR_HAS_LOCATION (stmt))
2074
            loc = EXPR_LOCATION (stmt);
2075
#else
2076
          source_locus t;
2077
          t = EXPR_LOCUS (stmt);
2078
          if (t && LOCATION_LINE (*t) > 0)
2079
            loc = t;
2080
#endif
2081
        }
2082
    }
2083
 
2084
  /* If requested, give a warning that the first statement in the
2085
     block is unreachable.  The loop above walks statements forwards and
2086
     only records the location of the first statement that has one, so
2087
     LOC refers to the first statement in the block.  */
2088
#ifdef USE_MAPPED_LOCATION
2089
  if (loc > BUILTINS_LOCATION)
2090
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2091
#else
2092
  if (loc)
2093
    warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2094
#endif
2095
 
2096
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
2097
}
2098
 
2099
 
2100
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2101
   predicate VAL, return the edge that will be taken out of the block.
2102
   If VAL does not match a unique edge, NULL is returned.  */
2103
 
2104
edge
2105
find_taken_edge (basic_block bb, tree val)
2106
{
2107
  tree stmt;
2108
 
2109
  stmt = last_stmt (bb);
2110
 
2111
  gcc_assert (stmt);
2112
  gcc_assert (is_ctrl_stmt (stmt));
2113
  gcc_assert (val);
2114
 
2115
  if (! is_gimple_min_invariant (val))
2116
    return NULL;
2117
 
2118
  if (TREE_CODE (stmt) == COND_EXPR)
2119
    return find_taken_edge_cond_expr (bb, val);
2120
 
2121
  if (TREE_CODE (stmt) == SWITCH_EXPR)
2122
    return find_taken_edge_switch_expr (bb, val);
2123
 
2124
  if (computed_goto_p (stmt))
2125
    {
2126
      /* Only optimize if the argument is a label; if the argument is
2127
         not a label then we cannot construct a proper CFG.
2128
 
2129
         It may be the case that we only need to allow the LABEL_REF to
2130
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2131
         appear inside a LABEL_EXPR just to be safe.  */
2132
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2133
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2134
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2135
      return NULL;
2136
    }
2137
 
2138
  gcc_unreachable ();
2139
}
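
/* A minimal usage sketch, not part of the original file; the function name
   and the caller's policy are hypothetical.  It shows how a pass that has
   proven the condition of BB's ending COND_EXPR to be constant can ask
   which outgoing edge survives.  */
#if 0
/* Hypothetical example helper, for illustration only.  */
static void
example_prune_dead_succ (basic_block bb)
{
  /* Ask which outgoing edge a constant-false condition selects.  */
  edge taken = find_taken_edge (bb, integer_zero_node);

  if (taken)
    {
      /* Only TAKEN can be executed; any other outgoing edge of BB is
         dead and could be removed with remove_edge.  */
    }
}
#endif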
2140
 
2141
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2142
   statement, determine which of the outgoing edges will be taken out of the
2143
   block.  Return NULL if either edge may be taken.  */
2144
 
2145
static edge
2146
find_taken_edge_computed_goto (basic_block bb, tree val)
2147
{
2148
  basic_block dest;
2149
  edge e = NULL;
2150
 
2151
  dest = label_to_block (val);
2152
  if (dest)
2153
    {
2154
      e = find_edge (bb, dest);
2155
      gcc_assert (e != NULL);
2156
    }
2157
 
2158
  return e;
2159
}
2160
 
2161
/* Given a constant value VAL and the entry block BB to a COND_EXPR
2162
   statement, determine which of the two edges will be taken out of the
2163
   block.  Return NULL if either edge may be taken.  */
2164
 
2165
static edge
2166
find_taken_edge_cond_expr (basic_block bb, tree val)
2167
{
2168
  edge true_edge, false_edge;
2169
 
2170
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2171
 
2172
  gcc_assert (TREE_CODE (val) == INTEGER_CST);
2173
  return (zero_p (val) ? false_edge : true_edge);
2174
}
2175
 
2176
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2177
   statement, determine which edge will be taken out of the block.  Return
2178
   NULL if any edge may be taken.  */
2179
 
2180
static edge
2181
find_taken_edge_switch_expr (basic_block bb, tree val)
2182
{
2183
  tree switch_expr, taken_case;
2184
  basic_block dest_bb;
2185
  edge e;
2186
 
2187
  switch_expr = last_stmt (bb);
2188
  taken_case = find_case_label_for_value (switch_expr, val);
2189
  dest_bb = label_to_block (CASE_LABEL (taken_case));
2190
 
2191
  e = find_edge (bb, dest_bb);
2192
  gcc_assert (e);
2193
  return e;
2194
}
2195
 
2196
 
2197
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2198
   We can make optimal use here of the fact that the case labels are
2199
   sorted: We can do a binary search for a case matching VAL.  */
2200
 
2201
static tree
2202
find_case_label_for_value (tree switch_expr, tree val)
2203
{
2204
  tree vec = SWITCH_LABELS (switch_expr);
2205
  size_t low, high, n = TREE_VEC_LENGTH (vec);
2206
  tree default_case = TREE_VEC_ELT (vec, n - 1);
2207
 
2208
  for (low = -1, high = n - 1; high - low > 1; )
2209
    {
2210
      size_t i = (high + low) / 2;
2211
      tree t = TREE_VEC_ELT (vec, i);
2212
      int cmp;
2213
 
2214
      /* Cache the result of comparing CASE_LOW and val.  */
2215
      cmp = tree_int_cst_compare (CASE_LOW (t), val);
2216
 
2217
      if (cmp > 0)
2218
        high = i;
2219
      else
2220
        low = i;
2221
 
2222
      if (CASE_HIGH (t) == NULL)
2223
        {
2224
          /* A single-valued case label.  */
2225
          if (cmp == 0)
2226
            return t;
2227
        }
2228
      else
2229
        {
2230
          /* A case range.  We can only handle integer ranges.  */
2231
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2232
            return t;
2233
        }
2234
    }
2235
 
2236
  return default_case;
2237
}
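
/* Worked example (not part of the original source; the labels are made up):
   for a SWITCH_EXPR whose sorted label vector is { case 1:, case 5 ... 9:,
   default: } the search above proceeds as follows.  For VAL == 7 it first
   probes "case 1:" (CASE_LOW 1 < 7, so the lower bound moves up), then
   "case 5 ... 9:", where CASE_LOW (5) <= 7 and CASE_HIGH (9) >= 7, so that
   range label is returned.  For VAL == 3 neither label matches and the
   default label, stored last in the vector, is returned.  */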
2238
 
2239
 
2240
 
2241
 
2242
/*---------------------------------------------------------------------------
2243
                              Debugging functions
2244
---------------------------------------------------------------------------*/
2245
 
2246
/* Dump tree-specific information of block BB to file OUTF.  */
2247
 
2248
void
2249
tree_dump_bb (basic_block bb, FILE *outf, int indent)
2250
{
2251
  dump_generic_bb (outf, bb, indent, TDF_VOPS);
2252
}
2253
 
2254
 
2255
/* Dump a basic block on stderr.  */
2256
 
2257
void
2258
debug_tree_bb (basic_block bb)
2259
{
2260
  dump_bb (bb, stderr, 0);
2261
}
2262
 
2263
 
2264
/* Dump basic block with index N on stderr.  */
2265
 
2266
basic_block
2267
debug_tree_bb_n (int n)
2268
{
2269
  debug_tree_bb (BASIC_BLOCK (n));
2270
  return BASIC_BLOCK (n);
2271
}
2272
 
2273
 
2274
/* Dump the CFG on stderr.
2275
 
2276
   FLAGS are the same as those used by the tree dumping functions
2277
   (see TDF_* in tree-pass.h).  */
2278
 
2279
void
2280
debug_tree_cfg (int flags)
2281
{
2282
  dump_tree_cfg (stderr, flags);
2283
}
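
/* Illustrative note, not part of the original file: from a debugger one
   would typically call something like debug_tree_cfg (TDF_DETAILS |
   TDF_STATS) to combine the detailed per-block dump with the statistics
   printed by dump_cfg_stats.  */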
2284
 
2285
 
2286
/* Dump the program showing basic block boundaries on the given FILE.
2287
 
2288
   FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2289
   tree.h).  */
2290
 
2291
void
2292
dump_tree_cfg (FILE *file, int flags)
2293
{
2294
  if (flags & TDF_DETAILS)
2295
    {
2296
      const char *funcname
2297
        = lang_hooks.decl_printable_name (current_function_decl, 2);
2298
 
2299
      fputc ('\n', file);
2300
      fprintf (file, ";; Function %s\n\n", funcname);
2301
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2302
               n_basic_blocks, n_edges, last_basic_block);
2303
 
2304
      brief_dump_cfg (file);
2305
      fprintf (file, "\n");
2306
    }
2307
 
2308
  if (flags & TDF_STATS)
2309
    dump_cfg_stats (file);
2310
 
2311
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2312
}
2313
 
2314
 
2315
/* Dump CFG statistics on FILE.  */
2316
 
2317
void
2318
dump_cfg_stats (FILE *file)
2319
{
2320
  static long max_num_merged_labels = 0;
2321
  unsigned long size, total = 0;
2322
  long num_edges;
2323
  basic_block bb;
2324
  const char * const fmt_str   = "%-30s%-13s%12s\n";
2325
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2326
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2327
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
2328
  const char *funcname
2329
    = lang_hooks.decl_printable_name (current_function_decl, 2);
2330
 
2331
 
2332
  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2333
 
2334
  fprintf (file, "---------------------------------------------------------\n");
2335
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2336
  fprintf (file, fmt_str, "", "  instances  ", "used ");
2337
  fprintf (file, "---------------------------------------------------------\n");
2338
 
2339
  size = n_basic_blocks * sizeof (struct basic_block_def);
2340
  total += size;
2341
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2342
           SCALE (size), LABEL (size));
2343
 
2344
  num_edges = 0;
2345
  FOR_EACH_BB (bb)
2346
    num_edges += EDGE_COUNT (bb->succs);
2347
  size = num_edges * sizeof (struct edge_def);
2348
  total += size;
2349
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2350
 
2351
  fprintf (file, "---------------------------------------------------------\n");
2352
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2353
           LABEL (total));
2354
  fprintf (file, "---------------------------------------------------------\n");
2355
  fprintf (file, "\n");
2356
 
2357
  if (cfg_stats.num_merged_labels > max_num_merged_labels)
2358
    max_num_merged_labels = cfg_stats.num_merged_labels;
2359
 
2360
  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2361
           cfg_stats.num_merged_labels, max_num_merged_labels);
2362
 
2363
  fprintf (file, "\n");
2364
}
2365
 
2366
 
2367
/* Dump CFG statistics on stderr.  Keep extern so that it's always
2368
   linked in the final executable.  */
2369
 
2370
void
2371
debug_cfg_stats (void)
2372
{
2373
  dump_cfg_stats (stderr);
2374
}
2375
 
2376
 
2377
/* Dump the flowgraph to a .vcg FILE.  */
2378
 
2379
static void
2380
tree_cfg2vcg (FILE *file)
2381
{
2382
  edge e;
2383
  edge_iterator ei;
2384
  basic_block bb;
2385
  const char *funcname
2386
    = lang_hooks.decl_printable_name (current_function_decl, 2);
2387
 
2388
  /* Write the file header.  */
2389
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
2390
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2391
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2392
 
2393
  /* Write blocks and edges.  */
2394
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2395
    {
2396
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2397
               e->dest->index);
2398
 
2399
      if (e->flags & EDGE_FAKE)
2400
        fprintf (file, " linestyle: dotted priority: 10");
2401
      else
2402
        fprintf (file, " linestyle: solid priority: 100");
2403
 
2404
      fprintf (file, " }\n");
2405
    }
2406
  fputc ('\n', file);
2407
 
2408
  FOR_EACH_BB (bb)
2409
    {
2410
      enum tree_code head_code, end_code;
2411
      const char *head_name, *end_name;
2412
      int head_line = 0;
2413
      int end_line = 0;
2414
      tree first = first_stmt (bb);
2415
      tree last = last_stmt (bb);
2416
 
2417
      if (first)
2418
        {
2419
          head_code = TREE_CODE (first);
2420
          head_name = tree_code_name[head_code];
2421
          head_line = get_lineno (first);
2422
        }
2423
      else
2424
        head_name = "no-statement";
2425
 
2426
      if (last)
2427
        {
2428
          end_code = TREE_CODE (last);
2429
          end_name = tree_code_name[end_code];
2430
          end_line = get_lineno (last);
2431
        }
2432
      else
2433
        end_name = "no-statement";
2434
 
2435
      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2436
               bb->index, bb->index, head_name, head_line, end_name,
2437
               end_line);
2438
 
2439
      FOR_EACH_EDGE (e, ei, bb->succs)
2440
        {
2441
          if (e->dest == EXIT_BLOCK_PTR)
2442
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2443
          else
2444
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2445
 
2446
          if (e->flags & EDGE_FAKE)
2447
            fprintf (file, " priority: 10 linestyle: dotted");
2448
          else
2449
            fprintf (file, " priority: 100 linestyle: solid");
2450
 
2451
          fprintf (file, " }\n");
2452
        }
2453
 
2454
      if (bb->next_bb != EXIT_BLOCK_PTR)
2455
        fputc ('\n', file);
2456
    }
2457
 
2458
  fputs ("}\n\n", file);
2459
}
2460
 
2461
 
2462
 
2463
/*---------------------------------------------------------------------------
2464
                             Miscellaneous helpers
2465
---------------------------------------------------------------------------*/
2466
 
2467
/* Return true if T represents a stmt that always transfers control.  */
2468
 
2469
bool
2470
is_ctrl_stmt (tree t)
2471
{
2472
  return (TREE_CODE (t) == COND_EXPR
2473
          || TREE_CODE (t) == SWITCH_EXPR
2474
          || TREE_CODE (t) == GOTO_EXPR
2475
          || TREE_CODE (t) == RETURN_EXPR
2476
          || TREE_CODE (t) == RESX_EXPR);
2477
}
2478
 
2479
 
2480
/* Return true if T is a statement that may alter the flow of control
2481
   (e.g., a call to a non-returning function).  */
2482
 
2483
bool
2484
is_ctrl_altering_stmt (tree t)
2485
{
2486
  tree call;
2487
 
2488
  gcc_assert (t);
2489
  call = get_call_expr_in (t);
2490
  if (call)
2491
    {
2492
      /* A non-pure/const CALL_EXPR alters flow control if the current
2493
         function has nonlocal labels.  */
2494
      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2495
        return true;
2496
 
2497
      /* A CALL_EXPR also alters control flow if it does not return.  */
2498
      if (call_expr_flags (call) & ECF_NORETURN)
2499
        return true;
2500
    }
2501
 
2502
  /* OpenMP directives alter control flow.  */
2503
  if (OMP_DIRECTIVE_P (t))
2504
    return true;
2505
 
2506
  /* If a statement can throw, it alters control flow.  */
2507
  return tree_can_throw_internal (t);
2508
}
2509
 
2510
 
2511
/* Return true if T is a computed goto.  */
2512
 
2513
bool
2514
computed_goto_p (tree t)
2515
{
2516
  return (TREE_CODE (t) == GOTO_EXPR
2517
          && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2518
}
2519
 
2520
 
2521
/* Return true if T is a simple local goto.  */
2522
 
2523
bool
2524
simple_goto_p (tree t)
2525
{
2526
  return (TREE_CODE (t) == GOTO_EXPR
2527
          && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2528
}
2529
 
2530
 
2531
/* Return true if T can make an abnormal transfer of control flow.
2532
   Transfers of control flow associated with EH are excluded.  */
2533
 
2534
bool
2535
tree_can_make_abnormal_goto (tree t)
2536
{
2537
  if (computed_goto_p (t))
2538
    return true;
2539
  if (TREE_CODE (t) == MODIFY_EXPR)
2540
    t = TREE_OPERAND (t, 1);
2541
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
2542
    t = TREE_OPERAND (t, 0);
2543
  if (TREE_CODE (t) == CALL_EXPR)
2544
    return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2545
  return false;
2546
}
2547
 
2548
 
2549
/* Return true if T should start a new basic block.  PREV_T is the
2550
   statement preceding T.  It is used when T is a label or a case label.
2551
   Labels should only start a new basic block if their previous statement
2552
   wasn't a label.  Otherwise, a sequence of labels would generate
2553
   unnecessary basic blocks that only contain a single label.  */
2554
 
2555
static inline bool
2556
stmt_starts_bb_p (tree t, tree prev_t)
2557
{
2558
  if (t == NULL_TREE)
2559
    return false;
2560
 
2561
  /* LABEL_EXPRs start a new basic block only if the preceding
2562
     statement wasn't a label of the same type.  This prevents the
2563
     creation of consecutive blocks that have nothing but a single
2564
     label.  */
2565
  if (TREE_CODE (t) == LABEL_EXPR)
2566
    {
2567
      /* Nonlocal and computed GOTO targets always start a new block.  */
2568
      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2569
          || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2570
        return true;
2571
 
2572
      if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2573
        {
2574
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2575
            return true;
2576
 
2577
          cfg_stats.num_merged_labels++;
2578
          return false;
2579
        }
2580
      else
2581
        return true;
2582
    }
2583
 
2584
  return false;
2585
}
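
/* For illustration (not part of the original source; labels made up): in
   gimplified code such as

       L1:
       L2:
         x = 1;

   only L1 starts a new basic block; L2 is merged into the same block and
   counted in cfg_stats.num_merged_labels.  Nonlocal and forced labels are
   the exception and always start a block of their own.  */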
2586
 
2587
 
2588
/* Return true if T should end a basic block.  */
2589
 
2590
bool
2591
stmt_ends_bb_p (tree t)
2592
{
2593
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2594
}
2595
 
2596
 
2597
/* Add gotos that used to be represented implicitly in the CFG.  */
2598
 
2599
void
2600
disband_implicit_edges (void)
2601
{
2602
  basic_block bb;
2603
  block_stmt_iterator last;
2604
  edge e;
2605
  edge_iterator ei;
2606
  tree stmt, label;
2607
 
2608
  FOR_EACH_BB (bb)
2609
    {
2610
      last = bsi_last (bb);
2611
      stmt = last_stmt (bb);
2612
 
2613
      if (stmt && TREE_CODE (stmt) == COND_EXPR)
2614
        {
2615
          /* Remove superfluous gotos from COND_EXPR branches.  Moved
2616
             here from cfg_remove_useless_stmts since it violates the
2617
             invariants for tree/CFG correspondence and thus fits better
2618
             here, where we do it anyway.  */
2619
          e = find_edge (bb, bb->next_bb);
2620
          if (e)
2621
            {
2622
              if (e->flags & EDGE_TRUE_VALUE)
2623
                COND_EXPR_THEN (stmt) = build_empty_stmt ();
2624
              else if (e->flags & EDGE_FALSE_VALUE)
2625
                COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2626
              else
2627
                gcc_unreachable ();
2628
              e->flags |= EDGE_FALLTHRU;
2629
            }
2630
 
2631
          continue;
2632
        }
2633
 
2634
      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2635
        {
2636
          /* Remove the RETURN_EXPR if we may fall through to the exit
2637
             instead.  */
2638
          gcc_assert (single_succ_p (bb));
2639
          gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2640
 
2641
          if (bb->next_bb == EXIT_BLOCK_PTR
2642
              && !TREE_OPERAND (stmt, 0))
2643
            {
2644
              bsi_remove (&last, true);
2645
              single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2646
            }
2647
          continue;
2648
        }
2649
 
2650
      /* There can be no fallthru edge if the last statement is a control
2651
         one.  */
2652
      if (stmt && is_ctrl_stmt (stmt))
2653
        continue;
2654
 
2655
      /* Find a fallthru edge and emit the goto if necessary.  */
2656
      FOR_EACH_EDGE (e, ei, bb->succs)
2657
        if (e->flags & EDGE_FALLTHRU)
2658
          break;
2659
 
2660
      if (!e || e->dest == bb->next_bb)
2661
        continue;
2662
 
2663
      gcc_assert (e->dest != EXIT_BLOCK_PTR);
2664
      label = tree_block_label (e->dest);
2665
 
2666
      stmt = build1 (GOTO_EXPR, void_type_node, label);
2667
#ifdef USE_MAPPED_LOCATION
2668
      SET_EXPR_LOCATION (stmt, e->goto_locus);
2669
#else
2670
      SET_EXPR_LOCUS (stmt, e->goto_locus);
2671
#endif
2672
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2673
      e->flags &= ~EDGE_FALLTHRU;
2674
    }
2675
}
2676
 
2677
/* Remove block annotations and other datastructures.  */
2678
 
2679
void
2680
delete_tree_cfg_annotations (void)
2681
{
2682
  label_to_block_map = NULL;
2683
}
2684
 
2685
 
2686
/* Return the first statement in basic block BB.  */
2687
 
2688
tree
2689
first_stmt (basic_block bb)
2690
{
2691
  block_stmt_iterator i = bsi_start (bb);
2692
  return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2693
}
2694
 
2695
 
2696
/* Return the last statement in basic block BB.  */
2697
 
2698
tree
2699
last_stmt (basic_block bb)
2700
{
2701
  block_stmt_iterator b = bsi_last (bb);
2702
  return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2703
}
2704
 
2705
 
2706
/* Return a pointer to the last statement in block BB.  */
2707
 
2708
tree *
2709
last_stmt_ptr (basic_block bb)
2710
{
2711
  block_stmt_iterator last = bsi_last (bb);
2712
  return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2713
}
2714
 
2715
 
2716
/* Return the last statement of an otherwise empty block.  Return NULL
2717
   if the block is totally empty, or if it contains more than one
2718
   statement.  */
2719
 
2720
tree
2721
last_and_only_stmt (basic_block bb)
2722
{
2723
  block_stmt_iterator i = bsi_last (bb);
2724
  tree last, prev;
2725
 
2726
  if (bsi_end_p (i))
2727
    return NULL_TREE;
2728
 
2729
  last = bsi_stmt (i);
2730
  bsi_prev (&i);
2731
  if (bsi_end_p (i))
2732
    return last;
2733
 
2734
  /* Empty statements should no longer appear in the instruction stream.
2735
     Everything that might have appeared before should be deleted by
2736
     remove_useless_stmts, and the optimizers should just bsi_remove
2737
     instead of smashing with build_empty_stmt.
2738
 
2739
     Thus the only thing that should appear here in a block containing
2740
     one executable statement is a label.  */
2741
  prev = bsi_stmt (i);
2742
  if (TREE_CODE (prev) == LABEL_EXPR)
2743
    return last;
2744
  else
2745
    return NULL_TREE;
2746
}
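
/* A minimal usage sketch, not part of the original file; the helper name is
   hypothetical.  It shows the typical way last_and_only_stmt is used to
   recognize blocks that do nothing but jump elsewhere (a leading label does
   not count against "only").  */
#if 0
/* Hypothetical example helper, for illustration only.  */
static bool
example_block_is_single_goto (basic_block bb)
{
  tree stmt = last_and_only_stmt (bb);

  /* NULL_TREE means the block is empty or holds more than one
     executable statement.  */
  return stmt != NULL_TREE && TREE_CODE (stmt) == GOTO_EXPR;
}
#endif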
2747
 
2748
 
2749
/* Mark BB as the basic block holding statement T.  */
2750
 
2751
void
2752
set_bb_for_stmt (tree t, basic_block bb)
2753
{
2754
  if (TREE_CODE (t) == PHI_NODE)
2755
    PHI_BB (t) = bb;
2756
  else if (TREE_CODE (t) == STATEMENT_LIST)
2757
    {
2758
      tree_stmt_iterator i;
2759
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2760
        set_bb_for_stmt (tsi_stmt (i), bb);
2761
    }
2762
  else
2763
    {
2764
      stmt_ann_t ann = get_stmt_ann (t);
2765
      ann->bb = bb;
2766
 
2767
      /* If the statement is a label, add the label to the block-to-labels map
2768
        so that we can speed up edge creation for GOTO_EXPRs.  */
2769
      if (TREE_CODE (t) == LABEL_EXPR)
2770
        {
2771
          int uid;
2772
 
2773
          t = LABEL_EXPR_LABEL (t);
2774
          uid = LABEL_DECL_UID (t);
2775
          if (uid == -1)
2776
            {
2777
              unsigned old_len = VEC_length (basic_block, label_to_block_map);
2778
              LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2779
              if (old_len <= (unsigned) uid)
2780
                {
2781
                  basic_block *addr;
2782
                  unsigned new_len = 3 * uid / 2;
2783
 
2784
                  VEC_safe_grow (basic_block, gc, label_to_block_map,
2785
                                 new_len);
2786
                  addr = VEC_address (basic_block, label_to_block_map);
2787
                  memset (&addr[old_len],
2788
                          0, sizeof (basic_block) * (new_len - old_len));
2789
                }
2790
            }
2791
          else
2792
            /* We're moving an existing label.  Make sure that we've
2793
                removed it from the old block.  */
2794
            gcc_assert (!bb
2795
                        || !VEC_index (basic_block, label_to_block_map, uid));
2796
          VEC_replace (basic_block, label_to_block_map, uid, bb);
2797
        }
2798
    }
2799
}
2800
 
2801
/* Faster version of set_bb_for_stmt that assumes the statement is being moved
2802
   from one basic block to another.
2803
   For BB splitting we can run into a quadratic case, so performance is quite
2804
   important and, knowing that the tables are big enough, change_bb_for_stmt
2805
   can be inlined as a leaf function.  */
2806
static inline void
2807
change_bb_for_stmt (tree t, basic_block bb)
2808
{
2809
  get_stmt_ann (t)->bb = bb;
2810
  if (TREE_CODE (t) == LABEL_EXPR)
2811
    VEC_replace (basic_block, label_to_block_map,
2812
                 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2813
}
2814
 
2815
/* Finds iterator for STMT.  */
2816
 
2817
extern block_stmt_iterator
2818
bsi_for_stmt (tree stmt)
2819
{
2820
  block_stmt_iterator bsi;
2821
 
2822
  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2823
    if (bsi_stmt (bsi) == stmt)
2824
      return bsi;
2825
 
2826
  gcc_unreachable ();
2827
}
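
/* A minimal usage sketch, not part of the original file; NEW_STMT and the
   wrapper name are hypothetical.  When a pass holds only a statement and not
   its iterator, bsi_for_stmt recovers the iterator so the statement can be
   replaced in place with bsi_replace (defined below).  */
#if 0
/* Hypothetical example helper, for illustration only.  */
static void
example_replace_stmt (tree stmt, tree new_stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (stmt);

  /* Replace STMT and migrate its EH region information, if any.  */
  bsi_replace (&bsi, new_stmt, true);
}
#endif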
2828
 
2829
/* Mark statement T as modified, and update it.  */
2830
static inline void
2831
update_modified_stmts (tree t)
2832
{
2833
  if (TREE_CODE (t) == STATEMENT_LIST)
2834
    {
2835
      tree_stmt_iterator i;
2836
      tree stmt;
2837
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2838
        {
2839
          stmt = tsi_stmt (i);
2840
          update_stmt_if_modified (stmt);
2841
        }
2842
    }
2843
  else
2844
    update_stmt_if_modified (t);
2845
}
2846
 
2847
/* Insert statement (or statement list) T before the statement
2848
   pointed-to by iterator I.  M specifies how to update iterator I
2849
   after insertion (see enum bsi_iterator_update).  */
2850
 
2851
void
2852
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2853
{
2854
  set_bb_for_stmt (t, i->bb);
2855
  update_modified_stmts (t);
2856
  tsi_link_before (&i->tsi, t, m);
2857
}
2858
 
2859
 
2860
/* Insert statement (or statement list) T after the statement
2861
   pointed-to by iterator I.  M specifies how to update iterator I
2862
   after insertion (see enum bsi_iterator_update).  */
2863
 
2864
void
2865
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2866
{
2867
  set_bb_for_stmt (t, i->bb);
2868
  update_modified_stmts (t);
2869
  tsi_link_after (&i->tsi, t, m);
2870
}
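
/* A minimal usage sketch, not part of the original file; the function name
   is hypothetical.  It shows how a caller can append a statement to a block
   while keeping any control statement last, mirroring the policy of
   bsi_move_to_bb_end below.  */
#if 0
/* Hypothetical example helper, for illustration only.  */
static void
example_append_stmt (basic_block bb, tree stmt)
{
  block_stmt_iterator bsi = bsi_last (bb);

  if (!bsi_end_p (bsi) && is_ctrl_stmt (bsi_stmt (bsi)))
    bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
  else
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
}
#endif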
2871
 
2872
 
2873
/* Remove the statement pointed to by iterator I.  The iterator is updated
2874
   to the next statement.
2875
 
2876
   When REMOVE_EH_INFO is true we remove the statement pointed to by
2877
   iterator I from the EH tables.  Otherwise we do not modify the EH
2878
   tables.
2879
 
2880
   Generally, REMOVE_EH_INFO should be true when the statement is going to
2881
   be removed from the IL and not reinserted elsewhere.  */
2882
 
2883
void
2884
bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2885
{
2886
  tree t = bsi_stmt (*i);
2887
  set_bb_for_stmt (t, NULL);
2888
  delink_stmt_imm_use (t);
2889
  tsi_delink (&i->tsi);
2890
  mark_stmt_modified (t);
2891
  if (remove_eh_info)
2892
    remove_stmt_from_eh_region (t);
2893
}
2894
 
2895
 
2896
/* Move the statement at FROM so it comes right after the statement at TO.  */
2897
 
2898
void
2899
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2900
{
2901
  tree stmt = bsi_stmt (*from);
2902
  bsi_remove (from, false);
2903
  bsi_insert_after (to, stmt, BSI_SAME_STMT);
2904
}
2905
 
2906
 
2907
/* Move the statement at FROM so it comes right before the statement at TO.  */
2908
 
2909
void
2910
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2911
{
2912
  tree stmt = bsi_stmt (*from);
2913
  bsi_remove (from, false);
2914
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
2915
}
2916
 
2917
 
2918
/* Move the statement at FROM to the end of basic block BB.  */
2919
 
2920
void
2921
bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2922
{
2923
  block_stmt_iterator last = bsi_last (bb);
2924
 
2925
  /* Have to check bsi_end_p because it could be an empty block.  */
2926
  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2927
    bsi_move_before (from, &last);
2928
  else
2929
    bsi_move_after (from, &last);
2930
}
2931
 
2932
 
2933
/* Replace the contents of the statement pointed to by iterator BSI
2934
   with STMT.  If UPDATE_EH_INFO is true, the exception handling
2935
   information of the original statement is moved to the new statement.  */
2936
 
2937
void
2938
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2939
{
2940
  int eh_region;
2941
  tree orig_stmt = bsi_stmt (*bsi);
2942
 
2943
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2944
  set_bb_for_stmt (stmt, bsi->bb);
2945
 
2946
  /* Preserve EH region information from the original statement, if
2947
     requested by the caller.  */
2948
  if (update_eh_info)
2949
    {
2950
      eh_region = lookup_stmt_eh_region (orig_stmt);
2951
      if (eh_region >= 0)
2952
        {
2953
          remove_stmt_from_eh_region (orig_stmt);
2954
          add_stmt_to_eh_region (stmt, eh_region);
2955
        }
2956
    }
2957
 
2958
  delink_stmt_imm_use (orig_stmt);
2959
  *bsi_stmt_ptr (*bsi) = stmt;
2960
  mark_stmt_modified (stmt);
2961
  update_modified_stmts (stmt);
2962
}
2963
 
2964
 
2965
/* Find a location for inserting a statement on edge E; return it in *BSI.
2966
   Every attempt is made to place the statement in an existing basic block, but
2967
   sometimes that isn't possible.  When it isn't possible, the edge is
2968
   split and the statement is added to the new block.
2969
 
2970
   In all cases, the returned *BSI points to the correct location.  The
2971
   return value is true if insertion should be done after the location,
2972
   or false if it should be done before the location.  If a new basic block
2973
   has to be created, it is stored in *NEW_BB.  */
2974
 
2975
static bool
2976
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2977
                           basic_block *new_bb)
2978
{
2979
  basic_block dest, src;
2980
  tree tmp;
2981
 
2982
  dest = e->dest;
2983
 restart:
2984
 
2985
  /* If the destination has one predecessor which has no PHI nodes,
2986
     insert there.  Except for the exit block.
2987
 
2988
     The requirement for no PHI nodes could be relaxed.  Basically we
2989
     would have to examine the PHIs to prove that none of them used
2990
     the value set by the statement we want to insert on E.  That
2991
     hardly seems worth the effort.  */
2992
  if (single_pred_p (dest)
2993
      && ! phi_nodes (dest)
2994
      && dest != EXIT_BLOCK_PTR)
2995
    {
2996
      *bsi = bsi_start (dest);
2997
      if (bsi_end_p (*bsi))
2998
        return true;
2999
 
3000
      /* Make sure we insert after any leading labels.  */
3001
      tmp = bsi_stmt (*bsi);
3002
      while (TREE_CODE (tmp) == LABEL_EXPR)
3003
        {
3004
          bsi_next (bsi);
3005
          if (bsi_end_p (*bsi))
3006
            break;
3007
          tmp = bsi_stmt (*bsi);
3008
        }
3009
 
3010
      if (bsi_end_p (*bsi))
3011
        {
3012
          *bsi = bsi_last (dest);
3013
          return true;
3014
        }
3015
      else
3016
        return false;
3017
    }
3018
 
3019
  /* If the source has one successor, the edge is not abnormal, and
3020
     the last statement does not end a basic block, insert there.
3021
     Except for the entry block.  */
3022
  src = e->src;
3023
  if ((e->flags & EDGE_ABNORMAL) == 0
3024
      && single_succ_p (src)
3025
      && src != ENTRY_BLOCK_PTR)
3026
    {
3027
      *bsi = bsi_last (src);
3028
      if (bsi_end_p (*bsi))
3029
        return true;
3030
 
3031
      tmp = bsi_stmt (*bsi);
3032
      if (!stmt_ends_bb_p (tmp))
3033
        return true;
3034
 
3035
      /* Insert code just before returning the value.  We may need to decompose
3036
         the return in case it contains a non-trivial operand.  */
3037
      if (TREE_CODE (tmp) == RETURN_EXPR)
3038
        {
3039
          tree op = TREE_OPERAND (tmp, 0);
3040
          if (op && !is_gimple_val (op))
3041
            {
3042
              gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3043
              bsi_insert_before (bsi, op, BSI_NEW_STMT);
3044
              TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3045
            }
3046
          bsi_prev (bsi);
3047
          return true;
3048
        }
3049
    }
3050
 
3051
  /* Otherwise, create a new basic block, and split this edge.  */
3052
  dest = split_edge (e);
3053
  if (new_bb)
3054
    *new_bb = dest;
3055
  e = single_pred_edge (dest);
3056
  goto restart;
3057
}
3058
 
3059
 
3060
/* This routine will commit all pending edge insertions, creating any new
3061
   basic blocks which are necessary.  */
3062
 
3063
void
3064
bsi_commit_edge_inserts (void)
3065
{
3066
  basic_block bb;
3067
  edge e;
3068
  edge_iterator ei;
3069
 
3070
  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3071
 
3072
  FOR_EACH_BB (bb)
3073
    FOR_EACH_EDGE (e, ei, bb->succs)
3074
      bsi_commit_one_edge_insert (e, NULL);
3075
}
3076
 
3077
 
3078
/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3079
   to this block, otherwise set it to NULL.  */
3080
 
3081
void
3082
bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3083
{
3084
  if (new_bb)
3085
    *new_bb = NULL;
3086
  if (PENDING_STMT (e))
3087
    {
3088
      block_stmt_iterator bsi;
3089
      tree stmt = PENDING_STMT (e);
3090
 
3091
      PENDING_STMT (e) = NULL_TREE;
3092
 
3093
      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3094
        bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3095
      else
3096
        bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3097
    }
3098
}
3099
 
3100
 
3101
/* Add STMT to the pending list of edge E.  No actual insertion is
3102
   made until a call to bsi_commit_edge_inserts () is made.  */
3103
 
3104
void
3105
bsi_insert_on_edge (edge e, tree stmt)
3106
{
3107
  append_to_statement_list (stmt, &PENDING_STMT (e));
3108
}
3109
 
3110
/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
3111
   block has to be created, it is returned.  */
3112
 
3113
basic_block
3114
bsi_insert_on_edge_immediate (edge e, tree stmt)
3115
{
3116
  block_stmt_iterator bsi;
3117
  basic_block new_bb = NULL;
3118
 
3119
  gcc_assert (!PENDING_STMT (e));
3120
 
3121
  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3122
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3123
  else
3124
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3125
 
3126
  return new_bb;
3127
}
3128
 
3129
/*---------------------------------------------------------------------------
3130
             Tree specific functions for CFG manipulation
3131
---------------------------------------------------------------------------*/
3132
 
3133
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
3134
 
3135
static void
3136
reinstall_phi_args (edge new_edge, edge old_edge)
3137
{
3138
  tree var, phi;
3139
 
3140
  if (!PENDING_STMT (old_edge))
3141
    return;
3142
 
3143
  for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3144
       var && phi;
3145
       var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3146
    {
3147
      tree result = TREE_PURPOSE (var);
3148
      tree arg = TREE_VALUE (var);
3149
 
3150
      gcc_assert (result == PHI_RESULT (phi));
3151
 
3152
      add_phi_arg (phi, arg, new_edge);
3153
    }
3154
 
3155
  PENDING_STMT (old_edge) = NULL;
3156
}
3157
 
3158
/* Returns the basic block after which the new basic block created
3159
   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
3160
   near its "logical" location.  This is of most help to humans looking
3161
   at debugging dumps.  */
3162
 
3163
static basic_block
3164
split_edge_bb_loc (edge edge_in)
3165
{
3166
  basic_block dest = edge_in->dest;
3167
 
3168
  if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3169
    return edge_in->src;
3170
  else
3171
    return dest->prev_bb;
3172
}
3173
 
3174
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
3175
   Abort on abnormal edges.  */
3176
 
3177
static basic_block
3178
tree_split_edge (edge edge_in)
3179
{
3180
  basic_block new_bb, after_bb, dest;
3181
  edge new_edge, e;
3182
 
3183
  /* Abnormal edges cannot be split.  */
3184
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3185
 
3186
  dest = edge_in->dest;
3187
 
3188
  after_bb = split_edge_bb_loc (edge_in);
3189
 
3190
  new_bb = create_empty_bb (after_bb);
3191
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
3192
  new_bb->count = edge_in->count;
3193
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3194
  new_edge->probability = REG_BR_PROB_BASE;
3195
  new_edge->count = edge_in->count;
3196
 
3197
  e = redirect_edge_and_branch (edge_in, new_bb);
3198
  gcc_assert (e);
3199
  reinstall_phi_args (new_edge, e);
3200
 
3201
  return new_bb;
3202
}
3203
 
3204
 
3205
/* Return true when BB has label LABEL in it.  */
3206
 
3207
static bool
3208
has_label_p (basic_block bb, tree label)
3209
{
3210
  block_stmt_iterator bsi;
3211
 
3212
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3213
    {
3214
      tree stmt = bsi_stmt (bsi);
3215
 
3216
      if (TREE_CODE (stmt) != LABEL_EXPR)
3217
        return false;
3218
      if (LABEL_EXPR_LABEL (stmt) == label)
3219
        return true;
3220
    }
3221
  return false;
3222
}
3223
 
3224
 
3225
/* Callback for walk_tree; check that all elements whose address is taken are
3226
   properly noticed as such.  DATA is nonnull exactly when TP was seen
3227
   inside a PHI node.  */
3228
 
3229
static tree
3230
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3231
{
3232
  tree t = *tp, x;
3233
  bool in_phi = (data != NULL);
3234
 
3235
  if (TYPE_P (t))
3236
    *walk_subtrees = 0;
3237
 
3238
  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
3239
#define CHECK_OP(N, MSG) \
3240
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))                \
3241
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3242
 
3243
  switch (TREE_CODE (t))
3244
    {
3245
    case SSA_NAME:
3246
      if (SSA_NAME_IN_FREE_LIST (t))
3247
        {
3248
          error ("SSA name in freelist but still referenced");
3249
          return *tp;
3250
        }
3251
      break;
3252
 
3253
    case ASSERT_EXPR:
3254
      x = fold (ASSERT_EXPR_COND (t));
3255
      if (x == boolean_false_node)
3256
        {
3257
          error ("ASSERT_EXPR with an always-false condition");
3258
          return *tp;
3259
        }
3260
      break;
3261
 
3262
    case MODIFY_EXPR:
3263
      x = TREE_OPERAND (t, 0);
3264
      if (TREE_CODE (x) == BIT_FIELD_REF
3265
          && is_gimple_reg (TREE_OPERAND (x, 0)))
3266
        {
3267
          error ("GIMPLE register modified with BIT_FIELD_REF");
3268
          return t;
3269
        }
3270
      break;
3271
 
3272
    case ADDR_EXPR:
3273
      {
3274
        bool old_invariant;
3275
        bool old_constant;
3276
        bool old_side_effects;
3277
        bool new_invariant;
3278
        bool new_constant;
3279
        bool new_side_effects;
3280
 
3281
        /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3282
           dead PHIs that take the address of something.  But if the PHI
3283
           result is dead, the fact that it takes the address of anything
3284
           is irrelevant.  Because we cannot tell from here if a PHI result
3285
           is dead, we just skip this check for PHIs altogether.  This means
3286
           we may be missing "valid" checks, but what can you do?
3287
           This was PR19217.  */
3288
        if (in_phi)
3289
          break;
3290
 
3291
        old_invariant = TREE_INVARIANT (t);
3292
        old_constant = TREE_CONSTANT (t);
3293
        old_side_effects = TREE_SIDE_EFFECTS (t);
3294
 
3295
        recompute_tree_invariant_for_addr_expr (t);
3296
        new_invariant = TREE_INVARIANT (t);
3297
        new_side_effects = TREE_SIDE_EFFECTS (t);
3298
        new_constant = TREE_CONSTANT (t);
3299
 
3300
        if (old_invariant != new_invariant)
3301
          {
3302
            error ("invariant not recomputed when ADDR_EXPR changed");
3303
            return t;
3304
          }
3305
 
3306
        if (old_constant != new_constant)
3307
          {
3308
            error ("constant not recomputed when ADDR_EXPR changed");
3309
            return t;
3310
          }
3311
        if (old_side_effects != new_side_effects)
3312
          {
3313
            error ("side effects not recomputed when ADDR_EXPR changed");
3314
            return t;
3315
          }
3316
 
3317
        /* Skip any references (they will be checked when we recurse down the
3318
           tree) and ensure that any variable used as a prefix is marked
3319
           addressable.  */
3320
        for (x = TREE_OPERAND (t, 0);
3321
             handled_component_p (x);
3322
             x = TREE_OPERAND (x, 0))
3323
          ;
3324
 
3325
        if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3326
          return NULL;
3327
        if (!TREE_ADDRESSABLE (x))
3328
          {
3329
            error ("address taken, but ADDRESSABLE bit not set");
3330
            return x;
3331
          }
3332
        break;
3333
      }
3334
 
3335
    case COND_EXPR:
3336
      x = COND_EXPR_COND (t);
3337
      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3338
        {
3339
          error ("non-boolean used in condition");
3340
          return x;
3341
        }
3342
      if (!is_gimple_condexpr (x))
3343
        {
3344
          error ("invalid conditional operand");
3345
          return x;
3346
        }
3347
      break;
3348
 
3349
    case NOP_EXPR:
3350
    case CONVERT_EXPR:
3351
    case FIX_TRUNC_EXPR:
3352
    case FIX_CEIL_EXPR:
3353
    case FIX_FLOOR_EXPR:
3354
    case FIX_ROUND_EXPR:
3355
    case FLOAT_EXPR:
3356
    case NEGATE_EXPR:
3357
    case ABS_EXPR:
3358
    case BIT_NOT_EXPR:
3359
    case NON_LVALUE_EXPR:
3360
    case TRUTH_NOT_EXPR:
3361
      CHECK_OP (0, "invalid operand to unary operator");
3362
      break;
3363
 
3364
    case REALPART_EXPR:
3365
    case IMAGPART_EXPR:
3366
    case COMPONENT_REF:
3367
    case ARRAY_REF:
3368
    case ARRAY_RANGE_REF:
3369
    case BIT_FIELD_REF:
3370
    case VIEW_CONVERT_EXPR:
3371
      /* We have a nest of references.  Verify that each of the operands
3372
         that determine where to reference is either a constant or a variable,
3373
         verify that the base is valid, and then show we've already checked
3374
         the subtrees.  */
3375
      while (handled_component_p (t))
3376
        {
3377
          if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3378
            CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3379
          else if (TREE_CODE (t) == ARRAY_REF
3380
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
3381
            {
3382
              CHECK_OP (1, "invalid array index");
3383
              if (TREE_OPERAND (t, 2))
3384
                CHECK_OP (2, "invalid array lower bound");
3385
              if (TREE_OPERAND (t, 3))
3386
                CHECK_OP (3, "invalid array stride");
3387
            }
3388
          else if (TREE_CODE (t) == BIT_FIELD_REF)
3389
            {
3390
              CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3391
              CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3392
            }
3393
 
3394
          t = TREE_OPERAND (t, 0);
3395
        }
3396
 
3397
      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3398
        {
3399
          error ("invalid reference prefix");
3400
          return t;
3401
        }
3402
      *walk_subtrees = 0;
3403
      break;
3404
 
3405
    case LT_EXPR:
3406
    case LE_EXPR:
3407
    case GT_EXPR:
3408
    case GE_EXPR:
3409
    case EQ_EXPR:
3410
    case NE_EXPR:
3411
    case UNORDERED_EXPR:
3412
    case ORDERED_EXPR:
3413
    case UNLT_EXPR:
3414
    case UNLE_EXPR:
3415
    case UNGT_EXPR:
3416
    case UNGE_EXPR:
3417
    case UNEQ_EXPR:
3418
    case LTGT_EXPR:
3419
    case PLUS_EXPR:
3420
    case MINUS_EXPR:
3421
    case MULT_EXPR:
3422
    case TRUNC_DIV_EXPR:
3423
    case CEIL_DIV_EXPR:
3424
    case FLOOR_DIV_EXPR:
3425
    case ROUND_DIV_EXPR:
3426
    case TRUNC_MOD_EXPR:
3427
    case CEIL_MOD_EXPR:
3428
    case FLOOR_MOD_EXPR:
3429
    case ROUND_MOD_EXPR:
3430
    case RDIV_EXPR:
3431
    case EXACT_DIV_EXPR:
3432
    case MIN_EXPR:
3433
    case MAX_EXPR:
3434
    case LSHIFT_EXPR:
3435
    case RSHIFT_EXPR:
3436
    case LROTATE_EXPR:
3437
    case RROTATE_EXPR:
3438
    case BIT_IOR_EXPR:
3439
    case BIT_XOR_EXPR:
3440
    case BIT_AND_EXPR:
3441
      CHECK_OP (0, "invalid operand to binary operator");
3442
      CHECK_OP (1, "invalid operand to binary operator");
3443
      break;
3444
 
3445
    case CONSTRUCTOR:
3446
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3447
        *walk_subtrees = 0;
3448
      break;
3449
 
3450
    default:
3451
      break;
3452
    }
3453
  return NULL;
3454
 
3455
#undef CHECK_OP
3456
}
3457
 
3458
 
3459
/* Verify STMT, return true if STMT is not in GIMPLE form.
3460
   TODO: Implement type checking.  */
3461
 
3462
static bool
3463
verify_stmt (tree stmt, bool last_in_block)
3464
{
3465
  tree addr;
3466
 
3467
  if (OMP_DIRECTIVE_P (stmt))
3468
    {
3469
      /* OpenMP directives are validated by the FE and never operated
3470
         on by the optimizers.  Furthermore, OMP_FOR may contain
3471
         non-gimple expressions when the main index variable has had
3472
         its address taken.  This does not affect the loop itself
3473
         because the header of an OMP_FOR is merely used to determine
3474
         how to set up the parallel iteration.  */
3475
      return false;
3476
    }
3477
 
3478
  if (!is_gimple_stmt (stmt))
3479
    {
3480
      error ("is not a valid GIMPLE statement");
3481
      goto fail;
3482
    }
3483
 
3484
  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3485
  if (addr)
3486
    {
3487
      debug_generic_stmt (addr);
3488
      return true;
3489
    }
3490
 
3491
  /* If the statement is marked as part of an EH region, then it is
3492
     expected that the statement could throw.  Verify that when we
3493
     have optimizations that simplify statements such that we prove
3494
     that they cannot throw, that we update other data structures
3495
     to match.  */
3496
  if (lookup_stmt_eh_region (stmt) >= 0)
3497
    {
3498
      if (!tree_could_throw_p (stmt))
3499
        {
3500
          error ("statement marked for throw, but doesn%'t");
3501
          goto fail;
3502
        }
3503
      if (!last_in_block && tree_can_throw_internal (stmt))
3504
        {
3505
          error ("statement marked for throw in middle of block");
3506
          goto fail;
3507
        }
3508
    }
3509
 
3510
  return false;
3511
 
3512
 fail:
3513
  debug_generic_stmt (stmt);
3514
  return true;
3515
}
3516
 
3517
 
3518
/* Return true when T can be shared.  */
3519
 
3520
static bool
3521
tree_node_can_be_shared (tree t)
3522
{
3523
  if (IS_TYPE_OR_DECL_P (t)
3524
      || is_gimple_min_invariant (t)
3525
      || TREE_CODE (t) == SSA_NAME
3526
      || t == error_mark_node
3527
      || TREE_CODE (t) == IDENTIFIER_NODE)
3528
    return true;
3529
 
3530
  if (TREE_CODE (t) == CASE_LABEL_EXPR)
3531
    return true;
3532
 
3533
  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3534
           && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3535
         || TREE_CODE (t) == COMPONENT_REF
3536
         || TREE_CODE (t) == REALPART_EXPR
3537
         || TREE_CODE (t) == IMAGPART_EXPR)
3538
    t = TREE_OPERAND (t, 0);
3539
 
3540
  if (DECL_P (t))
3541
    return true;
3542
 
3543
  return false;
3544
}
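/* In other words, types, declarations, SSA_NAMEs, constants and simple
   invariant references may be shared freely between statements, while
   expression nodes (e.g. a PLUS_EXPR or a MODIFY_EXPR) must be unique to
   the statement that uses them.  */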
3545
 
3546
 
3547
/* Called via walk_tree.  Verify tree sharing.  */
3548
 
3549
static tree
3550
verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3551
{
3552
  htab_t htab = (htab_t) data;
3553
  void **slot;
3554
 
3555
  if (tree_node_can_be_shared (*tp))
3556
    {
3557
      *walk_subtrees = false;
3558
      return NULL;
3559
    }
3560
 
3561
  slot = htab_find_slot (htab, *tp, INSERT);
3562
  if (*slot)
3563
    return (tree) *slot;
3564
  *slot = *tp;
3565
 
3566
  return NULL;
3567
}
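/* The walk above records every non-shareable node in the pointer hash
   table passed in DATA; if a node is already present in the table it is
   reachable from two places, and it is returned so that the caller can
   report "incorrect sharing of tree nodes" for the offending statement.  */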
3568
 
3569
 
3570
/* Verify the GIMPLE statement chain.  */
3571
 
3572
void
3573
verify_stmts (void)
3574
{
3575
  basic_block bb;
3576
  block_stmt_iterator bsi;
3577
  bool err = false;
3578
  htab_t htab;
3579
  tree addr;
3580
 
3581
  timevar_push (TV_TREE_STMT_VERIFY);
3582
  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3583
 
3584
  FOR_EACH_BB (bb)
3585
    {
3586
      tree phi;
3587
      int i;
3588
 
3589
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3590
        {
3591
          int phi_num_args = PHI_NUM_ARGS (phi);
3592
 
3593
          if (bb_for_stmt (phi) != bb)
3594
            {
3595
              error ("bb_for_stmt (phi) is set to a wrong basic block");
3596
              err |= true;
3597
            }
3598
 
3599
          for (i = 0; i < phi_num_args; i++)
3600
            {
3601
              tree t = PHI_ARG_DEF (phi, i);
3602
              tree addr;
3603
 
3604
              /* Addressable variables do have SSA_NAMEs but they
3605
                 are not considered gimple values.  */
3606
              if (TREE_CODE (t) != SSA_NAME
3607
                  && TREE_CODE (t) != FUNCTION_DECL
3608
                  && !is_gimple_val (t))
3609
                {
3610
                  error ("PHI def is not a GIMPLE value");
3611
                  debug_generic_stmt (phi);
3612
                  debug_generic_stmt (t);
3613
                  err |= true;
3614
                }
3615
 
3616
              addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3617
              if (addr)
3618
                {
3619
                  debug_generic_stmt (addr);
3620
                  err |= true;
3621
                }
3622
 
3623
              addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3624
              if (addr)
3625
                {
3626
                  error ("incorrect sharing of tree nodes");
3627
                  debug_generic_stmt (phi);
3628
                  debug_generic_stmt (addr);
3629
                  err |= true;
3630
                }
3631
            }
3632
        }
3633
 
3634
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3635
        {
3636
          tree stmt = bsi_stmt (bsi);
3637
 
3638
          if (bb_for_stmt (stmt) != bb)
3639
            {
3640
              error ("bb_for_stmt (stmt) is set to a wrong basic block");
3641
              err |= true;
3642
            }
3643
 
3644
          bsi_next (&bsi);
3645
          err |= verify_stmt (stmt, bsi_end_p (bsi));
3646
          addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3647
          if (addr)
3648
            {
3649
              error ("incorrect sharing of tree nodes");
3650
              debug_generic_stmt (stmt);
3651
              debug_generic_stmt (addr);
3652
              err |= true;
3653
            }
3654
        }
3655
    }
3656
 
3657
  if (err)
3658
    internal_error ("verify_stmts failed");
3659
 
3660
  htab_delete (htab);
3661
  timevar_pop (TV_TREE_STMT_VERIFY);
3662
}
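/* Usage note: this verifier is normally reached through the pass manager
   when a pass requests TODO_verify_stmts in its todo flags and GCC was
   configured with checking enabled (ENABLE_CHECKING); it can also be
   called by hand from a debugger.  This describes typical use only and is
   not enforced here.  */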
3663
 
3664
 
3665
/* Verifies that the flow information is OK.  */
3666
 
3667
static int
3668
tree_verify_flow_info (void)
3669
{
3670
  int err = 0;
3671
  basic_block bb;
3672
  block_stmt_iterator bsi;
3673
  tree stmt;
3674
  edge e;
3675
  edge_iterator ei;
3676
 
3677
  if (ENTRY_BLOCK_PTR->stmt_list)
3678
    {
3679
      error ("ENTRY_BLOCK has a statement list associated with it");
3680
      err = 1;
3681
    }
3682
 
3683
  if (EXIT_BLOCK_PTR->stmt_list)
3684
    {
3685
      error ("EXIT_BLOCK has a statement list associated with it");
3686
      err = 1;
3687
    }
3688
 
3689
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3690
    if (e->flags & EDGE_FALLTHRU)
3691
      {
3692
        error ("fallthru to exit from bb %d", e->src->index);
3693
        err = 1;
3694
      }
3695
 
3696
  FOR_EACH_BB (bb)
3697
    {
3698
      bool found_ctrl_stmt = false;
3699
 
3700
      stmt = NULL_TREE;
3701
 
3702
      /* Skip labels on the start of basic block.  */
3703
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3704
        {
3705
          tree prev_stmt = stmt;
3706
 
3707
          stmt = bsi_stmt (bsi);
3708
 
3709
          if (TREE_CODE (stmt) != LABEL_EXPR)
3710
            break;
3711
 
3712
          if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3713
            {
3714
              error ("nonlocal label ");
3715
              print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3716
              fprintf (stderr, " is not first in a sequence of labels in bb %d",
3717
                       bb->index);
3718
              err = 1;
3719
            }
3720
 
3721
          if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3722
            {
3723
              error ("label ");
3724
              print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3725
              fprintf (stderr, " to block does not match in bb %d",
3726
                       bb->index);
3727
              err = 1;
3728
            }
3729
 
3730
          if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3731
              != current_function_decl)
3732
            {
3733
              error ("label ");
3734
              print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3735
              fprintf (stderr, " has incorrect context in bb %d",
3736
                       bb->index);
3737
              err = 1;
3738
            }
3739
        }
3740
 
3741
      /* Verify that body of basic block BB is free of control flow.  */
3742
      for (; !bsi_end_p (bsi); bsi_next (&bsi))
3743
        {
3744
          tree stmt = bsi_stmt (bsi);
3745
 
3746
          if (found_ctrl_stmt)
3747
            {
3748
              error ("control flow in the middle of basic block %d",
3749
                     bb->index);
3750
              err = 1;
3751
            }
3752
 
3753
          if (stmt_ends_bb_p (stmt))
3754
            found_ctrl_stmt = true;
3755
 
3756
          if (TREE_CODE (stmt) == LABEL_EXPR)
3757
            {
3758
              error ("label ");
3759
              print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3760
              fprintf (stderr, " in the middle of basic block %d", bb->index);
3761
              err = 1;
3762
            }
3763
        }
3764
 
3765
      bsi = bsi_last (bb);
3766
      if (bsi_end_p (bsi))
3767
        continue;
3768
 
3769
      stmt = bsi_stmt (bsi);
3770
 
3771
      err |= verify_eh_edges (stmt);
3772
 
3773
      if (is_ctrl_stmt (stmt))
3774
        {
3775
          FOR_EACH_EDGE (e, ei, bb->succs)
3776
            if (e->flags & EDGE_FALLTHRU)
3777
              {
3778
                error ("fallthru edge after a control statement in bb %d",
3779
                       bb->index);
3780
                err = 1;
3781
              }
3782
        }
3783
 
3784
      if (TREE_CODE (stmt) != COND_EXPR)
3785
        {
3786
          /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
3787
             after anything other than a COND_EXPR.  */
3788
          FOR_EACH_EDGE (e, ei, bb->succs)
3789
            if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3790
              {
3791
                error ("true/false edge after a non-COND_EXPR in bb %d",
3792
                       bb->index);
3793
                err = 1;
3794
              }
3795
        }
3796
 
3797
      switch (TREE_CODE (stmt))
3798
        {
3799
        case COND_EXPR:
3800
          {
3801
            edge true_edge;
3802
            edge false_edge;
3803
            if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3804
                || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3805
              {
3806
                error ("structured COND_EXPR at the end of bb %d", bb->index);
3807
                err = 1;
3808
              }
3809
 
3810
            extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3811
 
3812
            if (!true_edge || !false_edge
3813
                || !(true_edge->flags & EDGE_TRUE_VALUE)
3814
                || !(false_edge->flags & EDGE_FALSE_VALUE)
3815
                || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3816
                || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3817
                || EDGE_COUNT (bb->succs) >= 3)
3818
              {
3819
                error ("wrong outgoing edge flags at end of bb %d",
3820
                       bb->index);
3821
                err = 1;
3822
              }
3823
 
3824
            if (!has_label_p (true_edge->dest,
3825
                              GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3826
              {
3827
                error ("%<then%> label does not match edge at end of bb %d",
3828
                       bb->index);
3829
                err = 1;
3830
              }
3831
 
3832
            if (!has_label_p (false_edge->dest,
3833
                              GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3834
              {
3835
                error ("%<else%> label does not match edge at end of bb %d",
3836
                       bb->index);
3837
                err = 1;
3838
              }
3839
          }
3840
          break;
3841
 
3842
        case GOTO_EXPR:
3843
          if (simple_goto_p (stmt))
3844
            {
3845
              error ("explicit goto at end of bb %d", bb->index);
3846
              err = 1;
3847
            }
3848
          else
3849
            {
3850
              /* FIXME.  We should double check that the labels in the
3851
                 destination blocks have their address taken.  */
3852
              FOR_EACH_EDGE (e, ei, bb->succs)
3853
                if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3854
                                 | EDGE_FALSE_VALUE))
3855
                    || !(e->flags & EDGE_ABNORMAL))
3856
                  {
3857
                    error ("wrong outgoing edge flags at end of bb %d",
3858
                           bb->index);
3859
                    err = 1;
3860
                  }
3861
            }
3862
          break;
3863
 
3864
        case RETURN_EXPR:
3865
          if (!single_succ_p (bb)
3866
              || (single_succ_edge (bb)->flags
3867
                  & (EDGE_FALLTHRU | EDGE_ABNORMAL
3868
                     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3869
            {
3870
              error ("wrong outgoing edge flags at end of bb %d", bb->index);
3871
              err = 1;
3872
            }
3873
          if (single_succ (bb) != EXIT_BLOCK_PTR)
3874
            {
3875
              error ("return edge does not point to exit in bb %d",
3876
                     bb->index);
3877
              err = 1;
3878
            }
3879
          break;
3880
 
3881
        case SWITCH_EXPR:
3882
          {
3883
            tree prev;
3884
            edge e;
3885
            size_t i, n;
3886
            tree vec;
3887
 
3888
            vec = SWITCH_LABELS (stmt);
3889
            n = TREE_VEC_LENGTH (vec);
3890
 
3891
            /* Mark all the destination basic blocks.  */
3892
            for (i = 0; i < n; ++i)
3893
              {
3894
                tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3895
                basic_block label_bb = label_to_block (lab);
3896
 
3897
                gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3898
                label_bb->aux = (void *)1;
3899
              }
3900
 
3901
            /* Verify that the case labels are sorted.  */
3902
            prev = TREE_VEC_ELT (vec, 0);
3903
            for (i = 1; i < n - 1; ++i)
3904
              {
3905
                tree c = TREE_VEC_ELT (vec, i);
3906
                if (! CASE_LOW (c))
3907
                  {
3908
                    error ("found default case not at end of case vector");
3909
                    err = 1;
3910
                    continue;
3911
                  }
3912
                if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3913
                  {
3914
                    error ("case labels not sorted: ");
3915
                    print_generic_expr (stderr, prev, 0);
3916
                    fprintf (stderr," is greater than ");
3917
                    print_generic_expr (stderr, c, 0);
3918
                    fprintf (stderr," but comes before it.\n");
3919
                    err = 1;
3920
                  }
3921
                prev = c;
3922
              }
3923
            if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3924
              {
3925
                error ("no default case found at end of case vector");
3926
                err = 1;
3927
              }
3928
 
3929
            FOR_EACH_EDGE (e, ei, bb->succs)
3930
              {
3931
                if (!e->dest->aux)
3932
                  {
3933
                    error ("extra outgoing edge %d->%d",
3934
                           bb->index, e->dest->index);
3935
                    err = 1;
3936
                  }
3937
                e->dest->aux = (void *)2;
3938
                if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3939
                                 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3940
                  {
3941
                    error ("wrong outgoing edge flags at end of bb %d",
3942
                           bb->index);
3943
                    err = 1;
3944
                  }
3945
              }
3946
 
3947
            /* Check that every case label has a corresponding outgoing edge.  */
3948
            for (i = 0; i < n; ++i)
3949
              {
3950
                tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3951
                basic_block label_bb = label_to_block (lab);
3952
 
3953
                if (label_bb->aux != (void *)2)
3954
                  {
3955
                    error ("missing edge %i->%i",
3956
                           bb->index, label_bb->index);
3957
                    err = 1;
3958
                  }
3959
              }
3960
 
3961
            FOR_EACH_EDGE (e, ei, bb->succs)
3962
              e->dest->aux = (void *)0;
3963
          }
3964
 
3965
        default: ;
3966
        }
3967
    }
3968
 
3969
  if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3970
    verify_dominators (CDI_DOMINATORS);
3971
 
3972
  return err;
3973
}
3974
 
3975
 
3976
/* Updates phi nodes after creating a forwarder block joined
3977
   by edge FALLTHRU.  */
3978
 
3979
static void
3980
tree_make_forwarder_block (edge fallthru)
3981
{
3982
  edge e;
3983
  edge_iterator ei;
3984
  basic_block dummy, bb;
3985
  tree phi, new_phi, var;
3986
 
3987
  dummy = fallthru->src;
3988
  bb = fallthru->dest;
3989
 
3990
  if (single_pred_p (bb))
3991
    return;
3992
 
3993
  /* If we redirected a branch we must create new phi nodes at the
3994
     start of BB.  */
3995
  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3996
    {
3997
      var = PHI_RESULT (phi);
3998
      new_phi = create_phi_node (var, bb);
3999
      SSA_NAME_DEF_STMT (var) = new_phi;
4000
      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4001
      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4002
    }
4003
 
4004
  /* Ensure that the PHI node chain is in the same order.  */
4005
  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4006
 
4007
  /* Add the arguments we have stored on edges.  */
4008
  FOR_EACH_EDGE (e, ei, bb->preds)
4009
    {
4010
      if (e == fallthru)
4011
        continue;
4012
 
4013
      flush_pending_stmts (e);
4014
    }
4015
}
4016
 
4017
 
4018
/* Return a non-special label in the head of basic block BB.
4019
   Create one if it doesn't exist.  */
4020
 
4021
tree
4022
tree_block_label (basic_block bb)
4023
{
4024
  block_stmt_iterator i, s = bsi_start (bb);
4025
  bool first = true;
4026
  tree label, stmt;
4027
 
4028
  for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4029
    {
4030
      stmt = bsi_stmt (i);
4031
      if (TREE_CODE (stmt) != LABEL_EXPR)
4032
        break;
4033
      label = LABEL_EXPR_LABEL (stmt);
4034
      if (!DECL_NONLOCAL (label))
4035
        {
4036
          if (!first)
4037
            bsi_move_before (&i, &s);
4038
          return label;
4039
        }
4040
    }
4041
 
4042
  label = create_artificial_label ();
4043
  stmt = build1 (LABEL_EXPR, void_type_node, label);
4044
  bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4045
  return label;
4046
}
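/* Note: the edge-redirection code below relies on this helper;
   tree_redirect_edge_and_branch uses the returned label as the new
   GOTO_DESTINATION of a COND_EXPR arm or as the new CASE_LABEL of the
   switch cases that must now branch to DEST.  */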
4047
 
4048
 
4049
/* Attempt to perform edge redirection by replacing a possibly complex
4050
   jump instruction by a goto or by removing the jump completely.
4051
   This can apply only if all edges now point to the same block.  The
4052
   parameters and return values are equivalent to
4053
   redirect_edge_and_branch.  */
4054
 
4055
static edge
4056
tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4057
{
4058
  basic_block src = e->src;
4059
  block_stmt_iterator b;
4060
  tree stmt;
4061
 
4062
  /* We can replace or remove a complex jump only when we have exactly
4063
     two edges.  */
4064
  if (EDGE_COUNT (src->succs) != 2
4065
      /* Verify that all targets will be TARGET.  Specifically, the
4066
         edge that is not E must also go to TARGET.  */
4067
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4068
    return NULL;
4069
 
4070
  b = bsi_last (src);
4071
  if (bsi_end_p (b))
4072
    return NULL;
4073
  stmt = bsi_stmt (b);
4074
 
4075
  if (TREE_CODE (stmt) == COND_EXPR
4076
      || TREE_CODE (stmt) == SWITCH_EXPR)
4077
    {
4078
      bsi_remove (&b, true);
4079
      e = ssa_redirect_edge (e, target);
4080
      e->flags = EDGE_FALLTHRU;
4081
      return e;
4082
    }
4083
 
4084
  return NULL;
4085
}
4086
 
4087
 
4088
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
4089
   edge representing the redirected branch.  */
4090
 
4091
static edge
4092
tree_redirect_edge_and_branch (edge e, basic_block dest)
4093
{
4094
  basic_block bb = e->src;
4095
  block_stmt_iterator bsi;
4096
  edge ret;
4097
  tree label, stmt;
4098
 
4099
  if (e->flags & EDGE_ABNORMAL)
4100
    return NULL;
4101
 
4102
  if (e->src != ENTRY_BLOCK_PTR
4103
      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4104
    return ret;
4105
 
4106
  if (e->dest == dest)
4107
    return NULL;
4108
 
4109
  label = tree_block_label (dest);
4110
 
4111
  bsi = bsi_last (bb);
4112
  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4113
 
4114
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4115
    {
4116
    case COND_EXPR:
4117
      stmt = (e->flags & EDGE_TRUE_VALUE
4118
              ? COND_EXPR_THEN (stmt)
4119
              : COND_EXPR_ELSE (stmt));
4120
      GOTO_DESTINATION (stmt) = label;
4121
      break;
4122
 
4123
    case GOTO_EXPR:
4124
      /* No non-abnormal edges should lead from a non-simple goto, and
4125
         simple ones should be represented implicitly.  */
4126
      gcc_unreachable ();
4127
 
4128
    case SWITCH_EXPR:
4129
      {
4130
        tree cases = get_cases_for_edge (e, stmt);
4131
 
4132
        /* If we have a list of cases associated with E, then use it
4133
           as it's a lot faster than walking the entire case vector.  */
4134
        if (cases)
4135
          {
4136
            edge e2 = find_edge (e->src, dest);
4137
            tree last, first;
4138
 
4139
            first = cases;
4140
            while (cases)
4141
              {
4142
                last = cases;
4143
                CASE_LABEL (cases) = label;
4144
                cases = TREE_CHAIN (cases);
4145
              }
4146
 
4147
            /* If there was already an edge in the CFG, then we need
4148
               to move all the cases associated with E to E2.  */
4149
            if (e2)
4150
              {
4151
                tree cases2 = get_cases_for_edge (e2, stmt);
4152
 
4153
                TREE_CHAIN (last) = TREE_CHAIN (cases2);
4154
                TREE_CHAIN (cases2) = first;
4155
              }
4156
          }
4157
        else
4158
          {
4159
            tree vec = SWITCH_LABELS (stmt);
4160
            size_t i, n = TREE_VEC_LENGTH (vec);
4161
 
4162
            for (i = 0; i < n; i++)
4163
              {
4164
                tree elt = TREE_VEC_ELT (vec, i);
4165
 
4166
                if (label_to_block (CASE_LABEL (elt)) == e->dest)
4167
                  CASE_LABEL (elt) = label;
4168
              }
4169
          }
4170
 
4171
        break;
4172
      }
4173
 
4174
    case RETURN_EXPR:
4175
      bsi_remove (&bsi, true);
4176
      e->flags |= EDGE_FALLTHRU;
4177
      break;
4178
 
4179
    default:
4180
      /* Otherwise it must be a fallthru edge, and we don't need to
4181
         do anything besides redirecting it.  */
4182
      gcc_assert (e->flags & EDGE_FALLTHRU);
4183
      break;
4184
    }
4185
 
4186
  /* Update/insert PHI nodes as necessary.  */
4187
 
4188
  /* Now update the edges in the CFG.  */
4189
  e = ssa_redirect_edge (e, dest);
4190
 
4191
  return e;
4192
}
4193
 
4194
 
4195
/* Simple wrapper, as we can always redirect fallthru edges.  */
4196
 
4197
static basic_block
4198
tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4199
{
4200
  e = tree_redirect_edge_and_branch (e, dest);
4201
  gcc_assert (e);
4202
 
4203
  return NULL;
4204
}
4205
 
4206
 
4207
/* Splits basic block BB after statement STMT (but at least after the
4208
   labels).  If STMT is NULL, BB is split just after the labels.  */
4209
 
4210
static basic_block
4211
tree_split_block (basic_block bb, void *stmt)
4212
{
4213
  block_stmt_iterator bsi;
4214
  tree_stmt_iterator tsi_tgt;
4215
  tree act;
4216
  basic_block new_bb;
4217
  edge e;
4218
  edge_iterator ei;
4219
 
4220
  new_bb = create_empty_bb (bb);
4221
 
4222
  /* Redirect the outgoing edges.  */
4223
  new_bb->succs = bb->succs;
4224
  bb->succs = NULL;
4225
  FOR_EACH_EDGE (e, ei, new_bb->succs)
4226
    e->src = new_bb;
4227
 
4228
  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4229
    stmt = NULL;
4230
 
4231
  /* Move everything from BSI to the new basic block.  */
4232
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4233
    {
4234
      act = bsi_stmt (bsi);
4235
      if (TREE_CODE (act) == LABEL_EXPR)
4236
        continue;
4237
 
4238
      if (!stmt)
4239
        break;
4240
 
4241
      if (stmt == act)
4242
        {
4243
          bsi_next (&bsi);
4244
          break;
4245
        }
4246
    }
4247
 
4248
  if (bsi_end_p (bsi))
4249
    return new_bb;
4250
 
4251
  /* Split the statement list - avoid re-creating new containers as this
4252
     brings ugly quadratic memory consumption in the inliner.
4253
     (We are still quadratic since we need to update stmt BB pointers,
4254
     sadly.)  */
4255
  new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4256
  for (tsi_tgt = tsi_start (new_bb->stmt_list);
4257
       !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4258
    change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4259
 
4260
  return new_bb;
4261
}
4262
 
4263
 
4264
/* Moves basic block BB after block AFTER.  */
4265
 
4266
static bool
4267
tree_move_block_after (basic_block bb, basic_block after)
4268
{
4269
  if (bb->prev_bb == after)
4270
    return true;
4271
 
4272
  unlink_block (bb);
4273
  link_block (bb, after);
4274
 
4275
  return true;
4276
}
4277
 
4278
 
4279
/* Return true if basic block BB can be duplicated.  */
4280
 
4281
static bool
4282
tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4283
{
4284
  return true;
4285
}
4286
 
4287
 
4288
/* Create a duplicate of the basic block BB.  NOTE: This does not
4289
   preserve SSA form.  */
4290
 
4291
static basic_block
4292
tree_duplicate_bb (basic_block bb)
4293
{
4294
  basic_block new_bb;
4295
  block_stmt_iterator bsi, bsi_tgt;
4296
  tree phi;
4297
 
4298
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4299
 
4300
  /* Copy the PHI nodes.  We ignore PHI node arguments here because
4301
     the incoming edges have not been set up yet.  */
4302
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4303
    {
4304
      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4305
      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4306
    }
4307
 
4308
  /* Keep the chain of PHI nodes in the same order so that they can be
4309
     updated by ssa_redirect_edge.  */
4310
  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4311
 
4312
  bsi_tgt = bsi_start (new_bb);
4313
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4314
    {
4315
      def_operand_p def_p;
4316
      ssa_op_iter op_iter;
4317
      tree stmt, copy;
4318
      int region;
4319
 
4320
      stmt = bsi_stmt (bsi);
4321
      if (TREE_CODE (stmt) == LABEL_EXPR)
4322
        continue;
4323
 
4324
      /* Create a new copy of STMT and duplicate STMT's virtual
4325
         operands.  */
4326
      copy = unshare_expr (stmt);
4327
      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4328
      copy_virtual_operands (copy, stmt);
4329
      region = lookup_stmt_eh_region (stmt);
4330
      if (region >= 0)
4331
        add_stmt_to_eh_region (copy, region);
4332
 
4333
      /* Create new names for all the definitions created by COPY and
4334
         add replacement mappings for each new name.  */
4335
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4336
        create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4337
    }
4338
 
4339
  return new_bb;
4340
}
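/* Note: as stated above, SSA form is not preserved here.  A typical
   caller duplicates the whole region first, then repairs PHIs with
   add_phi_args_after_copy and rewrites the SSA web with
   update_ssa (TODO_update_ssa), exactly as tree_duplicate_sese_region
   below does.  */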
4341
 
4342
 
4343
/* Basic block BB_COPY was created by code duplication.  Add phi node
4344
   arguments for edges going out of BB_COPY.  The blocks that were
4345
   duplicated have BB_DUPLICATED set.  */
4346
 
4347
void
4348
add_phi_args_after_copy_bb (basic_block bb_copy)
4349
{
4350
  basic_block bb, dest;
4351
  edge e, e_copy;
4352
  edge_iterator ei;
4353
  tree phi, phi_copy, phi_next, def;
4354
 
4355
  bb = get_bb_original (bb_copy);
4356
 
4357
  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4358
    {
4359
      if (!phi_nodes (e_copy->dest))
4360
        continue;
4361
 
4362
      if (e_copy->dest->flags & BB_DUPLICATED)
4363
        dest = get_bb_original (e_copy->dest);
4364
      else
4365
        dest = e_copy->dest;
4366
 
4367
      e = find_edge (bb, dest);
4368
      if (!e)
4369
        {
4370
          /* During loop unrolling the target of the latch edge is copied.
4371
             In this case we are not looking for the edge to dest, but for the
4372
             edge to the duplicated block whose original was dest.  */
4373
          FOR_EACH_EDGE (e, ei, bb->succs)
4374
            if ((e->dest->flags & BB_DUPLICATED)
4375
                && get_bb_original (e->dest) == dest)
4376
              break;
4377
 
4378
          gcc_assert (e != NULL);
4379
        }
4380
 
4381
      for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4382
           phi;
4383
           phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4384
        {
4385
          phi_next = PHI_CHAIN (phi);
4386
          def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4387
          add_phi_arg (phi_copy, def, e_copy);
4388
        }
4389
    }
4390
}
4391
 
4392
/* Blocks in REGION_COPY array of length N_REGION were created by
4393
   duplication of basic blocks.  Add phi node arguments for edges
4394
   going from these blocks.  */
4395
 
4396
void
4397
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4398
{
4399
  unsigned i;
4400
 
4401
  for (i = 0; i < n_region; i++)
4402
    region_copy[i]->flags |= BB_DUPLICATED;
4403
 
4404
  for (i = 0; i < n_region; i++)
4405
    add_phi_args_after_copy_bb (region_copy[i]);
4406
 
4407
  for (i = 0; i < n_region; i++)
4408
    region_copy[i]->flags &= ~BB_DUPLICATED;
4409
}
4410
 
4411
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4412
   important exit edge EXIT.  By important we mean that no SSA name defined
4413
   inside region is live over the other exit edges of the region.  All entry
4414
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
4415
   to the duplicate of the region.  SSA form, dominance and loop information
4416
   is updated.  The new basic blocks are stored to REGION_COPY in the same
4417
   order as they had in REGION, provided that REGION_COPY is not NULL.
4418
   The function returns false if it is unable to copy the region,
4419
   true otherwise.  */
4420
 
4421
bool
4422
tree_duplicate_sese_region (edge entry, edge exit,
4423
                            basic_block *region, unsigned n_region,
4424
                            basic_block *region_copy)
4425
{
4426
  unsigned i, n_doms;
4427
  bool free_region_copy = false, copying_header = false;
4428
  struct loop *loop = entry->dest->loop_father;
4429
  edge exit_copy;
4430
  basic_block *doms;
4431
  edge redirected;
4432
  int total_freq = 0, entry_freq = 0;
4433
  gcov_type total_count = 0, entry_count = 0;
4434
 
4435
  if (!can_copy_bbs_p (region, n_region))
4436
    return false;
4437
 
4438
  /* Some sanity checking.  Note that we do not check for all possible
4439
     misuses of the function.  I.e., if you ask to copy something weird,
4440
     it will work, but the state of structures probably will not be
4441
     correct.  */
4442
  for (i = 0; i < n_region; i++)
4443
    {
4444
      /* We do not handle subloops, i.e. all the blocks must belong to the
4445
         same loop.  */
4446
      if (region[i]->loop_father != loop)
4447
        return false;
4448
 
4449
      if (region[i] != entry->dest
4450
          && region[i] == loop->header)
4451
        return false;
4452
    }
4453
 
4454
  loop->copy = loop;
4455
 
4456
  /* In case the function is used for loop header copying (which is the primary
4457
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
4458
  if (loop->header == entry->dest)
4459
    {
4460
      copying_header = true;
4461
      loop->copy = loop->outer;
4462
 
4463
      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4464
        return false;
4465
 
4466
      for (i = 0; i < n_region; i++)
4467
        if (region[i] != exit->src
4468
            && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4469
          return false;
4470
    }
4471
 
4472
  if (!region_copy)
4473
    {
4474
      region_copy = XNEWVEC (basic_block, n_region);
4475
      free_region_copy = true;
4476
    }
4477
 
4478
  gcc_assert (!need_ssa_update_p ());
4479
 
4480
  /* Record blocks outside the region that are dominated by something
4481
     inside.  */
4482
  doms = XNEWVEC (basic_block, n_basic_blocks);
4483
  initialize_original_copy_tables ();
4484
 
4485
  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4486
 
4487
  if (entry->dest->count)
4488
    {
4489
      total_count = entry->dest->count;
4490
      entry_count = entry->count;
4491
      /* Fix up corner cases, to avoid division by zero or creation of negative
4492
         frequencies.  */
4493
      if (entry_count > total_count)
4494
        entry_count = total_count;
4495
    }
4496
  else
4497
    {
4498
      total_freq = entry->dest->frequency;
4499
      entry_freq = EDGE_FREQUENCY (entry);
4500
      /* Fix up corner cases, to avoid division by zero or creation of negative
4501
         frequencies.  */
4502
      if (total_freq == 0)
4503
        total_freq = 1;
4504
      else if (entry_freq > total_freq)
4505
        entry_freq = total_freq;
4506
    }
4507
 
4508
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4509
            split_edge_bb_loc (entry));
4510
  if (total_count)
4511
    {
4512
      scale_bbs_frequencies_gcov_type (region, n_region,
4513
                                       total_count - entry_count,
4514
                                       total_count);
4515
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4516
                                       total_count);
4517
    }
4518
  else
4519
    {
4520
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4521
                                 total_freq);
4522
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4523
    }
4524
 
4525
  if (copying_header)
4526
    {
4527
      loop->header = exit->dest;
4528
      loop->latch = exit->src;
4529
    }
4530
 
4531
  /* Redirect the entry and add the phi node arguments.  */
4532
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4533
  gcc_assert (redirected != NULL);
4534
  flush_pending_stmts (entry);
4535
 
4536
  /* Concerning updating of dominators:  We must recount dominators
4537
     for entry block and its copy.  Anything that is outside of the
4538
     region, but was dominated by something inside needs recounting as
4539
     well.  */
4540
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4541
  doms[n_doms++] = get_bb_original (entry->dest);
4542
  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4543
  free (doms);
4544
 
4545
  /* Add the other PHI node arguments.  */
4546
  add_phi_args_after_copy (region_copy, n_region);
4547
 
4548
  /* Update the SSA web.  */
4549
  update_ssa (TODO_update_ssa);
4550
 
4551
  if (free_region_copy)
4552
    free (region_copy);
4553
 
4554
  free_original_copy_tables ();
4555
  return true;
4556
}
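/* Usage sketch (loop header copying, the primary use mentioned above):
   with ENTRY being the preheader->header edge and EXIT the edge from the
   copied blocks into the loop body, the duplicated header runs once on
   entry and the original header becomes the latch test, so a while loop
   is roughly rotated into do-while form.  This illustrates intent only
   and adds no behavior.  */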
4557
 
4558
/*
4559
DEF_VEC_P(basic_block);
4560
DEF_VEC_ALLOC_P(basic_block,heap);
4561
*/
4562
 
4563
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
4564
   adding blocks when the dominator traversal reaches EXIT.  This
4565
   function silently assumes that ENTRY strictly dominates EXIT.  */
4566
 
4567
static void
4568
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4569
                              VEC(basic_block,heap) **bbs_p)
4570
{
4571
  basic_block son;
4572
 
4573
  for (son = first_dom_son (CDI_DOMINATORS, entry);
4574
       son;
4575
       son = next_dom_son (CDI_DOMINATORS, son))
4576
    {
4577
      VEC_safe_push (basic_block, heap, *bbs_p, son);
4578
      if (son != exit)
4579
        gather_blocks_in_sese_region (son, exit, bbs_p);
4580
    }
4581
}
4582
 
4583
 
4584
struct move_stmt_d
4585
{
4586
  tree block;
4587
  tree from_context;
4588
  tree to_context;
4589
  bitmap vars_to_remove;
4590
  htab_t new_label_map;
4591
  bool remap_decls_p;
4592
};
4593
 
4594
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
4595
   contained in *TP and change the DECL_CONTEXT of every local
4596
   variable referenced in *TP.  */
4597
 
4598
static tree
4599
move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4600
{
4601
  struct move_stmt_d *p = (struct move_stmt_d *) data;
4602
  tree t = *tp;
4603
 
4604
  if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4605
    TREE_BLOCK (t) = p->block;
4606
 
4607
  if (OMP_DIRECTIVE_P (t)
4608
      && TREE_CODE (t) != OMP_RETURN
4609
      && TREE_CODE (t) != OMP_CONTINUE)
4610
    {
4611
      /* Do not remap variables inside OMP directives.  Variables
4612
         referenced in clauses and directive header belong to the
4613
         parent function and should not be moved into the child
4614
         function.  */
4615
      bool save_remap_decls_p = p->remap_decls_p;
4616
      p->remap_decls_p = false;
4617
      *walk_subtrees = 0;
4618
 
4619
      walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4620
 
4621
      p->remap_decls_p = save_remap_decls_p;
4622
    }
4623
  else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4624
    {
4625
      if (TREE_CODE (t) == LABEL_DECL)
4626
        {
4627
          if (p->new_label_map)
4628
            {
4629
              struct tree_map in, *out;
4630
              in.from = t;
4631
              out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4632
              if (out)
4633
                *tp = t = out->to;
4634
            }
4635
 
4636
          DECL_CONTEXT (t) = p->to_context;
4637
        }
4638
      else if (p->remap_decls_p)
4639
        {
4640
          DECL_CONTEXT (t) = p->to_context;
4641
 
4642
          if (TREE_CODE (t) == VAR_DECL)
4643
            {
4644
              struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4645
              f->unexpanded_var_list
4646
                = tree_cons (0, t, f->unexpanded_var_list);
4647
 
4648
              /* Mark T to be removed from the original function,
4649
                 otherwise it will be given a DECL_RTL when the
4650
                 original function is expanded.  */
4651
              bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4652
            }
4653
        }
4654
    }
4655
  else if (TYPE_P (t))
4656
    *walk_subtrees = 0;
4657
 
4658
  return NULL_TREE;
4659
}
4660
 
4661
 
4662
/* Move basic block BB from function CFUN to function DEST_FN.  The
4663
   block is moved out of the original linked list and placed after
4664
   block AFTER in the new list.  Also, the block is removed from the
4665
   original array of blocks and placed in DEST_FN's array of blocks.
4666
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4667
   updated to reflect the moved edges.
4668
 
4669
   On exit, local variables that need to be removed from
4670
   CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE.  */
4671
 
4672
static void
4673
move_block_to_fn (struct function *dest_cfun, basic_block bb,
4674
                  basic_block after, bool update_edge_count_p,
4675
                  bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4676
{
4677
  struct control_flow_graph *cfg;
4678
  edge_iterator ei;
4679
  edge e;
4680
  block_stmt_iterator si;
4681
  struct move_stmt_d d;
4682
  unsigned old_len, new_len;
4683
  basic_block *addr;
4684
 
4685
  /* Link BB to the new linked list.  */
4686
  move_block_after (bb, after);
4687
 
4688
  /* Update the edge count in the corresponding flowgraphs.  */
4689
  if (update_edge_count_p)
4690
    FOR_EACH_EDGE (e, ei, bb->succs)
4691
      {
4692
        cfun->cfg->x_n_edges--;
4693
        dest_cfun->cfg->x_n_edges++;
4694
      }
4695
 
4696
  /* Remove BB from the original basic block array.  */
4697
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4698
  cfun->cfg->x_n_basic_blocks--;
4699
 
4700
  /* Grow DEST_CFUN's basic block array if needed.  */
4701
  cfg = dest_cfun->cfg;
4702
  cfg->x_n_basic_blocks++;
4703
  if (bb->index > cfg->x_last_basic_block)
4704
    cfg->x_last_basic_block = bb->index;
4705
 
4706
  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4707
  if ((unsigned) cfg->x_last_basic_block >= old_len)
4708
    {
4709
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4710
      VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4711
      addr = VEC_address (basic_block, cfg->x_basic_block_info);
4712
      memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4713
    }
4714
 
4715
  VEC_replace (basic_block, cfg->x_basic_block_info,
4716
               cfg->x_last_basic_block, bb);
4717
 
4718
  /* The statements in BB need to be associated with a new TREE_BLOCK.
4719
     Labels need to be associated with a new label-to-block map.  */
4720
  memset (&d, 0, sizeof (d));
4721
  d.vars_to_remove = vars_to_remove;
4722
 
4723
  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4724
    {
4725
      tree stmt = bsi_stmt (si);
4726
      int region;
4727
 
4728
      d.from_context = cfun->decl;
4729
      d.to_context = dest_cfun->decl;
4730
      d.remap_decls_p = true;
4731
      d.new_label_map = new_label_map;
4732
      if (TREE_BLOCK (stmt))
4733
        d.block = DECL_INITIAL (dest_cfun->decl);
4734
 
4735
      walk_tree (&stmt, move_stmt_r, &d, NULL);
4736
 
4737
      if (TREE_CODE (stmt) == LABEL_EXPR)
4738
        {
4739
          tree label = LABEL_EXPR_LABEL (stmt);
4740
          int uid = LABEL_DECL_UID (label);
4741
 
4742
          gcc_assert (uid > -1);
4743
 
4744
          old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4745
          if (old_len <= (unsigned) uid)
4746
            {
4747
              new_len = 3 * uid / 2;
4748
              VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4749
                             new_len);
4750
              addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4751
              memset (&addr[old_len], 0,
4752
                      sizeof (basic_block) * (new_len - old_len));
4753
            }
4754
 
4755
          VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4756
          VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4757
 
4758
          gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4759
 
4760
          if (uid >= dest_cfun->last_label_uid)
4761
            dest_cfun->last_label_uid = uid + 1;
4762
        }
4763
      else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4764
        TREE_OPERAND (stmt, 0) =
4765
          build_int_cst (NULL_TREE,
4766
                         TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4767
                         + eh_offset);
4768
 
4769
      region = lookup_stmt_eh_region (stmt);
4770
      if (region >= 0)
4771
        {
4772
          add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4773
          remove_stmt_from_eh_region (stmt);
4774
        }
4775
    }
4776
}
4777
 
4778
/* Examine the statements in BB (which is in SRC_CFUN); find and return
4779
   the outermost EH region.  Use REGION as the incoming base EH region.  */
4780
 
4781
static int
4782
find_outermost_region_in_block (struct function *src_cfun,
4783
                                basic_block bb, int region)
4784
{
4785
  block_stmt_iterator si;
4786
 
4787
  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4788
    {
4789
      tree stmt = bsi_stmt (si);
4790
      int stmt_region;
4791
 
4792
      if (TREE_CODE (stmt) == RESX_EXPR)
4793
        stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4794
      else
4795
        stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4796
      if (stmt_region > 0)
4797
        {
4798
          if (region < 0)
4799
            region = stmt_region;
4800
          else if (stmt_region != region)
4801
            {
4802
              region = eh_region_outermost (src_cfun, stmt_region, region);
4803
              gcc_assert (region != -1);
4804
            }
4805
        }
4806
    }
4807
 
4808
  return region;
4809
}
4810
 
4811
static tree
4812
new_label_mapper (tree decl, void *data)
4813
{
4814
  htab_t hash = (htab_t) data;
4815
  struct tree_map *m;
4816
  void **slot;
4817
 
4818
  gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4819
 
4820
  m = xmalloc (sizeof (struct tree_map));
4821
  m->hash = DECL_UID (decl);
4822
  m->from = decl;
4823
  m->to = create_artificial_label ();
4824
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4825
 
4826
  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4827
  gcc_assert (*slot == NULL);
4828
 
4829
  *slot = m;
4830
 
4831
  return m->to;
4832
}
4833
 
4834
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
4835
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
4836
   single basic block in the original CFG and the new basic block is
4837
   returned.  DEST_CFUN must not have a CFG yet.
4838
 
4839
   Note that the region need not be a pure SESE region.  Blocks inside
4840
   the region may contain calls to abort/exit.  The only restriction
4841
   is that ENTRY_BB should be the only entry point and it must
4842
   dominate EXIT_BB.
4843
 
4844
   All local variables referenced in the region are assumed to be in
4845
   the corresponding BLOCK_VARS and unexpanded variable lists
4846
   associated with DEST_CFUN.  */
4847
 
4848
basic_block
4849
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4850
                        basic_block exit_bb)
4851
{
4852
  VEC(basic_block,heap) *bbs;
4853
  basic_block after, bb, *entry_pred, *exit_succ;
4854
  struct function *saved_cfun;
4855
  int *entry_flag, *exit_flag, eh_offset;
4856
  unsigned i, num_entry_edges, num_exit_edges;
4857
  edge e;
4858
  edge_iterator ei;
4859
  bitmap vars_to_remove;
4860
  htab_t new_label_map;
4861
 
4862
  saved_cfun = cfun;
4863
 
4864
  /* Collect all the blocks in the region.  Manually add ENTRY_BB
4865
     because the dominator walk in gather_blocks_in_sese_region will not add it.  */
4866
  calculate_dominance_info (CDI_DOMINATORS);
4867
 
4868
  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4869
     region.  */
4870
  gcc_assert (entry_bb != exit_bb
4871
              && (!exit_bb
4872
                  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4873
 
4874
  bbs = NULL;
4875
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
4876
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4877
 
4878
  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
4879
     the predecessor edges to ENTRY_BB and the successor edges to
4880
     EXIT_BB so that we can re-attach them to the new basic block that
4881
     will replace the region.  */
4882
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
4883
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4884
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4885
  i = 0;
4886
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4887
    {
4888
      entry_flag[i] = e->flags;
4889
      entry_pred[i++] = e->src;
4890
      remove_edge (e);
4891
    }
4892
 
4893
  if (exit_bb)
4894
    {
4895
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
4896
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
4897
                                           sizeof (basic_block));
4898
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4899
      i = 0;
4900
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4901
        {
4902
          exit_flag[i] = e->flags;
4903
          exit_succ[i++] = e->dest;
4904
          remove_edge (e);
4905
        }
4906
    }
4907
  else
4908
    {
4909
      num_exit_edges = 0;
4910
      exit_succ = NULL;
4911
      exit_flag = NULL;
4912
    }
4913
 
4914
  /* Switch context to the child function to initialize DEST_FN's CFG.  */
4915
  gcc_assert (dest_cfun->cfg == NULL);
4916
  cfun = dest_cfun;
4917
 
4918
  init_empty_tree_cfg ();
4919
 
4920
  /* Initialize EH information for the new function.  */
4921
  eh_offset = 0;
4922
  new_label_map = NULL;
4923
  if (saved_cfun->eh)
4924
    {
4925
      int region = -1;
4926
 
4927
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4928
        region = find_outermost_region_in_block (saved_cfun, bb, region);
4929
 
4930
      init_eh_for_function ();
4931
      if (region != -1)
4932
        {
4933
          new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4934
          eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4935
                                            new_label_map, region, 0);
4936
        }
4937
    }
4938
 
4939
  cfun = saved_cfun;
4940
 
4941
  /* Move blocks from BBS into DEST_CFUN.  */
4942
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
4943
  after = dest_cfun->cfg->x_entry_block_ptr;
4944
  vars_to_remove = BITMAP_ALLOC (NULL);
4945
  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4946
    {
4947
      /* No need to update edge counts on the last block.  It has
4948
         already been updated earlier when we detached the region from
4949
         the original CFG.  */
4950
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4951
                        new_label_map, eh_offset);
4952
      after = bb;
4953
    }
4954
 
4955
  if (new_label_map)
4956
    htab_delete (new_label_map);
4957
 
4958
  /* Remove the variables marked in VARS_TO_REMOVE from
4959
     CFUN->UNEXPANDED_VAR_LIST.  Otherwise, they will be given a
4960
     DECL_RTL in the context of CFUN.  */
4961
  if (!bitmap_empty_p (vars_to_remove))
4962
    {
4963
      tree *p;
4964
 
4965
      for (p = &cfun->unexpanded_var_list; *p; )
4966
        {
4967
          tree var = TREE_VALUE (*p);
4968
          if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4969
            {
4970
              *p = TREE_CHAIN (*p);
4971
              continue;
4972
            }
4973
 
4974
          p = &TREE_CHAIN (*p);
4975
        }
4976
    }
4977
 
4978
  BITMAP_FREE (vars_to_remove);
4979
 
4980
  /* Rewire the entry and exit blocks.  The successor to the entry
4981
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4982
     the child function.  Similarly, the predecessor of DEST_FN's
4983
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
4984
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4985
     various CFG manipulation functions get to the right CFG.
4986
 
4987
     FIXME, this is silly.  The CFG ought to become a parameter to
4988
     these helpers.  */
4989
  cfun = dest_cfun;
4990
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4991
  if (exit_bb)
4992
    make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
4993
  cfun = saved_cfun;
4994
 
4995
  /* Back in the original function, the SESE region has disappeared,
4996
     create a new basic block in its place.  */
4997
  bb = create_empty_bb (entry_pred[0]);
4998
  for (i = 0; i < num_entry_edges; i++)
4999
    make_edge (entry_pred[i], bb, entry_flag[i]);
5000
 
5001
  for (i = 0; i < num_exit_edges; i++)
5002
    make_edge (bb, exit_succ[i], exit_flag[i]);
5003
 
5004
  if (exit_bb)
5005
    {
5006
      free (exit_flag);
5007
      free (exit_succ);
5008
    }
5009
  free (entry_flag);
5010
  free (entry_pred);
5011
  free_dominance_info (CDI_DOMINATORS);
5012
  free_dominance_info (CDI_POST_DOMINATORS);
5013
  VEC_free (basic_block, heap, bbs);
5014
 
5015
  return bb;
5016
}
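/* Note (typical use, not enforced here): this is the outlining primitive
   used when a region of the current function has to become a function of
   its own, e.g. when OpenMP lowering moves the body of a parallel region
   into the child function that the spawned threads will run.  */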
5017
 
5018
 
5019
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).  */
5020
 
5021
void
5022
dump_function_to_file (tree fn, FILE *file, int flags)
5023
{
5024
  tree arg, vars, var;
5025
  bool ignore_topmost_bind = false, any_var = false;
5026
  basic_block bb;
5027
  tree chain;
5028
  struct function *saved_cfun;
5029
 
5030
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5031
 
5032
  arg = DECL_ARGUMENTS (fn);
5033
  while (arg)
5034
    {
5035
      print_generic_expr (file, arg, dump_flags);
5036
      if (TREE_CHAIN (arg))
5037
        fprintf (file, ", ");
5038
      arg = TREE_CHAIN (arg);
5039
    }
5040
  fprintf (file, ")\n");
5041
 
5042
  if (flags & TDF_DETAILS)
5043
    dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5044
  if (flags & TDF_RAW)
5045
    {
5046
      dump_node (fn, TDF_SLIM | flags, file);
5047
      return;
5048
    }
5049
 
5050
  /* Switch CFUN to point to FN.  */
5051
  saved_cfun = cfun;
5052
  cfun = DECL_STRUCT_FUNCTION (fn);
5053
 
5054
  /* When GIMPLE is lowered, the variables are no longer available in
5055
     BIND_EXPRs, so display them separately.  */
5056
  if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5057
    {
5058
      ignore_topmost_bind = true;
5059
 
5060
      fprintf (file, "{\n");
5061
      for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5062
        {
5063
          var = TREE_VALUE (vars);
5064
 
5065
          print_generic_decl (file, var, flags);
5066
          fprintf (file, "\n");
5067
 
5068
          any_var = true;
5069
        }
5070
    }
5071
 
5072
  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5073
    {
5074
      /* Make a CFG based dump.  */
5075
      check_bb_profile (ENTRY_BLOCK_PTR, file);
5076
      if (!ignore_topmost_bind)
5077
        fprintf (file, "{\n");
5078
 
5079
      if (any_var && n_basic_blocks)
5080
        fprintf (file, "\n");
5081
 
5082
      FOR_EACH_BB (bb)
5083
        dump_generic_bb (file, bb, 2, flags);
5084
 
5085
      fprintf (file, "}\n");
5086
      check_bb_profile (EXIT_BLOCK_PTR, file);
5087
    }
5088
  else
5089
    {
5090
      int indent;
5091
 
5092
      /* Make a tree based dump.  */
5093
      chain = DECL_SAVED_TREE (fn);
5094
 
5095
      if (chain && TREE_CODE (chain) == BIND_EXPR)
5096
        {
5097
          if (ignore_topmost_bind)
5098
            {
5099
              chain = BIND_EXPR_BODY (chain);
5100
              indent = 2;
5101
            }
5102
          else
5103
            indent = 0;
5104
        }
5105
      else
5106
        {
5107
          if (!ignore_topmost_bind)
5108
            fprintf (file, "{\n");
5109
          indent = 2;
5110
        }
5111
 
5112
      if (any_var)
5113
        fprintf (file, "\n");
5114
 
5115
      print_generic_stmt_indented (file, chain, flags, indent);
5116
      if (ignore_topmost_bind)
5117
        fprintf (file, "}\n");
5118
    }
5119
 
5120
  fprintf (file, "\n\n");
5121
 
5122
  /* Restore CFUN.  */
5123
  cfun = saved_cfun;
5124
}
5125
 
5126
 
5127
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
5128
 
5129
void
5130
debug_function (tree fn, int flags)
5131
{
5132
  dump_function_to_file (fn, stderr, flags);
5133
}
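Because debug_function writes to stderr, it is convenient to call interactively while stopped inside the compiler.  For example, from gdb one might evaluate the following, assuming current_function_decl points at the function being compiled and passing 0 for FLAGS to get the default dump:

  (gdb) call debug_function (current_function_decl, 0)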
5134
 
5135
 
5136
/* Pretty print of the loops' intermediate representation.  */
5137
static void print_loop (FILE *, struct loop *, int);
5138
static void print_pred_bbs (FILE *, basic_block bb);
5139
static void print_succ_bbs (FILE *, basic_block bb);
5140
 
5141
 
5142
/* Print on FILE the indexes for the predecessors of basic_block BB.  */
5143
 
5144
static void
5145
print_pred_bbs (FILE *file, basic_block bb)
5146
{
5147
  edge e;
5148
  edge_iterator ei;
5149
 
5150
  FOR_EACH_EDGE (e, ei, bb->preds)
5151
    fprintf (file, "bb_%d ", e->src->index);
5152
}
5153
 
5154
 
5155
/* Print on FILE the indexes for the successors of basic_block BB.  */
5156
 
5157
static void
5158
print_succ_bbs (FILE *file, basic_block bb)
5159
{
5160
  edge e;
5161
  edge_iterator ei;
5162
 
5163
  FOR_EACH_EDGE (e, ei, bb->succs)
5164
    fprintf (file, "bb_%d ", e->dest->index);
5165
}
5166
 
5167
 
5168
/* Pretty print LOOP on FILE, indented INDENT spaces.  */
5169
 
5170
static void
5171
print_loop (FILE *file, struct loop *loop, int indent)
5172
{
5173
  char *s_indent;
5174
  basic_block bb;
5175
 
5176
  if (loop == NULL)
5177
    return;
5178
 
5179
  s_indent = (char *) alloca ((size_t) indent + 1);
5180
  memset ((void *) s_indent, ' ', (size_t) indent);
5181
  s_indent[indent] = '\0';
5182
 
5183
  /* Print the loop's header.  */
5184
  fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5185
 
5186
  /* Print the loop's body.  */
5187
  fprintf (file, "%s{\n", s_indent);
5188
  FOR_EACH_BB (bb)
5189
    if (bb->loop_father == loop)
5190
      {
5191
        /* Print the basic_block's header.  */
5192
        fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
5193
        print_pred_bbs (file, bb);
5194
        fprintf (file, "}, succs = {");
5195
        print_succ_bbs (file, bb);
5196
        fprintf (file, "})\n");
5197
 
5198
        /* Print the basic_block's body.  */
5199
        fprintf (file, "%s  {\n", s_indent);
5200
        tree_dump_bb (bb, file, indent + 4);
5201
        fprintf (file, "%s  }\n", s_indent);
5202
      }
5203
 
5204
  print_loop (file, loop->inner, indent + 2);
5205
  fprintf (file, "%s}\n", s_indent);
5206
  print_loop (file, loop->next, indent);
5207
}
5208
 
5209
 
5210
/* Follow a CFG edge from the entry point of the program, and on entry
5211
   to a loop, pretty print the loop structure on FILE.  */
5212
 
5213
void
5214
print_loop_ir (FILE *file)
5215
{
5216
  basic_block bb;
5217
 
5218
  bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5219
  if (bb && bb->loop_father)
5220
    print_loop (file, bb->loop_father, 0);
5221
}
5222
 
5223
 
5224
/* Debugging loops structure at tree level.  */
5225
 
5226
void
5227
debug_loop_ir (void)
5228
{
5229
  print_loop_ir (stderr);
5230
}
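Judging from the format strings in print_loop above, the dump for a function containing a single loop looks roughly like the following (the loop and block numbers are made up for illustration, and the per-block body printed by tree_dump_bb is elided):

loop_1
{
  bb_3 (preds = {bb_2 bb_3 }, succs = {bb_3 bb_4 })
  {
    ... statements dumped by tree_dump_bb ...
  }
}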
5231
 
5232
 
5233
/* Return true if BB ends with a call, possibly followed by some
5234
   instructions that must stay with the call.  Return false
5235
   otherwise.  */
5236
 
5237
static bool
5238
tree_block_ends_with_call_p (basic_block bb)
5239
{
5240
  block_stmt_iterator bsi = bsi_last (bb);
5241
  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5242
}
5243
 
5244
 
5245
/* Return true if BB ends with a conditional branch.  Return false
5246
   otherwise.  */
5247
 
5248
static bool
5249
tree_block_ends_with_condjump_p (basic_block bb)
5250
{
5251
  tree stmt = last_stmt (bb);
5252
  return (stmt && TREE_CODE (stmt) == COND_EXPR);
5253
}
5254
 
5255
 
5256
/* Return true if we need to add a fake edge to exit at statement T.
5257
   Helper function for tree_flow_call_edges_add.  */
5258
 
5259
static bool
5260
need_fake_edge_p (tree t)
5261
{
5262
  tree call;
5263
 
5264
  /* NORETURN and LONGJMP calls already have an edge to exit.
5265
     CONST and PURE calls do not need one.
5266
     We don't currently check for CONST and PURE here, although
5267
     it would be a good idea, because those attributes are
5268
     figured out from the RTL in mark_constant_function, and
5269
     the counter-incrementing code from -fprofile-arcs
5270
     leads to different results from -fbranch-probabilities.  */
5271
  call = get_call_expr_in (t);
5272
  if (call
5273
      && !(call_expr_flags (call) & ECF_NORETURN))
5274
    return true;
5275
 
5276
  if (TREE_CODE (t) == ASM_EXPR
5277
       && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5278
    return true;
5279
 
5280
  return false;
5281
}
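For illustration, both statements in the body of the plain C function below satisfy need_fake_edge_p: the call is neither const nor noreturn, so it might never return control to its caller (it could call exit or longjmp internally), and a volatile asm is treated just as conservatively.  Each therefore gets a fake edge to EXIT so that -fprofile-arcs instrumentation does not assume the rest of the block always executes (the function names are illustrative):

extern void log_state (int);    /* may call exit () or longjmp () inside */

int
instrumented (int x)
{
  log_state (x);                        /* non-const, non-noreturn call   */
  __asm__ volatile ("" : : : "memory"); /* volatile asm, handled likewise */
  return x + 1;
}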
5282
 
5283
 
5284
/* Add fake edges to the function exit for any non-constant and
5285
   non-noreturn calls, and for volatile inline assembly, in the bitmap of
5286
   blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return
5287
   the number of blocks that were split.
5288
 
5289
   The goal is to expose cases in which entering a basic block does
5290
   not imply that all subsequent instructions must be executed.  */
5291
 
5292
static int
5293
tree_flow_call_edges_add (sbitmap blocks)
5294
{
5295
  int i;
5296
  int blocks_split = 0;
5297
  int last_bb = last_basic_block;
5298
  bool check_last_block = false;
5299
 
5300
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
5301
    return 0;
5302
 
5303
  if (! blocks)
5304
    check_last_block = true;
5305
  else
5306
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5307
 
5308
  /* In the last basic block, before epilogue generation, there will be
5309
     a fallthru edge to EXIT.  Special care is required if the last insn
5310
     of the last basic block is a call because make_edge folds duplicate
5311
     edges, which would result in the fallthru edge also being marked
5312
     fake, which would result in the fallthru edge being removed by
5313
     remove_fake_edges, which would result in an invalid CFG.
5314
 
5315
     Moreover, we can't elide the outgoing fake edge, since the block
5316
     profiler needs to take this into account in order to solve the minimal
5317
     spanning tree in the case that the call doesn't return.
5318
 
5319
     Handle this by adding a dummy instruction in a new last basic block.  */
5320
  if (check_last_block)
5321
    {
5322
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5323
      block_stmt_iterator bsi = bsi_last (bb);
5324
      tree t = NULL_TREE;
5325
      if (!bsi_end_p (bsi))
5326
        t = bsi_stmt (bsi);
5327
 
5328
      if (t && need_fake_edge_p (t))
5329
        {
5330
          edge e;
5331
 
5332
          e = find_edge (bb, EXIT_BLOCK_PTR);
5333
          if (e)
5334
            {
5335
              bsi_insert_on_edge (e, build_empty_stmt ());
5336
              bsi_commit_edge_inserts ();
5337
            }
5338
        }
5339
    }
5340
 
5341
  /* Now add fake edges to the function exit for any non-constant
5342
     calls since there is no way that we can determine if they will
5343
     return or not...  */
5344
  for (i = 0; i < last_bb; i++)
5345
    {
5346
      basic_block bb = BASIC_BLOCK (i);
5347
      block_stmt_iterator bsi;
5348
      tree stmt, last_stmt;
5349
 
5350
      if (!bb)
5351
        continue;
5352
 
5353
      if (blocks && !TEST_BIT (blocks, i))
5354
        continue;
5355
 
5356
      bsi = bsi_last (bb);
5357
      if (!bsi_end_p (bsi))
5358
        {
5359
          last_stmt = bsi_stmt (bsi);
5360
          do
5361
            {
5362
              stmt = bsi_stmt (bsi);
5363
              if (need_fake_edge_p (stmt))
5364
                {
5365
                  edge e;
5366
                  /* The handling above of the final block before the
5367
                     epilogue should be enough to verify that there is
5368
                     no edge to the exit block in CFG already.
5369
                     Calling make_edge in such a case would cause us to
5370
                     mark that edge as fake and remove it later.  */
5371
#ifdef ENABLE_CHECKING
5372
                  if (stmt == last_stmt)
5373
                    {
5374
                      e = find_edge (bb, EXIT_BLOCK_PTR);
5375
                      gcc_assert (e == NULL);
5376
                    }
5377
#endif
5378
 
5379
                  /* Note that the following may create a new basic block
5380
                     and renumber the existing basic blocks.  */
5381
                  if (stmt != last_stmt)
5382
                    {
5383
                      e = split_block (bb, stmt);
5384
                      if (e)
5385
                        blocks_split++;
5386
                    }
5387
                  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5388
                }
5389
              bsi_prev (&bsi);
5390
            }
5391
          while (!bsi_end_p (bsi));
5392
        }
5393
    }
5394
 
5395
  if (blocks_split)
5396
    verify_flow_info ();
5397
 
5398
  return blocks_split;
5399
}
5400
 
5401
/* Purge dead abnormal call edges from basic block BB.  */
5402
 
5403
bool
5404
tree_purge_dead_abnormal_call_edges (basic_block bb)
5405
{
5406
  bool changed = tree_purge_dead_eh_edges (bb);
5407
 
5408
  if (current_function_has_nonlocal_label)
5409
    {
5410
      tree stmt = last_stmt (bb);
5411
      edge_iterator ei;
5412
      edge e;
5413
 
5414
      if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5415
        for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5416
          {
5417
            if (e->flags & EDGE_ABNORMAL)
5418
              {
5419
                remove_edge (e);
5420
                changed = true;
5421
              }
5422
            else
5423
              ei_next (&ei);
5424
          }
5425
 
5426
      /* See tree_purge_dead_eh_edges below.  */
5427
      if (changed)
5428
        free_dominance_info (CDI_DOMINATORS);
5429
    }
5430
 
5431
  return changed;
5432
}
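The abnormal edges purged here come, for instance, from GNU C nested functions that perform a non-local goto to a label in their containing function; as long as a call might make such a jump, the call site keeps an abnormal edge.  A small illustrative example (GNU C extension, names made up):

void
outer (int x)
{
  /* Nested function that jumps non-locally back into OUTER.  */
  void inner (void)
  {
    if (x < 0)
      goto recover;
  }

  inner ();     /* this call site carries an abnormal edge */
  return;

 recover:
  /* reached only through the non-local goto */ ;
}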
5433
 
5434
/* Purge dead EH edges from basic block BB.  */
5435
 
5436
bool
5437
tree_purge_dead_eh_edges (basic_block bb)
5438
{
5439
  bool changed = false;
5440
  edge e;
5441
  edge_iterator ei;
5442
  tree stmt = last_stmt (bb);
5443
 
5444
  if (stmt && tree_can_throw_internal (stmt))
5445
    return false;
5446
 
5447
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5448
    {
5449
      if (e->flags & EDGE_EH)
5450
        {
5451
          remove_edge (e);
5452
          changed = true;
5453
        }
5454
      else
5455
        ei_next (&ei);
5456
    }
5457
 
5458
  /* Removal of dead EH edges might change dominators of not
5459
     just immediate successors.  E.g. when bb1 is changed so that
5460
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5461
     eh edges purged by this function in:
5462
 
5463
          / \
5464
         v   v
5465
         1-->2
5466
        / \  |
5467
       v   v |
5468
       3-->4 |
5469
        \    v
5470
         --->5
5471
             |
5472
             -
5473
     idom(bb5) must be recomputed.  For now just free the dominance
5474
     info.  */
5475
  if (changed)
5476
    free_dominance_info (CDI_DOMINATORS);
5477
 
5478
  return changed;
5479
}
5480
 
5481
bool
5482
tree_purge_all_dead_eh_edges (bitmap blocks)
5483
{
5484
  bool changed = false;
5485
  unsigned i;
5486
  bitmap_iterator bi;
5487
 
5488
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5489
    {
5490
      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5491
    }
5492
 
5493
  return changed;
5494
}
5495
 
5496
/* This function is called whenever a new edge is created or
5497
   redirected.  */
5498
 
5499
static void
5500
tree_execute_on_growing_pred (edge e)
5501
{
5502
  basic_block bb = e->dest;
5503
 
5504
  if (phi_nodes (bb))
5505
    reserve_phi_args_for_new_edge (bb);
5506
}
5507
 
5508
/* This function is called immediately before edge E is removed from
5509
   the edge vector E->dest->preds.  */
5510
 
5511
static void
5512
tree_execute_on_shrinking_pred (edge e)
5513
{
5514
  if (phi_nodes (e->dest))
5515
    remove_phi_args (e);
5516
}
5517
 
5518
/*---------------------------------------------------------------------------
5519
  Helper functions for Loop versioning
5520
  ---------------------------------------------------------------------------*/
5521
 
5522
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
5523
   of 'first'. Both of them are dominated by 'new_head' basic block. When
5524
   'new_head' was created by splitting 'second's incoming edge, it received
5525
   phi arguments on that edge from split_edge ().  Later, an additional edge
5526
   'e' was created to connect 'new_head' and 'first'.  This routine adds,
5527
   on that additional edge 'e', the same phi args that the new_head-to-second
5528
   edge received as part of the edge splitting.
5529
*/
5530
 
5531
static void
5532
tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5533
                                basic_block new_head, edge e)
5534
{
5535
  tree phi1, phi2;
5536
  edge e2 = find_edge (new_head, second);
5537
 
5538
  /* Because NEW_HEAD has been created by splitting SECOND's incoming
5539
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
5540
  gcc_assert (e2 != NULL);
5541
 
5542
  /* Browse all 'second' basic block phi nodes and add phi args to
5543
     edge 'e' for 'first' head. PHI args are always in correct order.  */
5544
 
5545
  for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5546
       phi2 && phi1;
5547
       phi2 = PHI_CHAIN (phi2),  phi1 = PHI_CHAIN (phi1))
5548
    {
5549
      tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5550
      add_phi_arg (phi1, def, e);
5551
    }
5552
}
5553
 
5554
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
5555
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
5556
   the destination of the ELSE part.  */
5557
static void
5558
tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5559
                            basic_block cond_bb, void *cond_e)
5560
{
5561
  block_stmt_iterator bsi;
5562
  tree goto1 = NULL_TREE;
5563
  tree goto2 = NULL_TREE;
5564
  tree new_cond_expr = NULL_TREE;
5565
  tree cond_expr = (tree) cond_e;
5566
  edge e0;
5567
 
5568
  /* Build new conditional expr */
5569
  goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5570
  goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5571
  new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5572
 
5573
  /* Add new cond in cond_bb.  */
5574
  bsi = bsi_start (cond_bb);
5575
  bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5576
  /* Adjust edges appropriately to connect new head with first head
5577
     as well as second head.  */
5578
  e0 = single_succ_edge (cond_bb);
5579
  e0->flags &= ~EDGE_FALLTHRU;
5580
  e0->flags |= EDGE_FALSE_VALUE;
5581
}
5582
 
5583
struct cfg_hooks tree_cfg_hooks = {
5584
  "tree",
5585
  tree_verify_flow_info,
5586
  tree_dump_bb,                 /* dump_bb  */
5587
  create_bb,                    /* create_basic_block  */
5588
  tree_redirect_edge_and_branch,/* redirect_edge_and_branch  */
5589
  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force  */
5590
  remove_bb,                    /* delete_basic_block  */
5591
  tree_split_block,             /* split_block  */
5592
  tree_move_block_after,        /* move_block_after  */
5593
  tree_can_merge_blocks_p,      /* can_merge_blocks_p  */
5594
  tree_merge_blocks,            /* merge_blocks  */
5595
  tree_predict_edge,            /* predict_edge  */
5596
  tree_predicted_by_p,          /* predicted_by_p  */
5597
  tree_can_duplicate_bb_p,      /* can_duplicate_block_p  */
5598
  tree_duplicate_bb,            /* duplicate_block  */
5599
  tree_split_edge,              /* split_edge  */
5600
  tree_make_forwarder_block,    /* make_forwarder_block  */
5601
  NULL,                         /* tidy_fallthru_edge  */
5602
  tree_block_ends_with_call_p,  /* block_ends_with_call_p */
5603
  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5604
  tree_flow_call_edges_add,     /* flow_call_edges_add */
5605
  tree_execute_on_growing_pred, /* execute_on_growing_pred */
5606
  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5607
  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5608
  tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5609
  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5610
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5611
  flush_pending_stmts           /* flush_pending_stmts */
5612
};
5613
 
5614
 
5615
/* Split all critical edges.  */
5616
 
5617
static unsigned int
5618
split_critical_edges (void)
5619
{
5620
  basic_block bb;
5621
  edge e;
5622
  edge_iterator ei;
5623
 
5624
  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5625
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
5626
     mappings around the calls to split_edge.  */
5627
  start_recording_case_labels ();
5628
  FOR_ALL_BB (bb)
5629
    {
5630
      FOR_EACH_EDGE (e, ei, bb->succs)
5631
        if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5632
          {
5633
            split_edge (e);
5634
          }
5635
    }
5636
  end_recording_case_labels ();
5637
  return 0;
5638
}
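A critical edge is one whose source block has more than one successor and whose destination has more than one predecessor, so there is no block along that edge where code could be inserted for just that path.  In an illustrative C fragment such as the one below, the fall-through edge from the first test directly to the join point (which is also the second test) is critical, and this pass splits it by inserting an empty block, which is what PROP_no_crit_edges promises to later passes:

int
f (int a, int b)
{
  int r = 0;

  if (a)
    r = 1;      /* the edge from the 'if (a)' test straight to the join
                   point below is critical: the test has two successors
                   and the join has two predecessors                    */
  if (b)
    r += 2;
  return r;
}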
5639
 
5640
struct tree_opt_pass pass_split_crit_edges =
5641
{
5642
  "crited",                          /* name */
5643
  NULL,                          /* gate */
5644
  split_critical_edges,          /* execute */
5645
  NULL,                          /* sub */
5646
  NULL,                          /* next */
5647
  0,                             /* static_pass_number */
5648
  TV_TREE_SPLIT_EDGES,           /* tv_id */
5649
  PROP_cfg,                      /* properties_required */
5650
  PROP_no_crit_edges,            /* properties_provided */
5651
  0,                             /* properties_destroyed */
5652
  0,                             /* todo_flags_start */
5653
  TODO_dump_func,                /* todo_flags_finish */
5654
 
5655
};
5656
 
5657
 
5658
/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5659
   a temporary, making sure to register it for renaming if necessary,
5660
   and finally return the temporary.  Put the statements to compute
5661
   EXP before the current statement in BSI.  */
5662
 
5663
tree
5664
gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5665
{
5666
  tree t, new_stmt, orig_stmt;
5667
 
5668
  if (is_gimple_val (exp))
5669
    return exp;
5670
 
5671
  t = make_rename_temp (type, NULL);
5672
  new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5673
 
5674
  orig_stmt = bsi_stmt (*bsi);
5675
  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5676
  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5677
 
5678
  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5679
  if (in_ssa_p)
5680
    mark_new_vars_to_rename (new_stmt);
5681
 
5682
  return t;
5683
}
5684
 
5685
/* Build a ternary operation and gimplify it.  Emit code before BSI.
5686
   Return the gimple_val holding the result.  */
5687
 
5688
tree
5689
gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5690
                 tree type, tree a, tree b, tree c)
5691
{
5692
  tree ret;
5693
 
5694
  ret = fold_build3 (code, type, a, b, c);
5695
  STRIP_NOPS (ret);
5696
 
5697
  return gimplify_val (bsi, type, ret);
5698
}
5699
 
5700
/* Build a binary operation and gimplify it.  Emit code before BSI.
5701
   Return the gimple_val holding the result.  */
5702
 
5703
tree
5704
gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5705
                 tree type, tree a, tree b)
5706
{
5707
  tree ret;
5708
 
5709
  ret = fold_build2 (code, type, a, b);
5710
  STRIP_NOPS (ret);
5711
 
5712
  return gimplify_val (bsi, type, ret);
5713
}
5714
 
5715
/* Build a unary operation and gimplify it.  Emit code before BSI.
5716
   Return the gimple_val holding the result.  */
5717
 
5718
tree
5719
gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5720
                 tree a)
5721
{
5722
  tree ret;
5723
 
5724
  ret = fold_build1 (code, type, a);
5725
  STRIP_NOPS (ret);
5726
 
5727
  return gimplify_val (bsi, type, ret);
5728
}
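As a hedged sketch of how these helpers are used from a pass (BSI, A and B below are assumed to be provided by the caller; they are not defined in this file): to materialize the sum of two existing operands as a GIMPLE value immediately before the statement at BSI, one would write roughly:

  tree sum = gimplify_build2 (bsi, PLUS_EXPR, TREE_TYPE (a), a, b);

The helper folds the expression, and gimplify_val then either returns it directly when it is already a valid GIMPLE value or stores it into a fresh renaming temporary inserted before BSI.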
5729
 
5730
 
5731
 
5732
/* Emit return warnings.  */
5733
 
5734
static unsigned int
5735
execute_warn_function_return (void)
5736
{
5737
#ifdef USE_MAPPED_LOCATION
5738
  source_location location;
5739
#else
5740
  location_t *locus;
5741
#endif
5742
  tree last;
5743
  edge e;
5744
  edge_iterator ei;
5745
 
5746
  /* If we have a path to EXIT, then we do return.  */
5747
  if (TREE_THIS_VOLATILE (cfun->decl)
5748
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5749
    {
5750
#ifdef USE_MAPPED_LOCATION
5751
      location = UNKNOWN_LOCATION;
5752
#else
5753
      locus = NULL;
5754
#endif
5755
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5756
        {
5757
          last = last_stmt (e->src);
5758
          if (TREE_CODE (last) == RETURN_EXPR
5759
#ifdef USE_MAPPED_LOCATION
5760
              && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5761
#else
5762
              && (locus = EXPR_LOCUS (last)) != NULL)
5763
#endif
5764
            break;
5765
        }
5766
#ifdef USE_MAPPED_LOCATION
5767
      if (location == UNKNOWN_LOCATION)
5768
        location = cfun->function_end_locus;
5769
      warning (0, "%H%<noreturn%> function does return", &location);
5770
#else
5771
      if (!locus)
5772
        locus = &cfun->function_end_locus;
5773
      warning (0, "%H%<noreturn%> function does return", locus);
5774
#endif
5775
    }
5776
 
5777
  /* If we see "return;" in some basic block, then we do reach the end
5778
     without returning a value.  */
5779
  else if (warn_return_type
5780
           && !TREE_NO_WARNING (cfun->decl)
5781
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5782
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5783
    {
5784
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5785
        {
5786
          tree last = last_stmt (e->src);
5787
          if (TREE_CODE (last) == RETURN_EXPR
5788
              && TREE_OPERAND (last, 0) == NULL
5789
              && !TREE_NO_WARNING (last))
5790
            {
5791
#ifdef USE_MAPPED_LOCATION
5792
              location = EXPR_LOCATION (last);
5793
              if (location == UNKNOWN_LOCATION)
5794
                  location = cfun->function_end_locus;
5795
              warning (0, "%Hcontrol reaches end of non-void function", &location);
5796
#else
5797
              locus = EXPR_LOCUS (last);
5798
              if (!locus)
5799
                locus = &cfun->function_end_locus;
5800
              warning (0, "%Hcontrol reaches end of non-void function", locus);
5801
#endif
5802
              TREE_NO_WARNING (cfun->decl) = 1;
5803
              break;
5804
            }
5805
        }
5806
    }
5807
  return 0;
5808
}
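Two small self-contained C functions that would draw the diagnostics above (the names are illustrative): FATAL is declared noreturn yet has a path to EXIT when CODE is zero, and PICK is non-void but can reach the end of its body without returning a value:

extern void bail (void) __attribute__ ((noreturn));

__attribute__ ((noreturn)) void
fatal (int code)
{
  if (code)
    bail ();
  /* control can reach here: "'noreturn' function does return" */
}

int
pick (int x)
{
  if (x > 0)
    return x;
  /* falling off the end: "control reaches end of non-void function"
     (emitted under -Wreturn-type) */
}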
5809
 
5810
 
5811
/* Given a basic block B which ends with a conditional and has
5812
   precisely two successors, determine which of the edges is taken if
5813
   the conditional is true and which is taken if the conditional is
5814
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
5815
 
5816
void
5817
extract_true_false_edges_from_block (basic_block b,
5818
                                     edge *true_edge,
5819
                                     edge *false_edge)
5820
{
5821
  edge e = EDGE_SUCC (b, 0);
5822
 
5823
  if (e->flags & EDGE_TRUE_VALUE)
5824
    {
5825
      *true_edge = e;
5826
      *false_edge = EDGE_SUCC (b, 1);
5827
    }
5828
  else
5829
    {
5830
      *false_edge = e;
5831
      *true_edge = EDGE_SUCC (b, 1);
5832
    }
5833
}
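A hedged sketch of a typical caller (BB is assumed to end in a COND_EXPR with exactly two successors, as the comment above requires; it is not defined here):

  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  /* true_edge->dest is reached when the condition evaluates to true,
     false_edge->dest when it evaluates to false.  */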
5834
 
5835
struct tree_opt_pass pass_warn_function_return =
5836
{
5837
  NULL,                                 /* name */
5838
  NULL,                                 /* gate */
5839
  execute_warn_function_return,         /* execute */
5840
  NULL,                                 /* sub */
5841
  NULL,                                 /* next */
5842
  0,                                     /* static_pass_number */
5843
  0,                                     /* tv_id */
5844
  PROP_cfg,                             /* properties_required */
5845
  0,                                     /* properties_provided */
5846
  0,                                     /* properties_destroyed */
5847
  0,                                     /* todo_flags_start */
5848
  0,                                     /* todo_flags_finish */
5849
 
5850
};
5851
 
5852
/* Emit noreturn warnings.  */
5853
 
5854
static unsigned int
5855
execute_warn_function_noreturn (void)
5856
{
5857
  if (warn_missing_noreturn
5858
      && !TREE_THIS_VOLATILE (cfun->decl)
5859
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5860
      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5861
    warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5862
             "for attribute %<noreturn%>",
5863
             cfun->decl);
5864
  return 0;
5865
}
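An illustrative C function that -Wmissing-noreturn would flag: no execution path reaches EXIT, yet the declaration does not carry the noreturn attribute (the function name is made up):

extern void abort (void) __attribute__ ((noreturn));

void
die (const char *msg)
{
  (void) msg;
  abort ();     /* no edge to EXIT, so DIE is a candidate for noreturn */
}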
5866
 
5867
struct tree_opt_pass pass_warn_function_noreturn =
5868
{
5869
  NULL,                                 /* name */
5870
  NULL,                                 /* gate */
5871
  execute_warn_function_noreturn,       /* execute */
5872
  NULL,                                 /* sub */
5873
  NULL,                                 /* next */
5874
  0,                                     /* static_pass_number */
5875
  0,                                     /* tv_id */
5876
  PROP_cfg,                             /* properties_required */
5877
  0,                                     /* properties_provided */
5878
  0,                                     /* properties_destroyed */
5879
  0,                                     /* todo_flags_start */
5880
  0,                                     /* todo_flags_finish */
5881
 
5882
};
