OpenCores Subversion repository: openrisc/trunk — gnu-stable/gcc-4.5.1/gcc/tree-cfg.c (blame view, rev 816)
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk

/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010  Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                   EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                              gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* Some calls are known not to return.  */
              fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
               /* A GIMPLE_ASSIGN may throw internally and thus be considered
                  control-altering. */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
               fallthru = true;
               break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                                (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && strcmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
            }
          break;

        default:
          break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
          int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
          bool has_default;

          /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
             away */
          if (!CASE_LOW (gimple_switch_default_label (stmt))
              && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
              default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
              has_default = true;
            }
          else
            has_default = false;

          /* Look for possible opportunities to merge cases.  */
          if (has_default)
            i = 1;
          else
            i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
              base_case = gimple_switch_label (stmt, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case)
                          ? CASE_HIGH (base_case)
                          : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
                  tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! gimple_switch_label (stmt, j))
                j++;
              gimple_switch_set_label (stmt, i,
                                       gimple_switch_label (stmt, j++));
            }

          gcc_assert (new_size <= old_size);
          gimple_switch_set_num_labels (stmt, new_size);
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user labels.  */
      if (!DECL_ARTIFICIAL (lab))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in update_stmt.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);
          update_stmt (stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */
1564
 
1565
static void
1566
gimple_merge_blocks (basic_block a, basic_block b)
1567
{
1568
  gimple_stmt_iterator last, gsi, psi;
1569
  gimple_seq phis = phi_nodes (b);
1570
 
1571
  if (dump_file)
1572
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1573
 
1574
  /* Remove all single-valued PHI nodes from block B of the form
1575
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1576
  gsi = gsi_last_bb (a);
1577
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
1578
    {
1579
      gimple phi = gsi_stmt (psi);
1580
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1581
      gimple copy;
1582
      bool may_replace_uses = !is_gimple_reg (def)
1583
                              || may_propagate_copy (def, use);
1584
 
1585
      /* In case we maintain loop closed ssa form, do not propagate arguments
1586
         of loop exit phi nodes.  */
1587
      if (current_loops
1588
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1589
          && is_gimple_reg (def)
1590
          && TREE_CODE (use) == SSA_NAME
1591
          && a->loop_father != b->loop_father)
1592
        may_replace_uses = false;
1593
 
1594
      if (!may_replace_uses)
1595
        {
1596
          gcc_assert (is_gimple_reg (def));
1597
 
1598
          /* Note that just emitting the copies is fine -- there is no problem
1599
             with ordering of phi nodes.  This is because A is the single
1600
             predecessor of B, therefore results of the phi nodes cannot
1601
             appear as arguments of the phi nodes.  */
1602
          copy = gimple_build_assign (def, use);
1603
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1604
          remove_phi_node (&psi, false);
1605
        }
1606
      else
1607
        {
1608
          /* If we deal with a PHI for virtual operands, we can simply
1609
             propagate these without fussing with folding or updating
1610
             the stmt.  */
1611
          if (!is_gimple_reg (def))
1612
            {
1613
              imm_use_iterator iter;
1614
              use_operand_p use_p;
1615
              gimple stmt;
1616
 
1617
              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1618
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1619
                  SET_USE (use_p, use);
1620
 
1621
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1622
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1623
            }
1624
          else
1625
            replace_uses_by (def, use);
1626
 
1627
          remove_phi_node (&psi, true);
1628
        }
1629
    }
1630
 
1631
  /* Ensure that B follows A.  */
1632
  move_block_after (b, a);
1633
 
1634
  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1635
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1636
 
1637
  /* Remove labels from B and set gimple_bb to A for other statements.  */
1638
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1639
    {
1640
      gimple stmt = gsi_stmt (gsi);
1641
      if (gimple_code (stmt) == GIMPLE_LABEL)
1642
        {
1643
          tree label = gimple_label_label (stmt);
1644
          int lp_nr;
1645
 
1646
          gsi_remove (&gsi, false);
1647
 
1648
          /* Now that we can thread computed gotos, we might have
1649
             a situation where we have a forced label in block B.
1650
             However, the label at the start of block B might still be
1651
             used in other ways (think about the runtime checking for
1652
             Fortran assigned gotos).  So we cannot just delete the
1653
             label.  Instead we move the label to the start of block A.  */
1654
          if (FORCED_LABEL (label))
1655
            {
1656
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1657
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1658
            }
1659
 
1660
          lp_nr = EH_LANDING_PAD_NR (label);
1661
          if (lp_nr)
1662
            {
1663
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1664
              lp->post_landing_pad = NULL;
1665
            }
1666
        }
1667
      else
1668
        {
1669
          gimple_set_bb (stmt, a);
1670
          gsi_next (&gsi);
1671
        }
1672
    }
1673
 
1674
  /* Merge the sequences.  */
1675
  last = gsi_last_bb (a);
1676
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1677
  set_bb_seq (b, NULL);
1678
 
1679
  if (cfgcleanup_altered_bbs)
1680
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1681
}
1682
 
1683
 
1684
/* Return the one of the two successors of BB that is not reachable by a
1685
   complex edge, if there is one.  Else, return BB.  We use
1686
   this in optimizations that use post-dominators for their heuristics,
1687
   to catch the cases in C++ where function calls are involved.  */
1688
 
1689
basic_block
1690
single_noncomplex_succ (basic_block bb)
1691
{
1692
  edge e0, e1;
1693
  if (EDGE_COUNT (bb->succs) != 2)
1694
    return bb;
1695
 
1696
  e0 = EDGE_SUCC (bb, 0);
1697
  e1 = EDGE_SUCC (bb, 1);
1698
  if (e0->flags & EDGE_COMPLEX)
1699
    return e1->dest;
1700
  if (e1->flags & EDGE_COMPLEX)
1701
    return e0->dest;
1702
 
1703
  return bb;
1704
}
1705
 
1706
/* CALL is a GIMPLE_CALL.  Set the cfun->calls_* flags for calls that need special handling (alloca, setjmp-like calls).  */
1707
 
1708
void
1709
notice_special_calls (gimple call)
1710
{
1711
  int flags = gimple_call_flags (call);
1712
 
1713
  if (flags & ECF_MAY_BE_ALLOCA)
1714
    cfun->calls_alloca = true;
1715
  if (flags & ECF_RETURNS_TWICE)
1716
    cfun->calls_setjmp = true;
1717
}
1718
 
1719
 
1720
/* Clear flags set by notice_special_calls.  Used by dead code removal
1721
   to update the flags.  */
1722
 
1723
void
1724
clear_special_calls (void)
1725
{
1726
  cfun->calls_alloca = false;
1727
  cfun->calls_setjmp = false;
1728
}
1729
 
1730
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1731
 
1732
static void
1733
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1734
{
1735
  /* Since this block is no longer reachable, we can just delete all
1736
     of its PHI nodes.  */
1737
  remove_phi_nodes (bb);
1738
 
1739
  /* Remove edges to BB's successors.  */
1740
  while (EDGE_COUNT (bb->succs) > 0)
1741
    remove_edge (EDGE_SUCC (bb, 0));
1742
}
1743
 
1744
 
1745
/* Remove statements of basic block BB.  */
1746
 
1747
static void
1748
remove_bb (basic_block bb)
1749
{
1750
  gimple_stmt_iterator i;
1751
 
1752
  if (dump_file)
1753
    {
1754
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
1755
      if (dump_flags & TDF_DETAILS)
1756
        {
1757
          dump_bb (bb, dump_file, 0);
1758
          fprintf (dump_file, "\n");
1759
        }
1760
    }
1761
 
1762
  if (current_loops)
1763
    {
1764
      struct loop *loop = bb->loop_father;
1765
 
1766
      /* If a loop gets removed, clean up the information associated
1767
         with it.  */
1768
      if (loop->latch == bb
1769
          || loop->header == bb)
1770
        free_numbers_of_iterations_estimates_loop (loop);
1771
    }
1772
 
1773
  /* Remove all the instructions in the block.  */
1774
  if (bb_seq (bb) != NULL)
1775
    {
1776
      /* Walk backwards so as to get a chance to substitute all
1777
         released DEFs into debug stmts.  See
1778
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1779
         details.  */
1780
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1781
        {
1782
          gimple stmt = gsi_stmt (i);
1783
          if (gimple_code (stmt) == GIMPLE_LABEL
1784
              && (FORCED_LABEL (gimple_label_label (stmt))
1785
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
1786
            {
1787
              basic_block new_bb;
1788
              gimple_stmt_iterator new_gsi;
1789
 
1790
              /* A non-reachable non-local label may still be referenced.
1791
                 But it no longer needs to carry the extra semantics of
1792
                 non-locality.  */
1793
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
1794
                {
1795
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1796
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
1797
                }
1798
 
1799
              new_bb = bb->prev_bb;
1800
              new_gsi = gsi_start_bb (new_bb);
1801
              gsi_remove (&i, false);
1802
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1803
            }
1804
          else
1805
            {
1806
              /* Release SSA definitions if we are in SSA.  Note that we
1807
                 may be called when not in SSA.  For example,
1808
                 final_cleanup calls this function via
1809
                 cleanup_tree_cfg.  */
1810
              if (gimple_in_ssa_p (cfun))
1811
                release_defs (stmt);
1812
 
1813
              gsi_remove (&i, true);
1814
            }
1815
 
1816
          if (gsi_end_p (i))
1817
            i = gsi_last_bb (bb);
1818
          else
1819
            gsi_prev (&i);
1820
        }
1821
    }
1822
 
1823
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
1824
  bb->il.gimple = NULL;
1825
}
1826
 
1827
 
1828
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1829
   predicate VAL, return the edge that will be taken out of the block.
1830
   If VAL does not match a unique edge, NULL is returned.  */
1831
 
1832
edge
1833
find_taken_edge (basic_block bb, tree val)
1834
{
1835
  gimple stmt;
1836
 
1837
  stmt = last_stmt (bb);
1838
 
1839
  gcc_assert (stmt);
1840
  gcc_assert (is_ctrl_stmt (stmt));
1841
 
1842
  if (val == NULL)
1843
    return NULL;
1844
 
1845
  if (!is_gimple_min_invariant (val))
1846
    return NULL;
1847
 
1848
  if (gimple_code (stmt) == GIMPLE_COND)
1849
    return find_taken_edge_cond_expr (bb, val);
1850
 
1851
  if (gimple_code (stmt) == GIMPLE_SWITCH)
1852
    return find_taken_edge_switch_expr (bb, val);
1853
 
1854
  if (computed_goto_p (stmt))
1855
    {
1856
      /* Only optimize if the argument is a label; if the argument is
1857
         not a label then we cannot construct a proper CFG.
1858
 
1859
         It may be the case that we only need to allow the LABEL_REF to
1860
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1861
         appear inside a LABEL_EXPR just to be safe.  */
1862
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1863
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1864
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1865
      return NULL;
1866
    }
1867
 
1868
  gcc_unreachable ();
1869
}
1870
 
1871
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1872
   statement, determine which of the outgoing edges will be taken out of the
1873
   block.  Return NULL if either edge may be taken.  */
1874
 
1875
static edge
1876
find_taken_edge_computed_goto (basic_block bb, tree val)
1877
{
1878
  basic_block dest;
1879
  edge e = NULL;
1880
 
1881
  dest = label_to_block (val);
1882
  if (dest)
1883
    {
1884
      e = find_edge (bb, dest);
1885
      gcc_assert (e != NULL);
1886
    }
1887
 
1888
  return e;
1889
}
1890
 
1891
/* Given a constant value VAL and the entry block BB to a COND_EXPR
1892
   statement, determine which of the two edges will be taken out of the
1893
   block.  Return NULL if either edge may be taken.  */
1894
 
1895
static edge
1896
find_taken_edge_cond_expr (basic_block bb, tree val)
1897
{
1898
  edge true_edge, false_edge;
1899
 
1900
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1901
 
1902
  gcc_assert (TREE_CODE (val) == INTEGER_CST);
1903
  return (integer_zerop (val) ? false_edge : true_edge);
1904
}
1905
 
1906
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1907
   statement, determine which edge will be taken out of the block.  Return
1908
   NULL if any edge may be taken.  */
1909
 
1910
static edge
1911
find_taken_edge_switch_expr (basic_block bb, tree val)
1912
{
1913
  basic_block dest_bb;
1914
  edge e;
1915
  gimple switch_stmt;
1916
  tree taken_case;
1917
 
1918
  switch_stmt = last_stmt (bb);
1919
  taken_case = find_case_label_for_value (switch_stmt, val);
1920
  dest_bb = label_to_block (CASE_LABEL (taken_case));
1921
 
1922
  e = find_edge (bb, dest_bb);
1923
  gcc_assert (e);
1924
  return e;
1925
}
1926
 
1927
 
1928
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1929
   We can make optimal use here of the fact that the case labels are
1930
   sorted: We can do a binary search for a case matching VAL.  */
1931
 
1932
static tree
1933
find_case_label_for_value (gimple switch_stmt, tree val)
1934
{
1935
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1936
  tree default_case = gimple_switch_default_label (switch_stmt);
1937
 
1938
  for (low = 0, high = n; high - low > 1; )
1939
    {
1940
      size_t i = (high + low) / 2;
1941
      tree t = gimple_switch_label (switch_stmt, i);
1942
      int cmp;
1943
 
1944
      /* Cache the result of comparing CASE_LOW and val.  */
1945
      cmp = tree_int_cst_compare (CASE_LOW (t), val);
1946
 
1947
      if (cmp > 0)
1948
        high = i;
1949
      else
1950
        low = i;
1951
 
1952
      if (CASE_HIGH (t) == NULL)
1953
        {
1954
          /* A single-valued case label.  */
1955
          if (cmp == 0)
1956
            return t;
1957
        }
1958
      else
1959
        {
1960
          /* A case range.  We can only handle integer ranges.  */
1961
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1962
            return t;
1963
        }
1964
    }
1965
 
1966
  return default_case;
1967
}
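 
/* A minimal, self-contained sketch of the binary-search-over-sorted-ranges
   technique that find_case_label_for_value above relies on.  Everything
   below (struct case_range, find_range_label) is hypothetical illustration,
   not part of the GCC sources; it assumes the ranges are sorted by their
   low bound and are non-overlapping.  */
 
#include <stddef.h>
 
struct case_range { long low, high; int label; };
 
/* Return the label of the range in RANGES[0..N-1] containing VAL, or
   DEFAULT_LABEL if none does.  As in the loop above, LOW/HIGH bracket the
   candidate on the low bounds and the full range check happens only on the
   midpoint probe.  */
static int
find_range_label (const struct case_range *ranges, size_t n,
                  long val, int default_label)
{
  size_t low = 0, high = n;
 
  while (high - low > 1)
    {
      size_t i = (high + low) / 2;
      const struct case_range *r = &ranges[i];
 
      if (r->low > val)
        high = i;
      else
        low = i;
 
      if (r->low <= val && val <= r->high)
        return r->label;
    }
 
  /* Index LOW may never have been probed (it stays 0 when VAL sorts before
     every midpoint), so check it explicitly.  */
  if (n > 0 && ranges[low].low <= val && val <= ranges[low].high)
    return ranges[low].label;
 
  return default_label;
}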
1968
 
1969
 
1970
/* Dump a basic block on stderr.  */
1971
 
1972
void
1973
gimple_debug_bb (basic_block bb)
1974
{
1975
  gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
1976
}
1977
 
1978
 
1979
/* Dump basic block with index N on stderr.  */
1980
 
1981
basic_block
1982
gimple_debug_bb_n (int n)
1983
{
1984
  gimple_debug_bb (BASIC_BLOCK (n));
1985
  return BASIC_BLOCK (n);
1986
}
1987
 
1988
 
1989
/* Dump the CFG on stderr.
1990
 
1991
   FLAGS are the same as those used by the tree dumping functions
1992
   (see TDF_* in tree-pass.h).  */
1993
 
1994
void
1995
gimple_debug_cfg (int flags)
1996
{
1997
  gimple_dump_cfg (stderr, flags);
1998
}
1999
 
2000
 
2001
/* Dump the program showing basic block boundaries on the given FILE.
2002
 
2003
   FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2004
   tree.h).  */
2005
 
2006
void
2007
gimple_dump_cfg (FILE *file, int flags)
2008
{
2009
  if (flags & TDF_DETAILS)
2010
    {
2011
      const char *funcname
2012
        = lang_hooks.decl_printable_name (current_function_decl, 2);
2013
 
2014
      fputc ('\n', file);
2015
      fprintf (file, ";; Function %s\n\n", funcname);
2016
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2017
               n_basic_blocks, n_edges, last_basic_block);
2018
 
2019
      brief_dump_cfg (file);
2020
      fprintf (file, "\n");
2021
    }
2022
 
2023
  if (flags & TDF_STATS)
2024
    dump_cfg_stats (file);
2025
 
2026
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2027
}
2028
 
2029
 
2030
/* Dump CFG statistics on FILE.  */
2031
 
2032
void
2033
dump_cfg_stats (FILE *file)
2034
{
2035
  static long max_num_merged_labels = 0;
2036
  unsigned long size, total = 0;
2037
  long num_edges;
2038
  basic_block bb;
2039
  const char * const fmt_str   = "%-30s%-13s%12s\n";
2040
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2041
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2042
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
2043
  const char *funcname
2044
    = lang_hooks.decl_printable_name (current_function_decl, 2);
2045
 
2046
 
2047
  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2048
 
2049
  fprintf (file, "---------------------------------------------------------\n");
2050
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2051
  fprintf (file, fmt_str, "", "  instances  ", "used ");
2052
  fprintf (file, "---------------------------------------------------------\n");
2053
 
2054
  size = n_basic_blocks * sizeof (struct basic_block_def);
2055
  total += size;
2056
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2057
           SCALE (size), LABEL (size));
2058
 
2059
  num_edges = 0;
2060
  FOR_EACH_BB (bb)
2061
    num_edges += EDGE_COUNT (bb->succs);
2062
  size = num_edges * sizeof (struct edge_def);
2063
  total += size;
2064
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2065
 
2066
  fprintf (file, "---------------------------------------------------------\n");
2067
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2068
           LABEL (total));
2069
  fprintf (file, "---------------------------------------------------------\n");
2070
  fprintf (file, "\n");
2071
 
2072
  if (cfg_stats.num_merged_labels > max_num_merged_labels)
2073
    max_num_merged_labels = cfg_stats.num_merged_labels;
2074
 
2075
  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2076
           cfg_stats.num_merged_labels, max_num_merged_labels);
2077
 
2078
  fprintf (file, "\n");
2079
}
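 
/* An illustrative, self-contained sketch of the row-formatting technique
   dump_cfg_stats uses: accumulate count * sizeof (element) per row and
   print it with a fixed-width format string.  scale_bytes and
   print_stats_row are hypothetical stand-ins (GCC's own SCALE/LABEL macros
   are defined elsewhere); only the C standard library is assumed.  */
 
#include <stdio.h>
 
/* Scale BYTES down to bytes, kilobytes or megabytes and report the unit.  */
static unsigned long
scale_bytes (unsigned long bytes, char *unit)
{
  if (bytes >= 1024 * 1024)
    {
      *unit = 'M';
      return bytes / (1024 * 1024);
    }
  if (bytes >= 1024)
    {
      *unit = 'k';
      return bytes / 1024;
    }
  *unit = ' ';
  return bytes;
}
 
/* Print one "name / count / memory" row in the same spirit as fmt_str_1
   above: a left-justified label, a right-justified count and a scaled
   memory figure with its unit letter.  */
static void
print_stats_row (FILE *file, const char *name,
                 unsigned long count, unsigned long elt_size)
{
  char unit;
  unsigned long scaled = scale_bytes (count * elt_size, &unit);
  fprintf (file, "%-30s%13lu%11lu%c\n", name, count, scaled, unit);
}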
2080
 
2081
 
2082
/* Dump CFG statistics on stderr.  Keep extern so that it's always
2083
   linked in the final executable.  */
2084
 
2085
void
2086
debug_cfg_stats (void)
2087
{
2088
  dump_cfg_stats (stderr);
2089
}
2090
 
2091
 
2092
/* Dump the flowgraph to a .vcg FILE.  */
2093
 
2094
static void
2095
gimple_cfg2vcg (FILE *file)
2096
{
2097
  edge e;
2098
  edge_iterator ei;
2099
  basic_block bb;
2100
  const char *funcname
2101
    = lang_hooks.decl_printable_name (current_function_decl, 2);
2102
 
2103
  /* Write the file header.  */
2104
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
2105
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2106
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2107
 
2108
  /* Write blocks and edges.  */
2109
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2110
    {
2111
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2112
               e->dest->index);
2113
 
2114
      if (e->flags & EDGE_FAKE)
2115
        fprintf (file, " linestyle: dotted priority: 10");
2116
      else
2117
        fprintf (file, " linestyle: solid priority: 100");
2118
 
2119
      fprintf (file, " }\n");
2120
    }
2121
  fputc ('\n', file);
2122
 
2123
  FOR_EACH_BB (bb)
2124
    {
2125
      enum gimple_code head_code, end_code;
2126
      const char *head_name, *end_name;
2127
      int head_line = 0;
2128
      int end_line = 0;
2129
      gimple first = first_stmt (bb);
2130
      gimple last = last_stmt (bb);
2131
 
2132
      if (first)
2133
        {
2134
          head_code = gimple_code (first);
2135
          head_name = gimple_code_name[head_code];
2136
          head_line = get_lineno (first);
2137
        }
2138
      else
2139
        head_name = "no-statement";
2140
 
2141
      if (last)
2142
        {
2143
          end_code = gimple_code (last);
2144
          end_name = gimple_code_name[end_code];
2145
          end_line = get_lineno (last);
2146
        }
2147
      else
2148
        end_name = "no-statement";
2149
 
2150
      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2151
               bb->index, bb->index, head_name, head_line, end_name,
2152
               end_line);
2153
 
2154
      FOR_EACH_EDGE (e, ei, bb->succs)
2155
        {
2156
          if (e->dest == EXIT_BLOCK_PTR)
2157
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2158
          else
2159
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2160
 
2161
          if (e->flags & EDGE_FAKE)
2162
            fprintf (file, " priority: 10 linestyle: dotted");
2163
          else
2164
            fprintf (file, " priority: 100 linestyle: solid");
2165
 
2166
          fprintf (file, " }\n");
2167
        }
2168
 
2169
      if (bb->next_bb != EXIT_BLOCK_PTR)
2170
        fputc ('\n', file);
2171
    }
2172
 
2173
  fputs ("}\n\n", file);
2174
}
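 
/* An illustrative, self-contained sketch of the VCG text format emitted by
   gimple_cfg2vcg above: a graph block containing node: and edge: records.
   emit_tiny_vcg is a hypothetical example for a hard-coded two-edge graph,
   not part of the GCC sources.  */
 
#include <stdio.h>
 
/* Emit ENTRY -> 0 -> EXIT in VCG syntax: one node record per block and one
   edge record per edge; a dotted linestyle would mark fake edges.  */
static void
emit_tiny_vcg (FILE *file)
{
  fprintf (file, "graph: { title: \"example\"\n");
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"0\" label: \"#0\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
  fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"0\""
                 " linestyle: solid priority: 100 }\n");
  fprintf (file, "edge: { sourcename: \"0\" targetname: \"EXIT\""
                 " linestyle: solid priority: 100 }\n");
  fputs ("}\n", file);
}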
2175
 
2176
 
2177
 
2178
/*---------------------------------------------------------------------------
2179
                             Miscellaneous helpers
2180
---------------------------------------------------------------------------*/
2181
 
2182
/* Return true if T represents a stmt that always transfers control.  */
2183
 
2184
bool
2185
is_ctrl_stmt (gimple t)
2186
{
2187
  switch (gimple_code (t))
2188
    {
2189
    case GIMPLE_COND:
2190
    case GIMPLE_SWITCH:
2191
    case GIMPLE_GOTO:
2192
    case GIMPLE_RETURN:
2193
    case GIMPLE_RESX:
2194
      return true;
2195
    default:
2196
      return false;
2197
    }
2198
}
2199
 
2200
 
2201
/* Return true if T is a statement that may alter the flow of control
2202
   (e.g., a call to a non-returning function).  */
2203
 
2204
bool
2205
is_ctrl_altering_stmt (gimple t)
2206
{
2207
  gcc_assert (t);
2208
 
2209
  switch (gimple_code (t))
2210
    {
2211
    case GIMPLE_CALL:
2212
      {
2213
        int flags = gimple_call_flags (t);
2214
 
2215
        /* A non-pure/const call alters control flow if the current
2216
           function has nonlocal labels.  */
2217
        if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2218
          return true;
2219
 
2220
        /* A call also alters control flow if it does not return.  */
2221
        if (flags & ECF_NORETURN)
2222
          return true;
2223
      }
2224
      break;
2225
 
2226
    case GIMPLE_EH_DISPATCH:
2227
      /* EH_DISPATCH branches to the individual catch handlers at
2228
         this level of a try or allowed-exceptions region.  It can
2229
         fallthru to the next statement as well.  */
2230
      return true;
2231
 
2232
    case GIMPLE_ASM:
2233
      if (gimple_asm_nlabels (t) > 0)
2234
        return true;
2235
      break;
2236
 
2237
    CASE_GIMPLE_OMP:
2238
      /* OpenMP directives alter control flow.  */
2239
      return true;
2240
 
2241
    default:
2242
      break;
2243
    }
2244
 
2245
  /* If a statement can throw, it alters control flow.  */
2246
  return stmt_can_throw_internal (t);
2247
}
2248
 
2249
 
2250
/* Return true if T is a simple local goto.  */
2251
 
2252
bool
2253
simple_goto_p (gimple t)
2254
{
2255
  return (gimple_code (t) == GIMPLE_GOTO
2256
          && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2257
}
2258
 
2259
 
2260
/* Return true if T can make an abnormal transfer of control flow.
2261
   Transfers of control flow associated with EH are excluded.  */
2262
 
2263
bool
2264
stmt_can_make_abnormal_goto (gimple t)
2265
{
2266
  if (computed_goto_p (t))
2267
    return true;
2268
  if (is_gimple_call (t))
2269
    return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2270
  return false;
2271
}
2272
 
2273
 
2274
/* Return true if STMT should start a new basic block.  PREV_STMT is
2275
   the statement preceding STMT.  It is used when STMT is a label or a
2276
   case label.  Labels should only start a new basic block if their
2277
   previous statement wasn't a label.  Otherwise, a sequence of labels
2278
   would generate unnecessary basic blocks that only contain a single
2279
   label.  */
2280
 
2281
static inline bool
2282
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2283
{
2284
  if (stmt == NULL)
2285
    return false;
2286
 
2287
  /* Labels start a new basic block only if the preceding statement
2288
     wasn't a label of the same type.  This prevents the creation of
2289
     consecutive blocks that have nothing but a single label.  */
2290
  if (gimple_code (stmt) == GIMPLE_LABEL)
2291
    {
2292
      /* Nonlocal and computed GOTO targets always start a new block.  */
2293
      if (DECL_NONLOCAL (gimple_label_label (stmt))
2294
          || FORCED_LABEL (gimple_label_label (stmt)))
2295
        return true;
2296
 
2297
      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2298
        {
2299
          if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2300
            return true;
2301
 
2302
          cfg_stats.num_merged_labels++;
2303
          return false;
2304
        }
2305
      else
2306
        return true;
2307
    }
2308
 
2309
  return false;
2310
}
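 
/* An illustrative, self-contained sketch of the label-merging rule above:
   a run of ordinary labels is folded into a single block, while nonlocal or
   forced labels (and labels immediately following one) always open a fresh
   block.  The statement kinds and starts_block_p below are hypothetical,
   not part of the GCC sources.  */
 
#include <stdbool.h>
 
enum stmt_kind { STMT_PLAIN, STMT_LABEL, STMT_NONLOCAL_LABEL };
 
/* Decide whether STMT starts a new block, given the preceding statement
   kind (PREV is NULL when there is none), mirroring stmt_starts_bb_p.  */
static bool
starts_block_p (enum stmt_kind stmt, const enum stmt_kind *prev)
{
  if (stmt == STMT_NONLOCAL_LABEL)
    return true;
  if (stmt == STMT_LABEL)
    {
      /* A label following another label is merged into the same block,
         unless that previous label was a nonlocal/forced one.  */
      if (prev && (*prev == STMT_LABEL || *prev == STMT_NONLOCAL_LABEL))
        return *prev == STMT_NONLOCAL_LABEL;
      return true;
    }
  return false;
}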
2311
 
2312
 
2313
/* Return true if T should end a basic block.  */
2314
 
2315
bool
2316
stmt_ends_bb_p (gimple t)
2317
{
2318
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2319
}
2320
 
2321
/* Remove block annotations and other data structures.  */
2322
 
2323
void
2324
delete_tree_cfg_annotations (void)
2325
{
2326
  label_to_block_map = NULL;
2327
}
2328
 
2329
 
2330
/* Return the first statement in basic block BB.  */
2331
 
2332
gimple
2333
first_stmt (basic_block bb)
2334
{
2335
  gimple_stmt_iterator i = gsi_start_bb (bb);
2336
  gimple stmt = NULL;
2337
 
2338
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2339
    {
2340
      gsi_next (&i);
2341
      stmt = NULL;
2342
    }
2343
  return stmt;
2344
}
2345
 
2346
/* Return the first non-label statement in basic block BB.  */
2347
 
2348
static gimple
2349
first_non_label_stmt (basic_block bb)
2350
{
2351
  gimple_stmt_iterator i = gsi_start_bb (bb);
2352
  while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2353
    gsi_next (&i);
2354
  return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2355
}
2356
 
2357
/* Return the last statement in basic block BB.  */
2358
 
2359
gimple
2360
last_stmt (basic_block bb)
2361
{
2362
  gimple_stmt_iterator i = gsi_last_bb (bb);
2363
  gimple stmt = NULL;
2364
 
2365
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2366
    {
2367
      gsi_prev (&i);
2368
      stmt = NULL;
2369
    }
2370
  return stmt;
2371
}
2372
 
2373
/* Return the last statement of an otherwise empty block.  Return NULL
2374
   if the block is totally empty, or if it contains more than one
2375
   statement.  */
2376
 
2377
gimple
2378
last_and_only_stmt (basic_block bb)
2379
{
2380
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2381
  gimple last, prev;
2382
 
2383
  if (gsi_end_p (i))
2384
    return NULL;
2385
 
2386
  last = gsi_stmt (i);
2387
  gsi_prev_nondebug (&i);
2388
  if (gsi_end_p (i))
2389
    return last;
2390
 
2391
  /* Empty statements should no longer appear in the instruction stream.
2392
     Everything that might have appeared before should be deleted by
2393
     remove_useless_stmts, and the optimizers should just gsi_remove
2394
     instead of smashing with build_empty_stmt.
2395
 
2396
     Thus the only thing that should appear here in a block containing
2397
     one executable statement is a label.  */
2398
  prev = gsi_stmt (i);
2399
  if (gimple_code (prev) == GIMPLE_LABEL)
2400
    return last;
2401
  else
2402
    return NULL;
2403
}
2404
 
2405
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2406
 
2407
static void
2408
reinstall_phi_args (edge new_edge, edge old_edge)
2409
{
2410
  edge_var_map_vector v;
2411
  edge_var_map *vm;
2412
  int i;
2413
  gimple_stmt_iterator phis;
2414
 
2415
  v = redirect_edge_var_map_vector (old_edge);
2416
  if (!v)
2417
    return;
2418
 
2419
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
2420
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2421
       i++, gsi_next (&phis))
2422
    {
2423
      gimple phi = gsi_stmt (phis);
2424
      tree result = redirect_edge_var_map_result (vm);
2425
      tree arg = redirect_edge_var_map_def (vm);
2426
 
2427
      gcc_assert (result == gimple_phi_result (phi));
2428
 
2429
      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2430
    }
2431
 
2432
  redirect_edge_var_map_clear (old_edge);
2433
}
2434
 
2435
/* Returns the basic block after which the new basic block created
2436
   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2437
   near its "logical" location.  This is of most help to humans looking
2438
   at debugging dumps.  */
2439
 
2440
static basic_block
2441
split_edge_bb_loc (edge edge_in)
2442
{
2443
  basic_block dest = edge_in->dest;
2444
  basic_block dest_prev = dest->prev_bb;
2445
 
2446
  if (dest_prev)
2447
    {
2448
      edge e = find_edge (dest_prev, dest);
2449
      if (e && !(e->flags & EDGE_COMPLEX))
2450
        return edge_in->src;
2451
    }
2452
  return dest_prev;
2453
}
2454
 
2455
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
2456
   Abort on abnormal edges.  */
2457
 
2458
static basic_block
2459
gimple_split_edge (edge edge_in)
2460
{
2461
  basic_block new_bb, after_bb, dest;
2462
  edge new_edge, e;
2463
 
2464
  /* Abnormal edges cannot be split.  */
2465
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2466
 
2467
  dest = edge_in->dest;
2468
 
2469
  after_bb = split_edge_bb_loc (edge_in);
2470
 
2471
  new_bb = create_empty_bb (after_bb);
2472
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
2473
  new_bb->count = edge_in->count;
2474
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2475
  new_edge->probability = REG_BR_PROB_BASE;
2476
  new_edge->count = edge_in->count;
2477
 
2478
  e = redirect_edge_and_branch (edge_in, new_bb);
2479
  gcc_assert (e == edge_in);
2480
  reinstall_phi_args (new_edge, e);
2481
 
2482
  return new_bb;
2483
}
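 
/* An illustrative, self-contained sketch of edge splitting on a toy
   adjacency-matrix graph: the original edge is redirected to a fresh block,
   which then falls through to the old destination, mirroring what
   gimple_split_edge does with redirect_edge_and_branch and EDGE_FALLTHRU.
   struct toy_cfg and split_toy_edge are hypothetical, not GCC code.  */
 
#include <stdbool.h>
 
#define MAX_BLOCKS 16
 
/* Toy CFG: edge[u][v] is true when block U has an edge to block V.  */
struct toy_cfg
{
  bool edge[MAX_BLOCKS][MAX_BLOCKS];
  int num_blocks;
};
 
/* Split the edge SRC -> DEST by inserting a new block on it.  Returns the
   new block's index, or -1 if the edge does not exist or the graph is
   full.  */
static int
split_toy_edge (struct toy_cfg *cfg, int src, int dest)
{
  int new_bb;
 
  if (!cfg->edge[src][dest] || cfg->num_blocks >= MAX_BLOCKS)
    return -1;
 
  new_bb = cfg->num_blocks++;
  cfg->edge[src][dest] = false;      /* redirect the original edge ...  */
  cfg->edge[src][new_bb] = true;     /* ... to the new block,           */
  cfg->edge[new_bb][dest] = true;    /* which falls through to DEST.    */
  return new_bb;
}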
2484
 
2485
/* Callback for walk_tree; check that all elements with address taken are
2486
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2487
   inside a PHI node.  */
2488
 
2489
static tree
2490
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2491
{
2492
  tree t = *tp, x;
2493
 
2494
  if (TYPE_P (t))
2495
    *walk_subtrees = 0;
2496
 
2497
  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
2498
#define CHECK_OP(N, MSG) \
2499
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))                \
2500
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2501
 
2502
  switch (TREE_CODE (t))
2503
    {
2504
    case SSA_NAME:
2505
      if (SSA_NAME_IN_FREE_LIST (t))
2506
        {
2507
          error ("SSA name in freelist but still referenced");
2508
          return *tp;
2509
        }
2510
      break;
2511
 
2512
    case INDIRECT_REF:
2513
      x = TREE_OPERAND (t, 0);
2514
      if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2515
        {
2516
          error ("Indirect reference's operand is not a register or a constant.");
2517
          return x;
2518
        }
2519
      break;
2520
 
2521
    case ASSERT_EXPR:
2522
      x = fold (ASSERT_EXPR_COND (t));
2523
      if (x == boolean_false_node)
2524
        {
2525
          error ("ASSERT_EXPR with an always-false condition");
2526
          return *tp;
2527
        }
2528
      break;
2529
 
2530
    case MODIFY_EXPR:
2531
      error ("MODIFY_EXPR not expected while having tuples.");
2532
      return *tp;
2533
 
2534
    case ADDR_EXPR:
2535
      {
2536
        bool old_constant;
2537
        bool old_side_effects;
2538
        bool new_constant;
2539
        bool new_side_effects;
2540
 
2541
        gcc_assert (is_gimple_address (t));
2542
 
2543
        old_constant = TREE_CONSTANT (t);
2544
        old_side_effects = TREE_SIDE_EFFECTS (t);
2545
 
2546
        recompute_tree_invariant_for_addr_expr (t);
2547
        new_side_effects = TREE_SIDE_EFFECTS (t);
2548
        new_constant = TREE_CONSTANT (t);
2549
 
2550
        if (old_constant != new_constant)
2551
          {
2552
            error ("constant not recomputed when ADDR_EXPR changed");
2553
            return t;
2554
          }
2555
        if (old_side_effects != new_side_effects)
2556
          {
2557
            error ("side effects not recomputed when ADDR_EXPR changed");
2558
            return t;
2559
          }
2560
 
2561
        /* Skip any references (they will be checked when we recurse down the
2562
           tree) and ensure that any variable used as a prefix is marked
2563
           addressable.  */
2564
        for (x = TREE_OPERAND (t, 0);
2565
             handled_component_p (x);
2566
             x = TREE_OPERAND (x, 0))
2567
          ;
2568
 
2569
        if (!(TREE_CODE (x) == VAR_DECL
2570
              || TREE_CODE (x) == PARM_DECL
2571
              || TREE_CODE (x) == RESULT_DECL))
2572
          return NULL;
2573
        if (!TREE_ADDRESSABLE (x))
2574
          {
2575
            error ("address taken, but ADDRESSABLE bit not set");
2576
            return x;
2577
          }
2578
        if (DECL_GIMPLE_REG_P (x))
2579
          {
2580
            error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2581
            return x;
2582
          }
2583
 
2584
        break;
2585
      }
2586
 
2587
    case COND_EXPR:
2588
      x = COND_EXPR_COND (t);
2589
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2590
        {
2591
          error ("non-integral used in condition");
2592
          return x;
2593
        }
2594
      if (!is_gimple_condexpr (x))
2595
        {
2596
          error ("invalid conditional operand");
2597
          return x;
2598
        }
2599
      break;
2600
 
2601
    case NON_LVALUE_EXPR:
2602
        gcc_unreachable ();
2603
 
2604
    CASE_CONVERT:
2605
    case FIX_TRUNC_EXPR:
2606
    case FLOAT_EXPR:
2607
    case NEGATE_EXPR:
2608
    case ABS_EXPR:
2609
    case BIT_NOT_EXPR:
2610
    case TRUTH_NOT_EXPR:
2611
      CHECK_OP (0, "invalid operand to unary operator");
2612
      break;
2613
 
2614
    case REALPART_EXPR:
2615
    case IMAGPART_EXPR:
2616
    case COMPONENT_REF:
2617
    case ARRAY_REF:
2618
    case ARRAY_RANGE_REF:
2619
    case BIT_FIELD_REF:
2620
    case VIEW_CONVERT_EXPR:
2621
      /* We have a nest of references.  Verify that each of the operands
2622
         that determine where to reference is either a constant or a variable,
2623
         verify that the base is valid, and then show we've already checked
2624
         the subtrees.  */
2625
      while (handled_component_p (t))
2626
        {
2627
          if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2628
            CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2629
          else if (TREE_CODE (t) == ARRAY_REF
2630
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
2631
            {
2632
              CHECK_OP (1, "invalid array index");
2633
              if (TREE_OPERAND (t, 2))
2634
                CHECK_OP (2, "invalid array lower bound");
2635
              if (TREE_OPERAND (t, 3))
2636
                CHECK_OP (3, "invalid array stride");
2637
            }
2638
          else if (TREE_CODE (t) == BIT_FIELD_REF)
2639
            {
2640
              if (!host_integerp (TREE_OPERAND (t, 1), 1)
2641
                  || !host_integerp (TREE_OPERAND (t, 2), 1))
2642
                {
2643
                  error ("invalid position or size operand to BIT_FIELD_REF");
2644
                  return t;
2645
                }
2646
              else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2647
                       && (TYPE_PRECISION (TREE_TYPE (t))
2648
                           != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2649
                {
2650
                  error ("integral result type precision does not match "
2651
                         "field size of BIT_FIELD_REF");
2652
                  return t;
2653
                }
2654
              if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2655
                  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2656
                      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2657
                {
2658
                  error ("mode precision of non-integral result does not "
2659
                         "match field size of BIT_FIELD_REF");
2660
                  return t;
2661
                }
2662
            }
2663
 
2664
          t = TREE_OPERAND (t, 0);
2665
        }
2666
 
2667
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2668
        {
2669
          error ("invalid reference prefix");
2670
          return t;
2671
        }
2672
      *walk_subtrees = 0;
2673
      break;
2674
    case PLUS_EXPR:
2675
    case MINUS_EXPR:
2676
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
2677
         POINTER_PLUS_EXPR. */
2678
      if (POINTER_TYPE_P (TREE_TYPE (t)))
2679
        {
2680
          error ("invalid operand to plus/minus, type is a pointer");
2681
          return t;
2682
        }
2683
      CHECK_OP (0, "invalid operand to binary operator");
2684
      CHECK_OP (1, "invalid operand to binary operator");
2685
      break;
2686
 
2687
    case POINTER_PLUS_EXPR:
2688
      /* Check to make sure the first operand is a pointer or reference type. */
2689
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2690
        {
2691
          error ("invalid operand to pointer plus, first operand is not a pointer");
2692
          return t;
2693
        }
2694
      /* Check to make sure the second operand is an integer with type of
2695
         sizetype.  */
2696
      if (!useless_type_conversion_p (sizetype,
2697
                                     TREE_TYPE (TREE_OPERAND (t, 1))))
2698
        {
2699
          error ("invalid operand to pointer plus, second operand is not an "
2700
                 "integer with type of sizetype.");
2701
          return t;
2702
        }
2703
      /* FALLTHROUGH */
2704
    case LT_EXPR:
2705
    case LE_EXPR:
2706
    case GT_EXPR:
2707
    case GE_EXPR:
2708
    case EQ_EXPR:
2709
    case NE_EXPR:
2710
    case UNORDERED_EXPR:
2711
    case ORDERED_EXPR:
2712
    case UNLT_EXPR:
2713
    case UNLE_EXPR:
2714
    case UNGT_EXPR:
2715
    case UNGE_EXPR:
2716
    case UNEQ_EXPR:
2717
    case LTGT_EXPR:
2718
    case MULT_EXPR:
2719
    case TRUNC_DIV_EXPR:
2720
    case CEIL_DIV_EXPR:
2721
    case FLOOR_DIV_EXPR:
2722
    case ROUND_DIV_EXPR:
2723
    case TRUNC_MOD_EXPR:
2724
    case CEIL_MOD_EXPR:
2725
    case FLOOR_MOD_EXPR:
2726
    case ROUND_MOD_EXPR:
2727
    case RDIV_EXPR:
2728
    case EXACT_DIV_EXPR:
2729
    case MIN_EXPR:
2730
    case MAX_EXPR:
2731
    case LSHIFT_EXPR:
2732
    case RSHIFT_EXPR:
2733
    case LROTATE_EXPR:
2734
    case RROTATE_EXPR:
2735
    case BIT_IOR_EXPR:
2736
    case BIT_XOR_EXPR:
2737
    case BIT_AND_EXPR:
2738
      CHECK_OP (0, "invalid operand to binary operator");
2739
      CHECK_OP (1, "invalid operand to binary operator");
2740
      break;
2741
 
2742
    case CONSTRUCTOR:
2743
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2744
        *walk_subtrees = 0;
2745
      break;
2746
 
2747
    default:
2748
      break;
2749
    }
2750
  return NULL;
2751
 
2752
#undef CHECK_OP
2753
}
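 
/* An illustrative, self-contained sketch of the do { ... } while (0) idiom
   that the CHECK_OP macro above relies on: wrapping the body this way makes
   the macro expand to a single statement, so it composes safely with an
   unbraced if/else.  REQUIRE_NONNULL and copy_first_char are hypothetical
   examples, not part of the GCC sources.  */
 
#include <stdio.h>
 
/* Report MSG and bail out of the enclosing function when PTR is NULL.  */
#define REQUIRE_NONNULL(PTR, MSG) \
  do { if ((PTR) == NULL)                                       \
       { fprintf (stderr, "%s\n", (MSG)); return -1; } } while (0)
 
/* Example use: each check reads like an ordinary statement.  */
static int
copy_first_char (const char *src, char *dst)
{
  REQUIRE_NONNULL (src, "missing source");
  REQUIRE_NONNULL (dst, "missing destination");
  *dst = *src;
  return 0;
}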
2754
 
2755
 
2756
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2757
   Returns true if there is an error, otherwise false.  */
2758
 
2759
static bool
2760
verify_types_in_gimple_min_lval (tree expr)
2761
{
2762
  tree op;
2763
 
2764
  if (is_gimple_id (expr))
2765
    return false;
2766
 
2767
  if (!INDIRECT_REF_P (expr)
2768
      && TREE_CODE (expr) != TARGET_MEM_REF)
2769
    {
2770
      error ("invalid expression for min lvalue");
2771
      return true;
2772
    }
2773
 
2774
  /* TARGET_MEM_REFs are strange beasts.  */
2775
  if (TREE_CODE (expr) == TARGET_MEM_REF)
2776
    return false;
2777
 
2778
  op = TREE_OPERAND (expr, 0);
2779
  if (!is_gimple_val (op))
2780
    {
2781
      error ("invalid operand in indirect reference");
2782
      debug_generic_stmt (op);
2783
      return true;
2784
    }
2785
  if (!useless_type_conversion_p (TREE_TYPE (expr),
2786
                                  TREE_TYPE (TREE_TYPE (op))))
2787
    {
2788
      error ("type mismatch in indirect reference");
2789
      debug_generic_stmt (TREE_TYPE (expr));
2790
      debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2791
      return true;
2792
    }
2793
 
2794
  return false;
2795
}
2796
 
2797
/* Verify if EXPR is a valid GIMPLE reference expression.  If
2798
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
2799
   if there is an error, otherwise false.  */
2800
 
2801
static bool
2802
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2803
{
2804
  while (handled_component_p (expr))
2805
    {
2806
      tree op = TREE_OPERAND (expr, 0);
2807
 
2808
      if (TREE_CODE (expr) == ARRAY_REF
2809
          || TREE_CODE (expr) == ARRAY_RANGE_REF)
2810
        {
2811
          if (!is_gimple_val (TREE_OPERAND (expr, 1))
2812
              || (TREE_OPERAND (expr, 2)
2813
                  && !is_gimple_val (TREE_OPERAND (expr, 2)))
2814
              || (TREE_OPERAND (expr, 3)
2815
                  && !is_gimple_val (TREE_OPERAND (expr, 3))))
2816
            {
2817
              error ("invalid operands to array reference");
2818
              debug_generic_stmt (expr);
2819
              return true;
2820
            }
2821
        }
2822
 
2823
      /* Verify if the reference array element types are compatible.  */
2824
      if (TREE_CODE (expr) == ARRAY_REF
2825
          && !useless_type_conversion_p (TREE_TYPE (expr),
2826
                                         TREE_TYPE (TREE_TYPE (op))))
2827
        {
2828
          error ("type mismatch in array reference");
2829
          debug_generic_stmt (TREE_TYPE (expr));
2830
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2831
          return true;
2832
        }
2833
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
2834
          && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2835
                                         TREE_TYPE (TREE_TYPE (op))))
2836
        {
2837
          error ("type mismatch in array range reference");
2838
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2839
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2840
          return true;
2841
        }
2842
 
2843
      if ((TREE_CODE (expr) == REALPART_EXPR
2844
           || TREE_CODE (expr) == IMAGPART_EXPR)
2845
          && !useless_type_conversion_p (TREE_TYPE (expr),
2846
                                         TREE_TYPE (TREE_TYPE (op))))
2847
        {
2848
          error ("type mismatch in real/imagpart reference");
2849
          debug_generic_stmt (TREE_TYPE (expr));
2850
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2851
          return true;
2852
        }
2853
 
2854
      if (TREE_CODE (expr) == COMPONENT_REF
2855
          && !useless_type_conversion_p (TREE_TYPE (expr),
2856
                                         TREE_TYPE (TREE_OPERAND (expr, 1))))
2857
        {
2858
          error ("type mismatch in component reference");
2859
          debug_generic_stmt (TREE_TYPE (expr));
2860
          debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2861
          return true;
2862
        }
2863
 
2864
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2865
        {
2866
          /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2867
             that their operand is not an SSA name or an invariant when
2868
             requiring an lvalue (this usually means there is an SRA or IPA-SRA
2869
             bug).  Otherwise there is nothing to verify; gross mismatches at
2870
             most invoke undefined behavior.  */
2871
          if (require_lvalue
2872
              && (TREE_CODE (op) == SSA_NAME
2873
                  || is_gimple_min_invariant (op)))
2874
            {
2875
              error ("Conversion of an SSA_NAME on the left hand side.");
2876
              debug_generic_stmt (expr);
2877
              return true;
2878
            }
2879
          else if (!handled_component_p (op))
2880
            return false;
2881
        }
2882
 
2883
      expr = op;
2884
    }
2885
 
2886
  return ((require_lvalue || !is_gimple_min_invariant (expr))
2887
          && verify_types_in_gimple_min_lval (expr));
2888
}
2889
 
2890
/* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
2891
   list of pointer-to types that is trivially convertible to DEST.  */
2892
 
2893
static bool
2894
one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2895
{
2896
  tree src;
2897
 
2898
  if (!TYPE_POINTER_TO (src_obj))
2899
    return true;
2900
 
2901
  for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2902
    if (useless_type_conversion_p (dest, src))
2903
      return true;
2904
 
2905
  return false;
2906
}
2907
 
2908
/* Return true if TYPE1 is a fixed-point type and if conversions to and
2909
   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
2910
 
2911
static bool
2912
valid_fixed_convert_types_p (tree type1, tree type2)
2913
{
2914
  return (FIXED_POINT_TYPE_P (type1)
2915
          && (INTEGRAL_TYPE_P (type2)
2916
              || SCALAR_FLOAT_TYPE_P (type2)
2917
              || FIXED_POINT_TYPE_P (type2)));
2918
}
2919
 
2920
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
2921
   is a problem, otherwise false.  */
2922
 
2923
static bool
2924
verify_gimple_call (gimple stmt)
2925
{
2926
  tree fn = gimple_call_fn (stmt);
2927
  tree fntype;
2928
  unsigned i;
2929
 
2930
  if (TREE_CODE (fn) != OBJ_TYPE_REF
2931
      && !is_gimple_val (fn))
2932
    {
2933
      error ("invalid function in gimple call");
2934
      debug_generic_stmt (fn);
2935
      return true;
2936
    }
2937
 
2938
  if (!POINTER_TYPE_P (TREE_TYPE  (fn))
2939
      || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2940
          && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2941
    {
2942
      error ("non-function in gimple call");
2943
      return true;
2944
    }
2945
 
2946
  if (gimple_call_lhs (stmt)
2947
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2948
          || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2949
    {
2950
      error ("invalid LHS in gimple call");
2951
      return true;
2952
    }
2953
 
2954
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2955
    {
2956
      error ("LHS in noreturn call");
2957
      return true;
2958
    }
2959
 
2960
  fntype = TREE_TYPE (TREE_TYPE (fn));
2961
  if (gimple_call_lhs (stmt)
2962
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
2963
                                     TREE_TYPE (fntype))
2964
      /* ???  At least C++ misses conversions at assignments from
2965
         void * call results.
2966
         ???  Java is completely off.  Especially with functions
2967
         returning java.lang.Object.
2968
         For now simply allow arbitrary pointer type conversions.  */
2969
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
2970
           && POINTER_TYPE_P (TREE_TYPE (fntype))))
2971
    {
2972
      error ("invalid conversion in gimple call");
2973
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
2974
      debug_generic_stmt (TREE_TYPE (fntype));
2975
      return true;
2976
    }
2977
 
2978
  if (gimple_call_chain (stmt)
2979
      && !is_gimple_val (gimple_call_chain (stmt)))
2980
    {
2981
      error ("invalid static chain in gimple call");
2982
      debug_generic_stmt (gimple_call_chain (stmt));
2983
      return true;
2984
    }
2985
 
2986
  /* If there is a static chain argument, this should not be an indirect
2987
     call, and the decl should have DECL_STATIC_CHAIN set.  */
2988
  if (gimple_call_chain (stmt))
2989
    {
2990
      if (TREE_CODE (fn) != ADDR_EXPR
2991
          || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
2992
        {
2993
          error ("static chain in indirect gimple call");
2994
          return true;
2995
        }
2996
      fn = TREE_OPERAND (fn, 0);
2997
 
2998
      if (!DECL_STATIC_CHAIN (fn))
2999
        {
3000
          error ("static chain with function that doesn't use one");
3001
          return true;
3002
        }
3003
    }
3004
 
3005
  /* ???  The C frontend passes unpromoted arguments in case it
3006
     didn't see a function declaration before the call.  So for now
3007
     leave the call arguments mostly unverified.  Once we gimplify
3008
     unit-at-a-time we have a chance to fix this.  */
3009
 
3010
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3011
    {
3012
      tree arg = gimple_call_arg (stmt, i);
3013
      if (!is_gimple_operand (arg))
3014
        {
3015
          error ("invalid argument to gimple call");
3016
          debug_generic_expr (arg);
3017
        }
3018
    }
3019
 
3020
  return false;
3021
}
3022
 
3023
/* Verifies the gimple comparison with the result type TYPE and
3024
   the operands OP0 and OP1.  */
3025
 
3026
static bool
3027
verify_gimple_comparison (tree type, tree op0, tree op1)
3028
{
3029
  tree op0_type = TREE_TYPE (op0);
3030
  tree op1_type = TREE_TYPE (op1);
3031
 
3032
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
3033
    {
3034
      error ("invalid operands in gimple comparison");
3035
      return true;
3036
    }
3037
 
3038
  /* For comparisons we do not have the operation's type as the
3039
     effective type the comparison is carried out in.  Instead
3040
     we require that either the first operand is trivially
3041
     convertible into the second, or the other way around.
3042
     The resulting type of a comparison may be any integral type.
3043
     Because we special-case pointers to void we allow
3044
     comparisons of pointers with the same mode as well.  */
3045
  if ((!useless_type_conversion_p (op0_type, op1_type)
3046
       && !useless_type_conversion_p (op1_type, op0_type)
3047
       && (!POINTER_TYPE_P (op0_type)
3048
           || !POINTER_TYPE_P (op1_type)
3049
           || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3050
      || !INTEGRAL_TYPE_P (type))
3051
    {
3052
      error ("type mismatch in comparison expression");
3053
      debug_generic_expr (type);
3054
      debug_generic_expr (op0_type);
3055
      debug_generic_expr (op1_type);
3056
      return true;
3057
    }
3058
 
3059
  return false;
3060
}
3061
 
3062
/* Verify a gimple assignment statement STMT with a unary rhs.
3063
   Returns true if anything is wrong.  */
3064
 
3065
static bool
3066
verify_gimple_assign_unary (gimple stmt)
3067
{
3068
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3069
  tree lhs = gimple_assign_lhs (stmt);
3070
  tree lhs_type = TREE_TYPE (lhs);
3071
  tree rhs1 = gimple_assign_rhs1 (stmt);
3072
  tree rhs1_type = TREE_TYPE (rhs1);
3073
 
3074
  if (!is_gimple_reg (lhs)
3075
      && !(optimize == 0
3076
           && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3077
    {
3078
      error ("non-register as LHS of unary operation");
3079
      return true;
3080
    }
3081
 
3082
  if (!is_gimple_val (rhs1))
3083
    {
3084
      error ("invalid operand in unary operation");
3085
      return true;
3086
    }
3087
 
3088
  /* First handle conversions.  */
3089
  switch (rhs_code)
3090
    {
3091
    CASE_CONVERT:
3092
      {
3093
        /* Allow conversions between integral types and pointers only if
3094
           there is no sign or zero extension involved.
3095
           For targets where the precision of sizetype doesn't match that
3096
           of pointers we need to allow arbitrary conversions from and
3097
           to sizetype.  */
3098
        if ((POINTER_TYPE_P (lhs_type)
3099
             && INTEGRAL_TYPE_P (rhs1_type)
3100
             && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3101
                 || rhs1_type == sizetype))
3102
            || (POINTER_TYPE_P (rhs1_type)
3103
                && INTEGRAL_TYPE_P (lhs_type)
3104
                && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3105
                    || lhs_type == sizetype)))
3106
          return false;
3107
 
3108
        /* Allow conversion from integer to offset type and vice versa.  */
3109
        if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3110
             && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3111
            || (TREE_CODE (lhs_type) == INTEGER_TYPE
3112
                && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3113
          return false;
3114
 
3115
        /* Otherwise assert we are converting between types of the
3116
           same kind.  */
3117
        if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3118
          {
3119
            error ("invalid types in nop conversion");
3120
            debug_generic_expr (lhs_type);
3121
            debug_generic_expr (rhs1_type);
3122
            return true;
3123
          }
3124
 
3125
        return false;
3126
      }
3127
 
3128
    case ADDR_SPACE_CONVERT_EXPR:
3129
      {
3130
        if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3131
            || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3132
                == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3133
          {
3134
            error ("invalid types in address space conversion");
3135
            debug_generic_expr (lhs_type);
3136
            debug_generic_expr (rhs1_type);
3137
            return true;
3138
          }
3139
 
3140
        return false;
3141
      }
3142
 
3143
    case FIXED_CONVERT_EXPR:
3144
      {
3145
        if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3146
            && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3147
          {
3148
            error ("invalid types in fixed-point conversion");
3149
            debug_generic_expr (lhs_type);
3150
            debug_generic_expr (rhs1_type);
3151
            return true;
3152
          }
3153
 
3154
        return false;
3155
      }
3156
 
3157
    case FLOAT_EXPR:
3158
      {
3159
        if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3160
          {
3161
            error ("invalid types in conversion to floating point");
3162
            debug_generic_expr (lhs_type);
3163
            debug_generic_expr (rhs1_type);
3164
            return true;
3165
          }
3166
 
3167
        return false;
3168
      }
3169
 
3170
    case FIX_TRUNC_EXPR:
3171
      {
3172
        if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3173
          {
3174
            error ("invalid types in conversion to integer");
3175
            debug_generic_expr (lhs_type);
3176
            debug_generic_expr (rhs1_type);
3177
            return true;
3178
          }
3179
 
3180
        return false;
3181
      }
3182
 
3183
    case VEC_UNPACK_HI_EXPR:
3184
    case VEC_UNPACK_LO_EXPR:
3185
    case REDUC_MAX_EXPR:
3186
    case REDUC_MIN_EXPR:
3187
    case REDUC_PLUS_EXPR:
3188
    case VEC_UNPACK_FLOAT_HI_EXPR:
3189
    case VEC_UNPACK_FLOAT_LO_EXPR:
3190
      /* FIXME.  */
3191
      return false;
3192
 
3193
    case TRUTH_NOT_EXPR:
3194
    case NEGATE_EXPR:
3195
    case ABS_EXPR:
3196
    case BIT_NOT_EXPR:
3197
    case PAREN_EXPR:
3198
    case NON_LVALUE_EXPR:
3199
    case CONJ_EXPR:
3200
      break;
3201
 
3202
    default:
3203
      gcc_unreachable ();
3204
    }
3205
 
3206
  /* For the remaining codes assert there is no conversion involved.  */
3207
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
3208
    {
3209
      error ("non-trivial conversion in unary operation");
3210
      debug_generic_expr (lhs_type);
3211
      debug_generic_expr (rhs1_type);
3212
      return true;
3213
    }
3214
 
3215
  return false;
3216
}
3217
 
3218
/* Verify a gimple assignment statement STMT with a binary rhs.
3219
   Returns true if anything is wrong.  */
3220
 
3221
static bool
3222
verify_gimple_assign_binary (gimple stmt)
3223
{
3224
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3225
  tree lhs = gimple_assign_lhs (stmt);
3226
  tree lhs_type = TREE_TYPE (lhs);
3227
  tree rhs1 = gimple_assign_rhs1 (stmt);
3228
  tree rhs1_type = TREE_TYPE (rhs1);
3229
  tree rhs2 = gimple_assign_rhs2 (stmt);
3230
  tree rhs2_type = TREE_TYPE (rhs2);
3231
 
3232
  if (!is_gimple_reg (lhs)
3233
      && !(optimize == 0
3234
           && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3235
    {
3236
      error ("non-register as LHS of binary operation");
3237
      return true;
3238
    }
3239
 
3240
  if (!is_gimple_val (rhs1)
3241
      || !is_gimple_val (rhs2))
3242
    {
3243
      error ("invalid operands in binary operation");
3244
      return true;
3245
    }
3246
 
3247
  /* First handle operations that involve different types.  */
3248
  switch (rhs_code)
3249
    {
3250
    case COMPLEX_EXPR:
3251
      {
3252
        if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3253
            || !(INTEGRAL_TYPE_P (rhs1_type)
3254
                 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3255
            || !(INTEGRAL_TYPE_P (rhs2_type)
3256
                 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3257
          {
3258
            error ("type mismatch in complex expression");
3259
            debug_generic_expr (lhs_type);
3260
            debug_generic_expr (rhs1_type);
3261
            debug_generic_expr (rhs2_type);
3262
            return true;
3263
          }
3264
 
3265
        return false;
3266
      }
3267
 
3268
    case LSHIFT_EXPR:
3269
    case RSHIFT_EXPR:
3270
    case LROTATE_EXPR:
3271
    case RROTATE_EXPR:
3272
      {
3273
        /* Shifts and rotates are ok on integral types, fixed point
3274
           types and integer vector types.  */
3275
        if ((!INTEGRAL_TYPE_P (rhs1_type)
3276
             && !FIXED_POINT_TYPE_P (rhs1_type)
3277
             && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3278
                  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3279
            || (!INTEGRAL_TYPE_P (rhs2_type)
3280
                /* Vector shifts of vectors are also ok.  */
3281
                && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3282
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3283
                     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3284
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3285
            || !useless_type_conversion_p (lhs_type, rhs1_type))
3286
          {
3287
            error ("type mismatch in shift expression");
3288
            debug_generic_expr (lhs_type);
3289
            debug_generic_expr (rhs1_type);
3290
            debug_generic_expr (rhs2_type);
3291
            return true;
3292
          }
3293
 
3294
        return false;
3295
      }
3296
 
3297
    case VEC_LSHIFT_EXPR:
3298
    case VEC_RSHIFT_EXPR:
3299
      {
3300
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3301
            || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3302
                 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3303
                 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3304
            || (!INTEGRAL_TYPE_P (rhs2_type)
3305
                && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3306
                    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3307
            || !useless_type_conversion_p (lhs_type, rhs1_type))
3308
          {
3309
            error ("type mismatch in vector shift expression");
3310
            debug_generic_expr (lhs_type);
3311
            debug_generic_expr (rhs1_type);
3312
            debug_generic_expr (rhs2_type);
3313
            return true;
3314
          }
3315
        /* For shifting a vector of floating point components we
3316
           only allow shifting by a constant multiple of the element size.  */
3317
        if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3318
            && (TREE_CODE (rhs2) != INTEGER_CST
3319
                || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3320
                                           TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3321
          {
3322
            error ("non-element sized vector shift of floating point vector");
3323
            return true;
3324
          }
3325
 
3326
        return false;
3327
      }
3328
 
3329
    case PLUS_EXPR:
3330
      {
3331
        /* We use regular PLUS_EXPR for vectors.
3332
           ???  This just makes the checker happy and may not be what is
3333
           intended.  */
3334
        if (TREE_CODE (lhs_type) == VECTOR_TYPE
3335
            && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3336
          {
3337
            if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3338
                || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3339
              {
3340
                error ("invalid non-vector operands to vector valued plus");
3341
                return true;
3342
              }
3343
            lhs_type = TREE_TYPE (lhs_type);
3344
            rhs1_type = TREE_TYPE (rhs1_type);
3345
            rhs2_type = TREE_TYPE (rhs2_type);
3346
            /* PLUS_EXPR is commutative, so we might end up canonicalizing
3347
               the pointer to 2nd place.  */
3348
            if (POINTER_TYPE_P (rhs2_type))
3349
              {
3350
                tree tem = rhs1_type;
3351
                rhs1_type = rhs2_type;
3352
                rhs2_type = tem;
3353
              }
3354
            goto do_pointer_plus_expr_check;
3355
          }
3356
      }
3357
    /* Fallthru.  */
3358
    case MINUS_EXPR:
3359
      {
3360
        if (POINTER_TYPE_P (lhs_type)
3361
            || POINTER_TYPE_P (rhs1_type)
3362
            || POINTER_TYPE_P (rhs2_type))
3363
          {
3364
            error ("invalid (pointer) operands to plus/minus");
3365
            return true;
3366
          }
3367
 
3368
        /* Continue with generic binary expression handling.  */
3369
        break;
3370
      }
3371
 
3372
    case POINTER_PLUS_EXPR:
3373
      {
3374
do_pointer_plus_expr_check:
3375
        if (!POINTER_TYPE_P (rhs1_type)
3376
            || !useless_type_conversion_p (lhs_type, rhs1_type)
3377
            || !useless_type_conversion_p (sizetype, rhs2_type))
3378
          {
3379
            error ("type mismatch in pointer plus expression");
3380
            debug_generic_stmt (lhs_type);
3381
            debug_generic_stmt (rhs1_type);
3382
            debug_generic_stmt (rhs2_type);
3383
            return true;
3384
          }
3385
 
3386
        return false;
3387
      }
3388
 
3389
    case TRUTH_ANDIF_EXPR:
3390
    case TRUTH_ORIF_EXPR:
3391
      gcc_unreachable ();
3392
 
3393
    case TRUTH_AND_EXPR:
3394
    case TRUTH_OR_EXPR:
3395
    case TRUTH_XOR_EXPR:
3396
      {
3397
        /* We allow any kind of integral typed argument and result.  */
3398
        if (!INTEGRAL_TYPE_P (rhs1_type)
3399
            || !INTEGRAL_TYPE_P (rhs2_type)
3400
            || !INTEGRAL_TYPE_P (lhs_type))
3401
          {
3402
            error ("type mismatch in binary truth expression");
3403
            debug_generic_expr (lhs_type);
3404
            debug_generic_expr (rhs1_type);
3405
            debug_generic_expr (rhs2_type);
3406
            return true;
3407
          }
3408
 
3409
        return false;
3410
      }
3411
 
3412
    case LT_EXPR:
3413
    case LE_EXPR:
3414
    case GT_EXPR:
3415
    case GE_EXPR:
3416
    case EQ_EXPR:
3417
    case NE_EXPR:
3418
    case UNORDERED_EXPR:
3419
    case ORDERED_EXPR:
3420
    case UNLT_EXPR:
3421
    case UNLE_EXPR:
3422
    case UNGT_EXPR:
3423
    case UNGE_EXPR:
3424
    case UNEQ_EXPR:
3425
    case LTGT_EXPR:
3426
      /* Comparisons are also binary, but the result type is not
3427
         connected to the operand types.  */
3428
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3429
 
3430
    case WIDEN_SUM_EXPR:
3431
    case WIDEN_MULT_EXPR:
3432
    case VEC_WIDEN_MULT_HI_EXPR:
3433
    case VEC_WIDEN_MULT_LO_EXPR:
3434
    case VEC_PACK_TRUNC_EXPR:
3435
    case VEC_PACK_SAT_EXPR:
3436
    case VEC_PACK_FIX_TRUNC_EXPR:
3437
    case VEC_EXTRACT_EVEN_EXPR:
3438
    case VEC_EXTRACT_ODD_EXPR:
3439
    case VEC_INTERLEAVE_HIGH_EXPR:
3440
    case VEC_INTERLEAVE_LOW_EXPR:
3441
      /* FIXME.  */
3442
      return false;
3443
 
3444
    case MULT_EXPR:
3445
    case TRUNC_DIV_EXPR:
3446
    case CEIL_DIV_EXPR:
3447
    case FLOOR_DIV_EXPR:
3448
    case ROUND_DIV_EXPR:
3449
    case TRUNC_MOD_EXPR:
3450
    case CEIL_MOD_EXPR:
3451
    case FLOOR_MOD_EXPR:
3452
    case ROUND_MOD_EXPR:
3453
    case RDIV_EXPR:
3454
    case EXACT_DIV_EXPR:
3455
    case MIN_EXPR:
3456
    case MAX_EXPR:
3457
    case BIT_IOR_EXPR:
3458
    case BIT_XOR_EXPR:
3459
    case BIT_AND_EXPR:
3460
      /* Continue with generic binary expression handling.  */
3461
      break;
3462
 
3463
    default:
3464
      gcc_unreachable ();
3465
    }
3466
 
3467
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
3468
      || !useless_type_conversion_p (lhs_type, rhs2_type))
3469
    {
3470
      error ("type mismatch in binary expression");
3471
      debug_generic_stmt (lhs_type);
3472
      debug_generic_stmt (rhs1_type);
3473
      debug_generic_stmt (rhs2_type);
3474
      return true;
3475
    }
3476
 
3477
  return false;
3478
}
3479
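
/* For illustration (the GIMPLE below is only a sketch; names and SSA
   version numbers are made up): the pointer checks above require that a
   C statement such as "q = p + i" reach this verifier as a
   POINTER_PLUS_EXPR whose second operand has sizetype, e.g.

     tmp_4 = (sizetype) i_2;
     q_5 = p_3 + tmp_4;

   whereas a plain PLUS_EXPR or MINUS_EXPR with pointer-typed operands is
   rejected with "invalid (pointer) operands to plus/minus".  */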
 
3480
/* Verify a gimple assignment statement STMT with a single rhs.
3481
   Returns true if anything is wrong.  */
3482
 
3483
static bool
3484
verify_gimple_assign_single (gimple stmt)
3485
{
3486
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3487
  tree lhs = gimple_assign_lhs (stmt);
3488
  tree lhs_type = TREE_TYPE (lhs);
3489
  tree rhs1 = gimple_assign_rhs1 (stmt);
3490
  tree rhs1_type = TREE_TYPE (rhs1);
3491
  bool res = false;
3492
 
3493
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
3494
    {
3495
      error ("non-trivial conversion at assignment");
3496
      debug_generic_expr (lhs_type);
3497
      debug_generic_expr (rhs1_type);
3498
      return true;
3499
    }
3500
 
3501
  if (handled_component_p (lhs))
3502
    res |= verify_types_in_gimple_reference (lhs, true);
3503
 
3504
  /* Special codes we cannot handle via their class.  */
3505
  switch (rhs_code)
3506
    {
3507
    case ADDR_EXPR:
3508
      {
3509
        tree op = TREE_OPERAND (rhs1, 0);
3510
        if (!is_gimple_addressable (op))
3511
          {
3512
            error ("invalid operand in unary expression");
3513
            return true;
3514
          }
3515
 
3516
        if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3517
            && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3518
                                                          TREE_TYPE (op)))
3519
          {
3520
            error ("type mismatch in address expression");
3521
            debug_generic_stmt (TREE_TYPE (rhs1));
3522
            debug_generic_stmt (TREE_TYPE (op));
3523
            return true;
3524
          }
3525
 
3526
        return verify_types_in_gimple_reference (op, true);
3527
      }
3528
 
3529
    /* tcc_reference  */
3530
    case COMPONENT_REF:
3531
    case BIT_FIELD_REF:
3532
    case INDIRECT_REF:
3533
    case ALIGN_INDIRECT_REF:
3534
    case MISALIGNED_INDIRECT_REF:
3535
    case ARRAY_REF:
3536
    case ARRAY_RANGE_REF:
3537
    case VIEW_CONVERT_EXPR:
3538
    case REALPART_EXPR:
3539
    case IMAGPART_EXPR:
3540
    case TARGET_MEM_REF:
3541
      if (!is_gimple_reg (lhs)
3542
          && is_gimple_reg_type (TREE_TYPE (lhs)))
3543
        {
3544
          error ("invalid rhs for gimple memory store");
3545
          debug_generic_stmt (lhs);
3546
          debug_generic_stmt (rhs1);
3547
          return true;
3548
        }
3549
      return res || verify_types_in_gimple_reference (rhs1, false);
3550
 
3551
    /* tcc_constant  */
3552
    case SSA_NAME:
3553
    case INTEGER_CST:
3554
    case REAL_CST:
3555
    case FIXED_CST:
3556
    case COMPLEX_CST:
3557
    case VECTOR_CST:
3558
    case STRING_CST:
3559
      return res;
3560
 
3561
    /* tcc_declaration  */
3562
    case CONST_DECL:
3563
      return res;
3564
    case VAR_DECL:
3565
    case PARM_DECL:
3566
      if (!is_gimple_reg (lhs)
3567
          && !is_gimple_reg (rhs1)
3568
          && is_gimple_reg_type (TREE_TYPE (lhs)))
3569
        {
3570
          error ("invalid rhs for gimple memory store");
3571
          debug_generic_stmt (lhs);
3572
          debug_generic_stmt (rhs1);
3573
          return true;
3574
        }
3575
      return res;
3576
 
3577
    case COND_EXPR:
3578
    case CONSTRUCTOR:
3579
    case OBJ_TYPE_REF:
3580
    case ASSERT_EXPR:
3581
    case WITH_SIZE_EXPR:
3582
    case POLYNOMIAL_CHREC:
3583
    case DOT_PROD_EXPR:
3584
    case VEC_COND_EXPR:
3585
    case REALIGN_LOAD_EXPR:
3586
      /* FIXME.  */
3587
      return res;
3588
 
3589
    default:;
3590
    }
3591
 
3592
  return res;
3593
}
3594
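
/* As an example of the "invalid rhs for gimple memory store" check above
   (GIMPLE sketched, identifiers made up): for a register type such as
   int, a memory-to-memory copy has to go through a temporary,

     tmp_1 = *q_2;
     *p_3 = tmp_1;

   whereas for an aggregate (non-register) type a single assignment of
   the form "*p = *q" is valid GIMPLE, which is why the check is guarded
   by is_gimple_reg_type on the type of the LHS.  */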
 
3595
/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
3596
   is a problem, otherwise false.  */
3597
 
3598
static bool
3599
verify_gimple_assign (gimple stmt)
3600
{
3601
  switch (gimple_assign_rhs_class (stmt))
3602
    {
3603
    case GIMPLE_SINGLE_RHS:
3604
      return verify_gimple_assign_single (stmt);
3605
 
3606
    case GIMPLE_UNARY_RHS:
3607
      return verify_gimple_assign_unary (stmt);
3608
 
3609
    case GIMPLE_BINARY_RHS:
3610
      return verify_gimple_assign_binary (stmt);
3611
 
3612
    default:
3613
      gcc_unreachable ();
3614
    }
3615
}
3616
 
3617
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
3618
   is a problem, otherwise false.  */
3619
 
3620
static bool
3621
verify_gimple_return (gimple stmt)
3622
{
3623
  tree op = gimple_return_retval (stmt);
3624
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3625
 
3626
  /* We cannot test for present return values as we do not fix up missing
3627
     return values from the original source.  */
3628
  if (op == NULL)
3629
    return false;
3630
 
3631
  if (!is_gimple_val (op)
3632
      && TREE_CODE (op) != RESULT_DECL)
3633
    {
3634
      error ("invalid operand in return statement");
3635
      debug_generic_stmt (op);
3636
      return true;
3637
    }
3638
 
3639
  if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3640
      /* ???  With C++ we can have the situation that the result
3641
         decl is a reference type while the return type is an aggregate.  */
3642
      && !(TREE_CODE (op) == RESULT_DECL
3643
           && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3644
           && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3645
    {
3646
      error ("invalid conversion in return statement");
3647
      debug_generic_stmt (restype);
3648
      debug_generic_stmt (TREE_TYPE (op));
3649
      return true;
3650
    }
3651
 
3652
  return false;
3653
}
3654
 
3655
 
3656
/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
3657
   is a problem, otherwise false.  */
3658
 
3659
static bool
3660
verify_gimple_goto (gimple stmt)
3661
{
3662
  tree dest = gimple_goto_dest (stmt);
3663
 
3664
  /* ???  We have two canonical forms of direct goto destinations, a
3665
     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
3666
  if (TREE_CODE (dest) != LABEL_DECL
3667
      && (!is_gimple_val (dest)
3668
          || !POINTER_TYPE_P (TREE_TYPE (dest))))
3669
    {
3670
      error ("goto destination is neither a label nor a pointer");
3671
      return true;
3672
    }
3673
 
3674
  return false;
3675
}
3676
 
3677
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
3678
   is a problem, otherwise false.  */
3679
 
3680
static bool
3681
verify_gimple_switch (gimple stmt)
3682
{
3683
  if (!is_gimple_val (gimple_switch_index (stmt)))
3684
    {
3685
      error ("invalid operand to switch statement");
3686
      debug_generic_stmt (gimple_switch_index (stmt));
3687
      return true;
3688
    }
3689
 
3690
  return false;
3691
}
3692
 
3693
 
3694
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
3695
   and false otherwise.  */
3696
 
3697
static bool
3698
verify_gimple_phi (gimple stmt)
3699
{
3700
  tree type = TREE_TYPE (gimple_phi_result (stmt));
3701
  unsigned i;
3702
 
3703
  if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3704
    {
3705
      error ("Invalid PHI result");
3706
      return true;
3707
    }
3708
 
3709
  for (i = 0; i < gimple_phi_num_args (stmt); i++)
3710
    {
3711
      tree arg = gimple_phi_arg_def (stmt, i);
3712
      if ((is_gimple_reg (gimple_phi_result (stmt))
3713
           && !is_gimple_val (arg))
3714
          || (!is_gimple_reg (gimple_phi_result (stmt))
3715
              && !is_gimple_addressable (arg)))
3716
        {
3717
          error ("Invalid PHI argument");
3718
          debug_generic_stmt (arg);
3719
          return true;
3720
        }
3721
      if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3722
        {
3723
          error ("Incompatible types in PHI argument %u", i);
3724
          debug_generic_stmt (type);
3725
          debug_generic_stmt (TREE_TYPE (arg));
3726
          return true;
3727
        }
3728
    }
3729
 
3730
  return false;
3731
}
3732
 
3733
 
3734
/* Verify a gimple debug statement STMT.
3735
   Returns true if anything is wrong.  */
3736
 
3737
static bool
3738
verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3739
{
3740
  /* There isn't much that could be wrong in a gimple debug stmt.  A
3741
     gimple debug bind stmt, for example, maps a tree (usually a VAR_DECL
3742
     or a PARM_DECL, but possibly some scalarized component or member of
3743
     an aggregate type) to another tree, which can be an arbitrary
3744
     expression.  These stmts expand into debug
3745
     insns, and are converted to debug notes by var-tracking.c.  */
3746
  return false;
3747
}
3748
 
3749
 
3750
/* Verify the GIMPLE statement STMT.  Returns true if there is an
3751
   error, otherwise false.  */
3752
 
3753
static bool
3754
verify_types_in_gimple_stmt (gimple stmt)
3755
{
3756
  switch (gimple_code (stmt))
3757
    {
3758
    case GIMPLE_ASSIGN:
3759
      return verify_gimple_assign (stmt);
3760
 
3761
    case GIMPLE_LABEL:
3762
      return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3763
 
3764
    case GIMPLE_CALL:
3765
      return verify_gimple_call (stmt);
3766
 
3767
    case GIMPLE_COND:
3768
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3769
        {
3770
          error ("invalid comparison code in gimple cond");
3771
          return true;
3772
        }
3773
      if (!(!gimple_cond_true_label (stmt)
3774
            || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3775
          || !(!gimple_cond_false_label (stmt)
3776
               || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3777
        {
3778
          error ("invalid labels in gimple cond");
3779
          return true;
3780
        }
3781
 
3782
      return verify_gimple_comparison (boolean_type_node,
3783
                                       gimple_cond_lhs (stmt),
3784
                                       gimple_cond_rhs (stmt));
3785
 
3786
    case GIMPLE_GOTO:
3787
      return verify_gimple_goto (stmt);
3788
 
3789
    case GIMPLE_SWITCH:
3790
      return verify_gimple_switch (stmt);
3791
 
3792
    case GIMPLE_RETURN:
3793
      return verify_gimple_return (stmt);
3794
 
3795
    case GIMPLE_ASM:
3796
      return false;
3797
 
3798
    case GIMPLE_PHI:
3799
      return verify_gimple_phi (stmt);
3800
 
3801
    /* Tuples that do not have tree operands.  */
3802
    case GIMPLE_NOP:
3803
    case GIMPLE_PREDICT:
3804
    case GIMPLE_RESX:
3805
    case GIMPLE_EH_DISPATCH:
3806
    case GIMPLE_EH_MUST_NOT_THROW:
3807
      return false;
3808
 
3809
    CASE_GIMPLE_OMP:
3810
      /* OpenMP directives are validated by the FE and never operated
3811
         on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
3812
         non-gimple expressions when the main index variable has had
3813
         its address taken.  This does not affect the loop itself
3814
         because the header of a GIMPLE_OMP_FOR is merely used to determine
3815
         how to set up the parallel iteration.  */
3816
      return false;
3817
 
3818
    case GIMPLE_DEBUG:
3819
      return verify_gimple_debug (stmt);
3820
 
3821
    default:
3822
      gcc_unreachable ();
3823
    }
3824
}
3825
 
3826
/* Verify the GIMPLE statements inside the sequence STMTS.  */
3827
 
3828
static bool
3829
verify_types_in_gimple_seq_2 (gimple_seq stmts)
3830
{
3831
  gimple_stmt_iterator ittr;
3832
  bool err = false;
3833
 
3834
  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3835
    {
3836
      gimple stmt = gsi_stmt (ittr);
3837
 
3838
      switch (gimple_code (stmt))
3839
        {
3840
        case GIMPLE_BIND:
3841
          err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3842
          break;
3843
 
3844
        case GIMPLE_TRY:
3845
          err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3846
          err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3847
          break;
3848
 
3849
        case GIMPLE_EH_FILTER:
3850
          err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3851
          break;
3852
 
3853
        case GIMPLE_CATCH:
3854
          err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3855
          break;
3856
 
3857
        default:
3858
          {
3859
            bool err2 = verify_types_in_gimple_stmt (stmt);
3860
            if (err2)
3861
              debug_gimple_stmt (stmt);
3862
            err |= err2;
3863
          }
3864
        }
3865
    }
3866
 
3867
  return err;
3868
}
3869
 
3870
 
3871
/* Verify the GIMPLE statements inside the statement list STMTS.  */
3872
 
3873
void
3874
verify_types_in_gimple_seq (gimple_seq stmts)
3875
{
3876
  if (verify_types_in_gimple_seq_2 (stmts))
3877
    internal_error ("verify_gimple failed");
3878
}
3879
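
/* A minimal sketch (the helper name is made up) of driving the type
   verifier above over a function that still has its low-GIMPLE body
   attached; once the CFG has been built, verify_stmts below is the
   entry point to use instead:

     static void
     check_body_types (tree fndecl)
     {
       gimple_seq body = gimple_body (fndecl);
       if (body)
         verify_types_in_gimple_seq (body);
     }

   On a violation this aborts via internal_error ("verify_gimple failed")
   after the offending statement has been dumped.  */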
 
3880
 
3881
/* Verify STMT, return true if STMT is not in GIMPLE form.
3882
   TODO: Implement type checking.  */
3883
 
3884
static bool
3885
verify_stmt (gimple_stmt_iterator *gsi)
3886
{
3887
  tree addr;
3888
  struct walk_stmt_info wi;
3889
  bool last_in_block = gsi_one_before_end_p (*gsi);
3890
  gimple stmt = gsi_stmt (*gsi);
3891
  int lp_nr;
3892
 
3893
  if (is_gimple_omp (stmt))
3894
    {
3895
      /* OpenMP directives are validated by the FE and never operated
3896
         on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
3897
         non-gimple expressions when the main index variable has had
3898
         its address taken.  This does not affect the loop itself
3899
         because the header of a GIMPLE_OMP_FOR is merely used to determine
3900
         how to set up the parallel iteration.  */
3901
      return false;
3902
    }
3903
 
3904
  /* FIXME.  The C frontend passes unpromoted arguments in case it
3905
     didn't see a function declaration before the call.  */
3906
  if (is_gimple_call (stmt))
3907
    {
3908
      tree decl;
3909
 
3910
      if (!is_gimple_call_addr (gimple_call_fn (stmt)))
3911
        {
3912
          error ("invalid function in call statement");
3913
          return true;
3914
        }
3915
 
3916
      decl = gimple_call_fndecl (stmt);
3917
      if (decl
3918
          && TREE_CODE (decl) == FUNCTION_DECL
3919
          && DECL_LOOPING_CONST_OR_PURE_P (decl)
3920
          && (!DECL_PURE_P (decl))
3921
          && (!TREE_READONLY (decl)))
3922
        {
3923
          error ("invalid pure const state for function");
3924
          return true;
3925
        }
3926
    }
3927
 
3928
  if (is_gimple_debug (stmt))
3929
    return false;
3930
 
3931
  memset (&wi, 0, sizeof (wi));
3932
  addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
3933
  if (addr)
3934
    {
3935
      debug_generic_expr (addr);
3936
      inform (gimple_location (gsi_stmt (*gsi)), "in statement");
3937
      debug_gimple_stmt (stmt);
3938
      return true;
3939
    }
3940
 
3941
  /* If the statement is marked as part of an EH region, then it is
3942
     expected that the statement could throw.  Verify that when we
3943
     have optimizations that simplify statements such that we prove
3944
     that they cannot throw, that we update other data structures
3945
     to match.  */
3946
  lp_nr = lookup_stmt_eh_lp (stmt);
3947
  if (lp_nr != 0)
3948
    {
3949
      if (!stmt_could_throw_p (stmt))
3950
        {
3951
          /* During IPA passes, ipa-pure-const sets nothrow flags on calls
3952
             and they are updated on statements only after fixup_cfg
3953
             is executed at the beginning of the expansion stage.  */
3954
          if (cgraph_state != CGRAPH_STATE_IPA_SSA)
3955
            {
3956
              error ("statement marked for throw, but doesn%'t");
3957
              goto fail;
3958
            }
3959
        }
3960
      else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
3961
        {
3962
          error ("statement marked for throw in middle of block");
3963
          goto fail;
3964
        }
3965
    }
3966
 
3967
  return false;
3968
 
3969
 fail:
3970
  debug_gimple_stmt (stmt);
3971
  return true;
3972
}
3973
 
3974
 
3975
/* Return true when the T can be shared.  */
3976
 
3977
bool
3978
tree_node_can_be_shared (tree t)
3979
{
3980
  if (IS_TYPE_OR_DECL_P (t)
3981
      || is_gimple_min_invariant (t)
3982
      || TREE_CODE (t) == SSA_NAME
3983
      || t == error_mark_node
3984
      || TREE_CODE (t) == IDENTIFIER_NODE)
3985
    return true;
3986
 
3987
  if (TREE_CODE (t) == CASE_LABEL_EXPR)
3988
    return true;
3989
 
3990
  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3991
           && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3992
         || TREE_CODE (t) == COMPONENT_REF
3993
         || TREE_CODE (t) == REALPART_EXPR
3994
         || TREE_CODE (t) == IMAGPART_EXPR)
3995
    t = TREE_OPERAND (t, 0);
3996
 
3997
  if (DECL_P (t))
3998
    return true;
3999
 
4000
  return false;
4001
}
4002
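
/* Concretely: constants, types, decls and SSA_NAMEs may be referenced
   from any number of statements, and so may a reference such as a.b[3]
   that bottoms out in a decl through invariant indices, while a tree
   such as a[i_2], whose index is not invariant, must be unshared so that
   each statement owns its own copy of the operand.  (Examples are
   sketches; verify_node_sharing below shows how this predicate is
   used.)  */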
 
4003
 
4004
/* Called via walk_gimple_stmt.  Verify tree sharing.  */
4005
 
4006
static tree
4007
verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4008
{
4009
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4010
  struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4011
 
4012
  if (tree_node_can_be_shared (*tp))
4013
    {
4014
      *walk_subtrees = false;
4015
      return NULL;
4016
    }
4017
 
4018
  if (pointer_set_insert (visited, *tp))
4019
    return *tp;
4020
 
4021
  return NULL;
4022
}
4023
 
4024
 
4025
static bool eh_error_found;
4026
static int
4027
verify_eh_throw_stmt_node (void **slot, void *data)
4028
{
4029
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4030
  struct pointer_set_t *visited = (struct pointer_set_t *) data;
4031
 
4032
  if (!pointer_set_contains (visited, node->stmt))
4033
    {
4034
      error ("Dead STMT in EH table");
4035
      debug_gimple_stmt (node->stmt);
4036
      eh_error_found = true;
4037
    }
4038
  return 1;
4039
}
4040
 
4041
 
4042
/* Verify the GIMPLE statements in every basic block.  */
4043
 
4044
void
4045
verify_stmts (void)
4046
{
4047
  basic_block bb;
4048
  gimple_stmt_iterator gsi;
4049
  bool err = false;
4050
  struct pointer_set_t *visited, *visited_stmts;
4051
  tree addr;
4052
  struct walk_stmt_info wi;
4053
 
4054
  timevar_push (TV_TREE_STMT_VERIFY);
4055
  visited = pointer_set_create ();
4056
  visited_stmts = pointer_set_create ();
4057
 
4058
  memset (&wi, 0, sizeof (wi));
4059
  wi.info = (void *) visited;
4060
 
4061
  FOR_EACH_BB (bb)
4062
    {
4063
      gimple phi;
4064
      size_t i;
4065
 
4066
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4067
        {
4068
          phi = gsi_stmt (gsi);
4069
          pointer_set_insert (visited_stmts, phi);
4070
          if (gimple_bb (phi) != bb)
4071
            {
4072
              error ("gimple_bb (phi) is set to a wrong basic block");
4073
              err |= true;
4074
            }
4075
 
4076
          for (i = 0; i < gimple_phi_num_args (phi); i++)
4077
            {
4078
              tree t = gimple_phi_arg_def (phi, i);
4079
              tree addr;
4080
 
4081
              if (!t)
4082
                {
4083
                  error ("missing PHI def");
4084
                  debug_gimple_stmt (phi);
4085
                  err |= true;
4086
                  continue;
4087
                }
4088
              /* Addressable variables do have SSA_NAMEs but they
4089
                 are not considered gimple values.  */
4090
              else if (TREE_CODE (t) != SSA_NAME
4091
                       && TREE_CODE (t) != FUNCTION_DECL
4092
                       && !is_gimple_min_invariant (t))
4093
                {
4094
                  error ("PHI argument is not a GIMPLE value");
4095
                  debug_gimple_stmt (phi);
4096
                  debug_generic_expr (t);
4097
                  err |= true;
4098
                }
4099
 
4100
              addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4101
              if (addr)
4102
                {
4103
                  error ("incorrect sharing of tree nodes");
4104
                  debug_gimple_stmt (phi);
4105
                  debug_generic_expr (addr);
4106
                  err |= true;
4107
                }
4108
            }
4109
 
4110
#ifdef ENABLE_TYPES_CHECKING
4111
          if (verify_gimple_phi (phi))
4112
            {
4113
              debug_gimple_stmt (phi);
4114
              err |= true;
4115
            }
4116
#endif
4117
        }
4118
 
4119
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4120
        {
4121
          gimple stmt = gsi_stmt (gsi);
4122
 
4123
          if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4124
              || gimple_code (stmt) == GIMPLE_BIND)
4125
            {
4126
              error ("invalid GIMPLE statement");
4127
              debug_gimple_stmt (stmt);
4128
              err |= true;
4129
            }
4130
 
4131
          pointer_set_insert (visited_stmts, stmt);
4132
 
4133
          if (gimple_bb (stmt) != bb)
4134
            {
4135
              error ("gimple_bb (stmt) is set to a wrong basic block");
4136
              debug_gimple_stmt (stmt);
4137
              err |= true;
4138
            }
4139
 
4140
          if (gimple_code (stmt) == GIMPLE_LABEL)
4141
            {
4142
              tree decl = gimple_label_label (stmt);
4143
              int uid = LABEL_DECL_UID (decl);
4144
 
4145
              if (uid == -1
4146
                  || VEC_index (basic_block, label_to_block_map, uid) != bb)
4147
                {
4148
                  error ("incorrect entry in label_to_block_map");
4149
                  err |= true;
4150
                }
4151
 
4152
              uid = EH_LANDING_PAD_NR (decl);
4153
              if (uid)
4154
                {
4155
                  eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4156
                  if (decl != lp->post_landing_pad)
4157
                    {
4158
                      error ("incorrect setting of landing pad number");
4159
                      err |= true;
4160
                    }
4161
                }
4162
            }
4163
 
4164
          err |= verify_stmt (&gsi);
4165
 
4166
#ifdef ENABLE_TYPES_CHECKING
4167
          if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4168
            {
4169
              debug_gimple_stmt (stmt);
4170
              err |= true;
4171
            }
4172
#endif
4173
          addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4174
          if (addr)
4175
            {
4176
              error ("incorrect sharing of tree nodes");
4177
              debug_gimple_stmt (stmt);
4178
              debug_generic_expr (addr);
4179
              err |= true;
4180
            }
4181
          gsi_next (&gsi);
4182
        }
4183
    }
4184
 
4185
  eh_error_found = false;
4186
  if (get_eh_throw_stmt_table (cfun))
4187
    htab_traverse (get_eh_throw_stmt_table (cfun),
4188
                   verify_eh_throw_stmt_node,
4189
                   visited_stmts);
4190
 
4191
  if (err | eh_error_found)
4192
    internal_error ("verify_stmts failed");
4193
 
4194
  pointer_set_destroy (visited);
4195
  pointer_set_destroy (visited_stmts);
4196
  verify_histograms ();
4197
  timevar_pop (TV_TREE_STMT_VERIFY);
4198
}
4199
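
/* The verifier above is normally run from the pass manager in
   checking-enabled builds; a convenient way to narrow down a pass that
   corrupts the IL is to call it by hand, e.g. from the debugger right
   after the suspect transformation:

     (gdb) call verify_stmts ()

   which either returns quietly or stops with
   internal_error ("verify_stmts failed") after dumping the offending
   statement.  */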
 
4200
 
4201
/* Verifies that the flow information is OK.  */
4202
 
4203
static int
4204
gimple_verify_flow_info (void)
4205
{
4206
  int err = 0;
4207
  basic_block bb;
4208
  gimple_stmt_iterator gsi;
4209
  gimple stmt;
4210
  edge e;
4211
  edge_iterator ei;
4212
 
4213
  if (ENTRY_BLOCK_PTR->il.gimple)
4214
    {
4215
      error ("ENTRY_BLOCK has IL associated with it");
4216
      err = 1;
4217
    }
4218
 
4219
  if (EXIT_BLOCK_PTR->il.gimple)
4220
    {
4221
      error ("EXIT_BLOCK has IL associated with it");
4222
      err = 1;
4223
    }
4224
 
4225
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4226
    if (e->flags & EDGE_FALLTHRU)
4227
      {
4228
        error ("fallthru to exit from bb %d", e->src->index);
4229
        err = 1;
4230
      }
4231
 
4232
  FOR_EACH_BB (bb)
4233
    {
4234
      bool found_ctrl_stmt = false;
4235
 
4236
      stmt = NULL;
4237
 
4238
      /* Skip labels on the start of basic block.  */
4239
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4240
        {
4241
          tree label;
4242
          gimple prev_stmt = stmt;
4243
 
4244
          stmt = gsi_stmt (gsi);
4245
 
4246
          if (gimple_code (stmt) != GIMPLE_LABEL)
4247
            break;
4248
 
4249
          label = gimple_label_label (stmt);
4250
          if (prev_stmt && DECL_NONLOCAL (label))
4251
            {
4252
              error ("nonlocal label ");
4253
              print_generic_expr (stderr, label, 0);
4254
              fprintf (stderr, " is not first in a sequence of labels in bb %d",
4255
                       bb->index);
4256
              err = 1;
4257
            }
4258
 
4259
          if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4260
            {
4261
              error ("EH landing pad label ");
4262
              print_generic_expr (stderr, label, 0);
4263
              fprintf (stderr, " is not first in a sequence of labels in bb %d",
4264
                       bb->index);
4265
              err = 1;
4266
            }
4267
 
4268
          if (label_to_block (label) != bb)
4269
            {
4270
              error ("label ");
4271
              print_generic_expr (stderr, label, 0);
4272
              fprintf (stderr, " to block does not match in bb %d",
4273
                       bb->index);
4274
              err = 1;
4275
            }
4276
 
4277
          if (decl_function_context (label) != current_function_decl)
4278
            {
4279
              error ("label ");
4280
              print_generic_expr (stderr, label, 0);
4281
              fprintf (stderr, " has incorrect context in bb %d",
4282
                       bb->index);
4283
              err = 1;
4284
            }
4285
        }
4286
 
4287
      /* Verify that the body of basic block BB is free of control flow.  */
4288
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
4289
        {
4290
          gimple stmt = gsi_stmt (gsi);
4291
 
4292
          if (found_ctrl_stmt)
4293
            {
4294
              error ("control flow in the middle of basic block %d",
4295
                     bb->index);
4296
              err = 1;
4297
            }
4298
 
4299
          if (stmt_ends_bb_p (stmt))
4300
            found_ctrl_stmt = true;
4301
 
4302
          if (gimple_code (stmt) == GIMPLE_LABEL)
4303
            {
4304
              error ("label ");
4305
              print_generic_expr (stderr, gimple_label_label (stmt), 0);
4306
              fprintf (stderr, " in the middle of basic block %d", bb->index);
4307
              err = 1;
4308
            }
4309
        }
4310
 
4311
      gsi = gsi_last_bb (bb);
4312
      if (gsi_end_p (gsi))
4313
        continue;
4314
 
4315
      stmt = gsi_stmt (gsi);
4316
 
4317
      if (gimple_code (stmt) == GIMPLE_LABEL)
4318
        continue;
4319
 
4320
      err |= verify_eh_edges (stmt);
4321
 
4322
      if (is_ctrl_stmt (stmt))
4323
        {
4324
          FOR_EACH_EDGE (e, ei, bb->succs)
4325
            if (e->flags & EDGE_FALLTHRU)
4326
              {
4327
                error ("fallthru edge after a control statement in bb %d",
4328
                       bb->index);
4329
                err = 1;
4330
              }
4331
        }
4332
 
4333
      if (gimple_code (stmt) != GIMPLE_COND)
4334
        {
4335
          /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4336
             after anything other than a GIMPLE_COND statement.  */
4337
          FOR_EACH_EDGE (e, ei, bb->succs)
4338
            if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4339
              {
4340
                error ("true/false edge after a non-GIMPLE_COND in bb %d",
4341
                       bb->index);
4342
                err = 1;
4343
              }
4344
        }
4345
 
4346
      switch (gimple_code (stmt))
4347
        {
4348
        case GIMPLE_COND:
4349
          {
4350
            edge true_edge;
4351
            edge false_edge;
4352
 
4353
            extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4354
 
4355
            if (!true_edge
4356
                || !false_edge
4357
                || !(true_edge->flags & EDGE_TRUE_VALUE)
4358
                || !(false_edge->flags & EDGE_FALSE_VALUE)
4359
                || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4360
                || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4361
                || EDGE_COUNT (bb->succs) >= 3)
4362
              {
4363
                error ("wrong outgoing edge flags at end of bb %d",
4364
                       bb->index);
4365
                err = 1;
4366
              }
4367
          }
4368
          break;
4369
 
4370
        case GIMPLE_GOTO:
4371
          if (simple_goto_p (stmt))
4372
            {
4373
              error ("explicit goto at end of bb %d", bb->index);
4374
              err = 1;
4375
            }
4376
          else
4377
            {
4378
              /* FIXME.  We should double check that the labels in the
4379
                 destination blocks have their address taken.  */
4380
              FOR_EACH_EDGE (e, ei, bb->succs)
4381
                if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4382
                                 | EDGE_FALSE_VALUE))
4383
                    || !(e->flags & EDGE_ABNORMAL))
4384
                  {
4385
                    error ("wrong outgoing edge flags at end of bb %d",
4386
                           bb->index);
4387
                    err = 1;
4388
                  }
4389
            }
4390
          break;
4391
 
4392
        case GIMPLE_RETURN:
4393
          if (!single_succ_p (bb)
4394
              || (single_succ_edge (bb)->flags
4395
                  & (EDGE_FALLTHRU | EDGE_ABNORMAL
4396
                     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4397
            {
4398
              error ("wrong outgoing edge flags at end of bb %d", bb->index);
4399
              err = 1;
4400
            }
4401
          if (single_succ (bb) != EXIT_BLOCK_PTR)
4402
            {
4403
              error ("return edge does not point to exit in bb %d",
4404
                     bb->index);
4405
              err = 1;
4406
            }
4407
          break;
4408
 
4409
        case GIMPLE_SWITCH:
4410
          {
4411
            tree prev;
4412
            edge e;
4413
            size_t i, n;
4414
 
4415
            n = gimple_switch_num_labels (stmt);
4416
 
4417
            /* Mark all the destination basic blocks.  */
4418
            for (i = 0; i < n; ++i)
4419
              {
4420
                tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4421
                basic_block label_bb = label_to_block (lab);
4422
                gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4423
                label_bb->aux = (void *)1;
4424
              }
4425
 
4426
            /* Verify that the case labels are sorted.  */
4427
            prev = gimple_switch_label (stmt, 0);
4428
            for (i = 1; i < n; ++i)
4429
              {
4430
                tree c = gimple_switch_label (stmt, i);
4431
                if (!CASE_LOW (c))
4432
                  {
4433
                    error ("found default case not at the start of "
4434
                           "case vector");
4435
                    err = 1;
4436
                    continue;
4437
                  }
4438
                if (CASE_LOW (prev)
4439
                    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4440
                  {
4441
                    error ("case labels not sorted: ");
4442
                    print_generic_expr (stderr, prev, 0);
4443
                    fprintf (stderr," is greater than ");
4444
                    print_generic_expr (stderr, c, 0);
4445
                    fprintf (stderr," but comes before it.\n");
4446
                    err = 1;
4447
                  }
4448
                prev = c;
4449
              }
4450
            /* VRP will remove the default case if it can prove it will
4451
               never be executed.  So do not verify there always exists
4452
               a default case here.  */
4453
 
4454
            FOR_EACH_EDGE (e, ei, bb->succs)
4455
              {
4456
                if (!e->dest->aux)
4457
                  {
4458
                    error ("extra outgoing edge %d->%d",
4459
                           bb->index, e->dest->index);
4460
                    err = 1;
4461
                  }
4462
 
4463
                e->dest->aux = (void *)2;
4464
                if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4465
                                 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4466
                  {
4467
                    error ("wrong outgoing edge flags at end of bb %d",
4468
                           bb->index);
4469
                    err = 1;
4470
                  }
4471
              }
4472
 
4473
            /* Check that we have all of them.  */
4474
            for (i = 0; i < n; ++i)
4475
              {
4476
                tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4477
                basic_block label_bb = label_to_block (lab);
4478
 
4479
                if (label_bb->aux != (void *)2)
4480
                  {
4481
                    error ("missing edge %i->%i", bb->index, label_bb->index);
4482
                    err = 1;
4483
                  }
4484
              }
4485
 
4486
            FOR_EACH_EDGE (e, ei, bb->succs)
4487
              e->dest->aux = (void *)0;
4488
          }
4489
          break;
4490
 
4491
        case GIMPLE_EH_DISPATCH:
4492
          err |= verify_eh_dispatch_edge (stmt);
4493
          break;
4494
 
4495
        default:
4496
          break;
4497
        }
4498
    }
4499
 
4500
  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4501
    verify_dominators (CDI_DOMINATORS);
4502
 
4503
  return err;
4504
}
4505
 
4506
 
4507
/* Updates phi nodes after creating a forwarder block joined
4508
   by edge FALLTHRU.  */
4509
 
4510
static void
4511
gimple_make_forwarder_block (edge fallthru)
4512
{
4513
  edge e;
4514
  edge_iterator ei;
4515
  basic_block dummy, bb;
4516
  tree var;
4517
  gimple_stmt_iterator gsi;
4518
 
4519
  dummy = fallthru->src;
4520
  bb = fallthru->dest;
4521
 
4522
  if (single_pred_p (bb))
4523
    return;
4524
 
4525
  /* If we redirected a branch we must create new PHI nodes at the
4526
     start of BB.  */
4527
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4528
    {
4529
      gimple phi, new_phi;
4530
 
4531
      phi = gsi_stmt (gsi);
4532
      var = gimple_phi_result (phi);
4533
      new_phi = create_phi_node (var, bb);
4534
      SSA_NAME_DEF_STMT (var) = new_phi;
4535
      gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4536
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4537
                   UNKNOWN_LOCATION);
4538
    }
4539
 
4540
  /* Add the arguments we have stored on edges.  */
4541
  FOR_EACH_EDGE (e, ei, bb->preds)
4542
    {
4543
      if (e == fallthru)
4544
        continue;
4545
 
4546
      flush_pending_stmts (e);
4547
    }
4548
}
4549
 
4550
 
4551
/* Return a non-special label at the head of basic block BB.
4552
   Create one if it doesn't exist.  */
4553
 
4554
tree
4555
gimple_block_label (basic_block bb)
4556
{
4557
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
4558
  bool first = true;
4559
  tree label;
4560
  gimple stmt;
4561
 
4562
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4563
    {
4564
      stmt = gsi_stmt (i);
4565
      if (gimple_code (stmt) != GIMPLE_LABEL)
4566
        break;
4567
      label = gimple_label_label (stmt);
4568
      if (!DECL_NONLOCAL (label))
4569
        {
4570
          if (!first)
4571
            gsi_move_before (&i, &s);
4572
          return label;
4573
        }
4574
    }
4575
 
4576
  label = create_artificial_label (UNKNOWN_LOCATION);
4577
  stmt = gimple_build_label (label);
4578
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4579
  return label;
4580
}
4581
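
/* For instance, the GIMPLE_SWITCH case of gimple_redirect_edge_and_branch
   below retargets a case roughly as

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   and the GIMPLE_ASM case does the analogous thing for asm label
   operands, so that control now transfers to DEST.  (Sketch; see the
   actual uses below for the details.)  */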
 
4582
 
4583
/* Attempt to perform edge redirection by replacing a possibly complex
4584
   jump instruction by a goto or by removing the jump completely.
4585
   This can apply only if all edges now point to the same block.  The
4586
   parameters and return values are equivalent to
4587
   redirect_edge_and_branch.  */
4588
 
4589
static edge
4590
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4591
{
4592
  basic_block src = e->src;
4593
  gimple_stmt_iterator i;
4594
  gimple stmt;
4595
 
4596
  /* We can replace or remove a complex jump only when we have exactly
4597
     two edges.  */
4598
  if (EDGE_COUNT (src->succs) != 2
4599
      /* Verify that all targets will be TARGET.  Specifically, the
4600
         edge that is not E must also go to TARGET.  */
4601
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4602
    return NULL;
4603
 
4604
  i = gsi_last_bb (src);
4605
  if (gsi_end_p (i))
4606
    return NULL;
4607
 
4608
  stmt = gsi_stmt (i);
4609
 
4610
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4611
    {
4612
      gsi_remove (&i, true);
4613
      e = ssa_redirect_edge (e, target);
4614
      e->flags = EDGE_FALLTHRU;
4615
      return e;
4616
    }
4617
 
4618
  return NULL;
4619
}
4620
 
4621
 
4622
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
4623
   edge representing the redirected branch.  */
4624
 
4625
static edge
4626
gimple_redirect_edge_and_branch (edge e, basic_block dest)
4627
{
4628
  basic_block bb = e->src;
4629
  gimple_stmt_iterator gsi;
4630
  edge ret;
4631
  gimple stmt;
4632
 
4633
  if (e->flags & EDGE_ABNORMAL)
4634
    return NULL;
4635
 
4636
  if (e->dest == dest)
4637
    return NULL;
4638
 
4639
  if (e->flags & EDGE_EH)
4640
    return redirect_eh_edge (e, dest);
4641
 
4642
  if (e->src != ENTRY_BLOCK_PTR)
4643
    {
4644
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
4645
      if (ret)
4646
        return ret;
4647
    }
4648
 
4649
  gsi = gsi_last_bb (bb);
4650
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4651
 
4652
  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4653
    {
4654
    case GIMPLE_COND:
4655
      /* For GIMPLE_COND, we only need to redirect the edge.  */
4656
      break;
4657
 
4658
    case GIMPLE_GOTO:
4659
      /* No non-abnormal edges should lead from a non-simple goto, and
4660
         simple ones should be represented implicitly.  */
4661
      gcc_unreachable ();
4662
 
4663
    case GIMPLE_SWITCH:
4664
      {
4665
        tree label = gimple_block_label (dest);
4666
        tree cases = get_cases_for_edge (e, stmt);
4667
 
4668
        /* If we have a list of cases associated with E, then use it
4669
           as it's a lot faster than walking the entire case vector.  */
4670
        if (cases)
4671
          {
4672
            edge e2 = find_edge (e->src, dest);
4673
            tree last, first;
4674
 
4675
            first = cases;
4676
            while (cases)
4677
              {
4678
                last = cases;
4679
                CASE_LABEL (cases) = label;
4680
                cases = TREE_CHAIN (cases);
4681
              }
4682
 
4683
            /* If there was already an edge in the CFG, then we need
4684
               to move all the cases associated with E to E2.  */
4685
            if (e2)
4686
              {
4687
                tree cases2 = get_cases_for_edge (e2, stmt);
4688
 
4689
                TREE_CHAIN (last) = TREE_CHAIN (cases2);
4690
                TREE_CHAIN (cases2) = first;
4691
              }
4692
          }
4693
        else
4694
          {
4695
            size_t i, n = gimple_switch_num_labels (stmt);
4696
 
4697
            for (i = 0; i < n; i++)
4698
              {
4699
                tree elt = gimple_switch_label (stmt, i);
4700
                if (label_to_block (CASE_LABEL (elt)) == e->dest)
4701
                  CASE_LABEL (elt) = label;
4702
              }
4703
          }
4704
      }
4705
      break;
4706
 
4707
    case GIMPLE_ASM:
4708
      {
4709
        int i, n = gimple_asm_nlabels (stmt);
4710
        tree label = NULL;
4711
 
4712
        for (i = 0; i < n; ++i)
4713
          {
4714
            tree cons = gimple_asm_label_op (stmt, i);
4715
            if (label_to_block (TREE_VALUE (cons)) == e->dest)
4716
              {
4717
                if (!label)
4718
                  label = gimple_block_label (dest);
4719
                TREE_VALUE (cons) = label;
4720
              }
4721
          }
4722
 
4723
        /* If we didn't find any label matching the former edge in the
4724
           asm labels, we must be redirecting the fallthrough
4725
           edge.  */
4726
        gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4727
      }
4728
      break;
4729
 
4730
    case GIMPLE_RETURN:
4731
      gsi_remove (&gsi, true);
4732
      e->flags |= EDGE_FALLTHRU;
4733
      break;
4734
 
4735
    case GIMPLE_OMP_RETURN:
4736
    case GIMPLE_OMP_CONTINUE:
4737
    case GIMPLE_OMP_SECTIONS_SWITCH:
4738
    case GIMPLE_OMP_FOR:
4739
      /* The edges from OMP constructs can be simply redirected.  */
4740
      break;
4741
 
4742
    case GIMPLE_EH_DISPATCH:
4743
      if (!(e->flags & EDGE_FALLTHRU))
4744
        redirect_eh_dispatch_edge (stmt, e, dest);
4745
      break;
4746
 
4747
    default:
4748
      /* Otherwise it must be a fallthru edge, and we don't need to
4749
         do anything besides redirecting it.  */
4750
      gcc_assert (e->flags & EDGE_FALLTHRU);
4751
      break;
4752
    }
4753
 
4754
  /* Update/insert PHI nodes as necessary.  */
4755
 
4756
  /* Now update the edges in the CFG.  */
4757
  e = ssa_redirect_edge (e, dest);
4758
 
4759
  return e;
4760
}
4761
 
4762
/* Returns true if it is possible to remove edge E by redirecting
4763
   it to the destination of the other edge from E->src.  */
4764
 
4765
static bool
4766
gimple_can_remove_branch_p (const_edge e)
4767
{
4768
  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4769
    return false;
4770
 
4771
  return true;
4772
}
4773
 
4774
/* Simple wrapper, as we can always redirect fallthru edges.  */
4775
 
4776
static basic_block
4777
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4778
{
4779
  e = gimple_redirect_edge_and_branch (e, dest);
4780
  gcc_assert (e);
4781
 
4782
  return NULL;
4783
}
4784
 
4785
 
4786
/* Splits basic block BB after statement STMT (but at least after the
4787
   labels).  If STMT is NULL, BB is split just after the labels.  */
4788
 
4789
static basic_block
4790
gimple_split_block (basic_block bb, void *stmt)
4791
{
4792
  gimple_stmt_iterator gsi;
4793
  gimple_stmt_iterator gsi_tgt;
4794
  gimple act;
4795
  gimple_seq list;
4796
  basic_block new_bb;
4797
  edge e;
4798
  edge_iterator ei;
4799
 
4800
  new_bb = create_empty_bb (bb);
4801
 
4802
  /* Redirect the outgoing edges.  */
4803
  new_bb->succs = bb->succs;
4804
  bb->succs = NULL;
4805
  FOR_EACH_EDGE (e, ei, new_bb->succs)
4806
    e->src = new_bb;
4807
 
4808
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4809
    stmt = NULL;
4810
 
4811
  /* Move everything from GSI to the new basic block.  */
4812
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4813
    {
4814
      act = gsi_stmt (gsi);
4815
      if (gimple_code (act) == GIMPLE_LABEL)
4816
        continue;
4817
 
4818
      if (!stmt)
4819
        break;
4820
 
4821
      if (stmt == act)
4822
        {
4823
          gsi_next (&gsi);
4824
          break;
4825
        }
4826
    }
4827
 
4828
  if (gsi_end_p (gsi))
4829
    return new_bb;
4830
 
4831
  /* Split the statement list - avoid re-creating new containers as this
4832
     brings ugly quadratic memory consumption in the inliner.
4833
     (We are still quadratic since we need to update stmt BB pointers,
4834
     sadly.)  */
4835
  list = gsi_split_seq_before (&gsi);
4836
  set_bb_seq (new_bb, list);
4837
  for (gsi_tgt = gsi_start (list);
4838
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4839
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4840
 
4841
  return new_bb;
4842
}
4843
 
4844
 
4845
/* Moves basic block BB after block AFTER.  */
4846
 
4847
static bool
4848
gimple_move_block_after (basic_block bb, basic_block after)
4849
{
4850
  if (bb->prev_bb == after)
4851
    return true;
4852
 
4853
  unlink_block (bb);
4854
  link_block (bb, after);
4855
 
4856
  return true;
4857
}
4858
 
4859
 
4860
/* Return true if basic_block can be duplicated.  */
4861
 
4862
static bool
4863
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4864
{
4865
  return true;
4866
}
4867
 
4868
/* Create a duplicate of the basic block BB.  NOTE: This does not
4869
   preserve SSA form.  */
4870
 
4871
static basic_block
4872
gimple_duplicate_bb (basic_block bb)
4873
{
4874
  basic_block new_bb;
4875
  gimple_stmt_iterator gsi, gsi_tgt;
4876
  gimple_seq phis = phi_nodes (bb);
4877
  gimple phi, stmt, copy;
4878
 
4879
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4880
 
4881
  /* Copy the PHI nodes.  We ignore PHI node arguments here because
4882
     the incoming edges have not been set up yet.  */
4883
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4884
    {
4885
      phi = gsi_stmt (gsi);
4886
      copy = create_phi_node (gimple_phi_result (phi), new_bb);
4887
      create_new_def_for (gimple_phi_result (copy), copy,
4888
                          gimple_phi_result_ptr (copy));
4889
    }
4890
 
4891
  gsi_tgt = gsi_start_bb (new_bb);
4892
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4893
    {
4894
      def_operand_p def_p;
4895
      ssa_op_iter op_iter;
4896
 
4897
      stmt = gsi_stmt (gsi);
4898
      if (gimple_code (stmt) == GIMPLE_LABEL)
4899
        continue;
4900
 
4901
      /* Create a new copy of STMT and duplicate STMT's virtual
4902
         operands.  */
4903
      copy = gimple_copy (stmt);
4904
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
4905
 
4906
      maybe_duplicate_eh_stmt (copy, stmt);
4907
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
4908
 
4909
      /* Create new names for all the definitions created by COPY and
4910
         add replacement mappings for each new name.  */
4911
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4912
        create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4913
    }
4914
 
4915
  return new_bb;
4916
}
4917
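
/* Note that the copy produced above has PHI nodes without arguments and
   no outgoing edges of its own; callers are expected to create the edges
   for the copy and then use add_phi_args_after_copy_bb (or
   add_phi_args_after_copy) below to fill in the PHI arguments.  */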
 
4918
/* Adds phi node arguments for edge E_COPY after basic block duplication.  */
4919
 
4920
static void
4921
add_phi_args_after_copy_edge (edge e_copy)
4922
{
4923
  basic_block bb, bb_copy = e_copy->src, dest;
4924
  edge e;
4925
  edge_iterator ei;
4926
  gimple phi, phi_copy;
4927
  tree def;
4928
  gimple_stmt_iterator psi, psi_copy;
4929
 
4930
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
4931
    return;
4932
 
4933
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
4934
 
4935
  if (e_copy->dest->flags & BB_DUPLICATED)
4936
    dest = get_bb_original (e_copy->dest);
4937
  else
4938
    dest = e_copy->dest;
4939
 
4940
  e = find_edge (bb, dest);
4941
  if (!e)
4942
    {
4943
      /* During loop unrolling the target of the latch edge is copied.
4944
         In this case we are not looking for an edge to dest, but for an
4945
         edge to the duplicated block whose original was dest.  */
4946
      FOR_EACH_EDGE (e, ei, bb->succs)
4947
        {
4948
          if ((e->dest->flags & BB_DUPLICATED)
4949
              && get_bb_original (e->dest) == dest)
4950
            break;
4951
        }
4952
 
4953
      gcc_assert (e != NULL);
4954
    }
4955
 
4956
  for (psi = gsi_start_phis (e->dest),
4957
       psi_copy = gsi_start_phis (e_copy->dest);
4958
       !gsi_end_p (psi);
4959
       gsi_next (&psi), gsi_next (&psi_copy))
4960
    {
4961
      phi = gsi_stmt (psi);
4962
      phi_copy = gsi_stmt (psi_copy);
4963
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4964
      add_phi_arg (phi_copy, def, e_copy,
4965
                   gimple_phi_arg_location_from_edge (phi, e));
4966
    }
4967
}
4968
 
4969
 
4970
/* Basic block BB_COPY was created by code duplication.  Add phi node
4971
   arguments for edges going out of BB_COPY.  The blocks that were
4972
   duplicated have BB_DUPLICATED set.  */
4973
 
4974
void
4975
add_phi_args_after_copy_bb (basic_block bb_copy)
4976
{
4977
  edge e_copy;
4978
  edge_iterator ei;
4979
 
4980
  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4981
    {
4982
      add_phi_args_after_copy_edge (e_copy);
4983
    }
4984
}
4985
 
4986
/* Blocks in REGION_COPY array of length N_REGION were created by
4987
   duplication of basic blocks.  Add phi node arguments for edges
4988
   going from these blocks.  If E_COPY is not NULL, also add
4989
   phi node arguments for its destination.  */
4990
 
4991
void
4992
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
4993
                         edge e_copy)
4994
{
4995
  unsigned i;
4996
 
4997
  for (i = 0; i < n_region; i++)
4998
    region_copy[i]->flags |= BB_DUPLICATED;
4999
 
5000
  for (i = 0; i < n_region; i++)
5001
    add_phi_args_after_copy_bb (region_copy[i]);
5002
  if (e_copy)
5003
    add_phi_args_after_copy_edge (e_copy);
5004
 
5005
  for (i = 0; i < n_region; i++)
5006
    region_copy[i]->flags &= ~BB_DUPLICATED;
5007
}
5008
 
5009
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5010
   important exit edge EXIT.  By important we mean that no SSA name defined
5011
   inside region is live over the other exit edges of the region.  All entry
5012
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
5013
   to the duplicate of the region.  SSA form, dominance and loop information
5014
   is updated.  The new basic blocks are stored to REGION_COPY in the same
5015
   order as they had in REGION, provided that REGION_COPY is not NULL.
5016
   The function returns false if it is unable to copy the region,
5017
   true otherwise.  */
5018
 
5019
bool
5020
gimple_duplicate_sese_region (edge entry, edge exit,
5021
                            basic_block *region, unsigned n_region,
5022
                            basic_block *region_copy)
5023
{
5024
  unsigned i;
5025
  bool free_region_copy = false, copying_header = false;
5026
  struct loop *loop = entry->dest->loop_father;
5027
  edge exit_copy;
5028
  VEC (basic_block, heap) *doms;
5029
  edge redirected;
5030
  int total_freq = 0, entry_freq = 0;
5031
  gcov_type total_count = 0, entry_count = 0;
5032
 
5033
  if (!can_copy_bbs_p (region, n_region))
5034
    return false;
5035
 
5036
  /* Some sanity checking.  Note that we do not check for all possible
5037
     misuses of the function.  I.e. if you ask to copy something weird,
5038
     it will work, but the state of structures probably will not be
5039
     correct.  */
5040
  for (i = 0; i < n_region; i++)
5041
    {
5042
      /* We do not handle subloops, i.e. all the blocks must belong to the
5043
         same loop.  */
5044
      if (region[i]->loop_father != loop)
5045
        return false;
5046
 
5047
      if (region[i] != entry->dest
5048
          && region[i] == loop->header)
5049
        return false;
5050
    }
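  /* Initially map LOOP to itself; the mapping is changed below when
     we are copying the loop header.  */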
5051
 
5052
  set_loop_copy (loop, loop);
5053
 
5054
  /* In case the function is used for loop header copying (which is the primary
5055
     use), ensure that EXIT and its copy will be the new latch and entry edges.  */
5056
  if (loop->header == entry->dest)
5057
    {
5058
      copying_header = true;
5059
      set_loop_copy (loop, loop_outer (loop));
5060
 
5061
      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5062
        return false;
5063
 
5064
      for (i = 0; i < n_region; i++)
5065
        if (region[i] != exit->src
5066
            && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5067
          return false;
5068
    }
5069
 
5070
  if (!region_copy)
5071
    {
5072
      region_copy = XNEWVEC (basic_block, n_region);
5073
      free_region_copy = true;
5074
    }
5075
 
5076
  gcc_assert (!need_ssa_update_p (cfun));
5077
 
5078
  /* Record blocks outside the region that are dominated by something
5079
     inside.  */
5080
  doms = NULL;
5081
  initialize_original_copy_tables ();
5082
 
5083
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5084
 
5085
  if (entry->dest->count)
5086
    {
5087
      total_count = entry->dest->count;
5088
      entry_count = entry->count;
5089
      /* Fix up corner cases, to avoid division by zero or creation of negative
5090
         frequencies.  */
5091
      if (entry_count > total_count)
5092
        entry_count = total_count;
5093
    }
5094
  else
5095
    {
5096
      total_freq = entry->dest->frequency;
5097
      entry_freq = EDGE_FREQUENCY (entry);
5098
      /* Fix up corner cases, to avoid division by zero or creation of negative
5099
         frequencies.  */
5100
      if (total_freq == 0)
5101
        total_freq = 1;
5102
      else if (entry_freq > total_freq)
5103
        entry_freq = total_freq;
5104
    }
5105
 
5106
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5107
            split_edge_bb_loc (entry));
5108
  if (total_count)
5109
    {
5110
      scale_bbs_frequencies_gcov_type (region, n_region,
5111
                                       total_count - entry_count,
5112
                                       total_count);
5113
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5114
                                       total_count);
5115
    }
5116
  else
5117
    {
5118
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5119
                                 total_freq);
5120
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5121
    }
5122
 
5123
  if (copying_header)
5124
    {
5125
      loop->header = exit->dest;
5126
      loop->latch = exit->src;
5127
    }
5128
 
5129
  /* Redirect the entry and add the phi node arguments.  */
5130
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5131
  gcc_assert (redirected != NULL);
5132
  flush_pending_stmts (entry);
5133
 
5134
  /* Concerning updating of dominators:  We must recount dominators
5135
     for the entry block and its copy.  Anything that is outside of the
5136
     region, but was dominated by something inside needs recounting as
5137
     well.  */
5138
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5139
  VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5140
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5141
  VEC_free (basic_block, heap, doms);
5142
 
5143
  /* Add the other PHI node arguments.  */
5144
  add_phi_args_after_copy (region_copy, n_region, NULL);
5145
 
5146
  /* Update the SSA web.  */
5147
  update_ssa (TODO_update_ssa);
5148
 
5149
  if (free_region_copy)
5150
    free (region_copy);
5151
 
5152
  free_original_copy_tables ();
5153
  return true;
5154
}
5155
 
5156
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
5157
   are stored to REGION_COPY in the same order as they appear
5158
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
5159
   the region, EXIT an exit from it.  The condition guarding EXIT
5160
   is moved to ENTRY.  Returns true if duplication succeeds, false
5161
   otherwise.
5162
 
5163
   For example,
5164
 
5165
   some_code;
5166
   if (cond)
5167
     A;
5168
   else
5169
     B;
5170
 
5171
   is transformed to
5172
 
5173
   if (cond)
5174
     {
5175
       some_code;
5176
       A;
5177
     }
5178
   else
5179
     {
5180
       some_code;
5181
       B;
5182
     }
5183
*/
5184
 
5185
bool
5186
gimple_duplicate_sese_tail (edge entry, edge exit,
5187
                          basic_block *region, unsigned n_region,
5188
                          basic_block *region_copy)
5189
{
5190
  unsigned i;
5191
  bool free_region_copy = false;
5192
  struct loop *loop = exit->dest->loop_father;
5193
  struct loop *orig_loop = entry->dest->loop_father;
5194
  basic_block switch_bb, entry_bb, nentry_bb;
5195
  VEC (basic_block, heap) *doms;
5196
  int total_freq = 0, exit_freq = 0;
5197
  gcov_type total_count = 0, exit_count = 0;
5198
  edge exits[2], nexits[2], e;
5199
  gimple_stmt_iterator gsi, gsi1;
5200
  gimple cond_stmt;
5201
  edge sorig, snew;
5202
  basic_block exit_bb;
5203
  basic_block iters_bb;
5204
  tree new_rhs;
5205
  gimple_stmt_iterator psi;
5206
  gimple phi;
5207
  tree def;
5208
 
5209
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5210
  exits[0] = exit;
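  /* EXITS[1] is the other successor edge of EXIT->src, i.e. the
     successor edge that is not EXIT.  */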
5211
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5212
 
5213
  if (!can_copy_bbs_p (region, n_region))
5214
    return false;
5215
 
5216
  initialize_original_copy_tables ();
5217
  set_loop_copy (orig_loop, loop);
5218
  duplicate_subloops (orig_loop, loop);
5219
 
5220
  if (!region_copy)
5221
    {
5222
      region_copy = XNEWVEC (basic_block, n_region);
5223
      free_region_copy = true;
5224
    }
5225
 
5226
  gcc_assert (!need_ssa_update_p (cfun));
5227
 
5228
  /* Record blocks outside the region that are dominated by something
5229
     inside.  */
5230
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5231
 
5232
  if (exit->src->count)
5233
    {
5234
      total_count = exit->src->count;
5235
      exit_count = exit->count;
5236
      /* Fix up corner cases, to avoid division by zero or creation of negative
5237
         frequencies.  */
5238
      if (exit_count > total_count)
5239
        exit_count = total_count;
5240
    }
5241
  else
5242
    {
5243
      total_freq = exit->src->frequency;
5244
      exit_freq = EDGE_FREQUENCY (exit);
5245
      /* Fix up corner cases, to avoid division by zero or creation of negative
5246
         frequencies.  */
5247
      if (total_freq == 0)
5248
        total_freq = 1;
5249
      if (exit_freq > total_freq)
5250
        exit_freq = total_freq;
5251
    }
5252
 
5253
  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5254
            split_edge_bb_loc (exit));
5255
  if (total_count)
5256
    {
5257
      scale_bbs_frequencies_gcov_type (region, n_region,
5258
                                       total_count - exit_count,
5259
                                       total_count);
5260
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5261
                                       total_count);
5262
    }
5263
  else
5264
    {
5265
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5266
                                 total_freq);
5267
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5268
    }
5269
 
5270
  /* Create the switch block, and put the exit condition into it.  */
5271
  entry_bb = entry->dest;
5272
  nentry_bb = get_bb_copy (entry_bb);
5273
  if (!last_stmt (entry->src)
5274
      || !stmt_ends_bb_p (last_stmt (entry->src)))
5275
    switch_bb = entry->src;
5276
  else
5277
    switch_bb = split_edge (entry);
5278
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5279
 
5280
  gsi = gsi_last_bb (switch_bb);
5281
  cond_stmt = last_stmt (exit->src);
5282
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5283
  cond_stmt = gimple_copy (cond_stmt);
5284
 
5285
 /* If the block containing the exit condition has the latch as
5286
    its successor, then the body of the loop is executed before
5287
    the exit condition is tested.  In that case, moving the
5288
    condition to the entry makes the loop iterate one time
5289
    less (which is the desired outcome, since we peel off the
5290
    last iteration).  If the body is executed after the
5291
    condition, moving the condition to the entry requires
5292
    decrementing the iteration count by one.  */
5293
  if (exits[1]->dest == orig_loop->latch)
5294
    new_rhs = gimple_cond_rhs (cond_stmt);
5295
  else
5296
  {
5297
    new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5298
                           gimple_cond_rhs (cond_stmt),
5299
                           build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5300
 
5301
    if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5302
      {
5303
        iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5304
        for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5305
          if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5306
            break;
5307
 
5308
        new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5309
                                            NULL_TREE, false, GSI_CONTINUE_LINKING);
5310
      }
5311
  }
5312
  gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5313
  gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5314
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
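  /* Reuse SWITCH_BB's single existing successor edge with the flags of
     the non-exit edge EXITS[1], and add a new edge into the copied
     region with the flags of the exit edge EXITS[0].  */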
5315
 
5316
  sorig = single_succ_edge (switch_bb);
5317
  sorig->flags = exits[1]->flags;
5318
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5319
 
5320
  /* Register the new edge from SWITCH_BB in loop exit lists.  */
5321
  rescan_loop_exit (snew, true, false);
5322
 
5323
  /* Add the PHI node arguments.  */
5324
  add_phi_args_after_copy (region_copy, n_region, snew);
5325
 
5326
  /* Get rid of now superfluous conditions and associated edges (and phi node
5327
     arguments).  */
5328
  exit_bb = exit->dest;
5329
 
5330
  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5331
  PENDING_STMT (e) = NULL;
5332
 
5333
  /* The latch of ORIG_LOOP was copied, and so was the backedge
5334
     to the original header.  We redirect this backedge to EXIT_BB.  */
5335
  for (i = 0; i < n_region; i++)
5336
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
5337
      {
5338
        gcc_assert (single_succ_edge (region_copy[i]));
5339
        e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5340
        PENDING_STMT (e) = NULL;
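        /* The redirected backedge must feed EXIT_BB's PHI nodes with
           the same arguments that arrive via the copied exit edge
           NEXITS[0].  */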
5341
        for (psi = gsi_start_phis (exit_bb);
5342
             !gsi_end_p (psi);
5343
             gsi_next (&psi))
5344
          {
5345
            phi = gsi_stmt (psi);
5346
            def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5347
            add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5348
          }
5349
      }
5350
  e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5351
  PENDING_STMT (e) = NULL;
5352
 
5353
  /* Anything that is outside of the region, but was dominated by something
5354
     inside needs to update dominance info.  */
5355
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5356
  VEC_free (basic_block, heap, doms);
5357
  /* Update the SSA web.  */
5358
  update_ssa (TODO_update_ssa);
5359
 
5360
  if (free_region_copy)
5361
    free (region_copy);
5362
 
5363
  free_original_copy_tables ();
5364
  return true;
5365
}
5366
 
5367
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
5368
   adding blocks when the dominator traversal reaches EXIT.  This
5369
   function silently assumes that ENTRY strictly dominates EXIT.  */
5370
 
5371
void
5372
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5373
                              VEC(basic_block,heap) **bbs_p)
5374
{
5375
  basic_block son;
5376
 
5377
  for (son = first_dom_son (CDI_DOMINATORS, entry);
5378
       son;
5379
       son = next_dom_son (CDI_DOMINATORS, son))
5380
    {
5381
      VEC_safe_push (basic_block, heap, *bbs_p, son);
5382
      if (son != exit)
5383
        gather_blocks_in_sese_region (son, exit, bbs_p);
5384
    }
5385
}
5386
 
5387
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5388
   The duplicates are recorded in VARS_MAP.  */
5389
 
5390
static void
5391
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5392
                           tree to_context)
5393
{
5394
  tree t = *tp, new_t;
5395
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
5396
  void **loc;
5397
 
5398
  if (DECL_CONTEXT (t) == to_context)
5399
    return;
5400
 
5401
  loc = pointer_map_contains (vars_map, t);
5402
 
5403
  if (!loc)
5404
    {
5405
      loc = pointer_map_insert (vars_map, t);
5406
 
5407
      if (SSA_VAR_P (t))
5408
        {
5409
          new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5410
          f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5411
        }
5412
      else
5413
        {
5414
          gcc_assert (TREE_CODE (t) == CONST_DECL);
5415
          new_t = copy_node (t);
5416
        }
5417
      DECL_CONTEXT (new_t) = to_context;
5418
 
5419
      *loc = new_t;
5420
    }
5421
  else
5422
    new_t = (tree) *loc;
5423
 
5424
  *tp = new_t;
5425
}
5426
 
5427
 
5428
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5429
   VARS_MAP maps old ssa names and var_decls to the new ones.  */
5430
 
5431
static tree
5432
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5433
                  tree to_context)
5434
{
5435
  void **loc;
5436
  tree new_name, decl = SSA_NAME_VAR (name);
5437
 
5438
  gcc_assert (is_gimple_reg (name));
5439
 
5440
  loc = pointer_map_contains (vars_map, name);
5441
 
5442
  if (!loc)
5443
    {
5444
      replace_by_duplicate_decl (&decl, vars_map, to_context);
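      /* Create the new name while TO_CONTEXT's function is the current
         one, so that the name and any default-def binding are
         registered with that function.  */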
5445
 
5446
      push_cfun (DECL_STRUCT_FUNCTION (to_context));
5447
      if (gimple_in_ssa_p (cfun))
5448
        add_referenced_var (decl);
5449
 
5450
      new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5451
      if (SSA_NAME_IS_DEFAULT_DEF (name))
5452
        set_default_def (decl, new_name);
5453
      pop_cfun ();
5454
 
5455
      loc = pointer_map_insert (vars_map, name);
5456
      *loc = new_name;
5457
    }
5458
  else
5459
    new_name = (tree) *loc;
5460
 
5461
  return new_name;
5462
}
5463
 
5464
struct move_stmt_d
5465
{
5466
  tree orig_block;
5467
  tree new_block;
5468
  tree from_context;
5469
  tree to_context;
5470
  struct pointer_map_t *vars_map;
5471
  htab_t new_label_map;
5472
  struct pointer_map_t *eh_map;
5473
  bool remap_decls_p;
5474
};
5475
 
5476
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
5477
   contained in *TP if it has been ORIG_BLOCK previously and change the
5478
   DECL_CONTEXT of every local variable referenced in *TP.  */
5479
 
5480
static tree
5481
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5482
{
5483
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5484
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5485
  tree t = *tp;
5486
 
5487
  if (EXPR_P (t))
5488
    /* We should never have TREE_BLOCK set on non-statements.  */
5489
    gcc_assert (!TREE_BLOCK (t));
5490
 
5491
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5492
    {
5493
      if (TREE_CODE (t) == SSA_NAME)
5494
        *tp = replace_ssa_name (t, p->vars_map, p->to_context);
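      /* Labels that were duplicated along with the EH regions are
         replaced by their copies; in either case the label now belongs
         to the destination function.  */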
5495
      else if (TREE_CODE (t) == LABEL_DECL)
5496
        {
5497
          if (p->new_label_map)
5498
            {
5499
              struct tree_map in, *out;
5500
              in.base.from = t;
5501
              out = (struct tree_map *)
5502
                htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5503
              if (out)
5504
                *tp = t = out->to;
5505
            }
5506
 
5507
          DECL_CONTEXT (t) = p->to_context;
5508
        }
5509
      else if (p->remap_decls_p)
5510
        {
5511
          /* Replace T with its duplicate.  T should no longer appear in the
5512
             parent function, so this looks wasteful; however, it may appear
5513
             in referenced_vars, and more importantly, as virtual operands of
5514
             statements, and in alias lists of other variables.  It would be
5515
             quite difficult to expunge it from all those places.  ??? It might
5516
             suffice to do this for addressable variables.  */
5517
          if ((TREE_CODE (t) == VAR_DECL
5518
               && !is_global_var (t))
5519
              || TREE_CODE (t) == CONST_DECL)
5520
            replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5521
 
5522
          if (SSA_VAR_P (t)
5523
              && gimple_in_ssa_p (cfun))
5524
            {
5525
              push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5526
              add_referenced_var (*tp);
5527
              pop_cfun ();
5528
            }
5529
        }
5530
      *walk_subtrees = 0;
5531
    }
5532
  else if (TYPE_P (t))
5533
    *walk_subtrees = 0;
5534
 
5535
  return NULL_TREE;
5536
}
5537
 
5538
/* Helper for move_stmt_r.  Given an EH region number for the source
5539
   function, map that to the duplicate EH region number in the dest.  */
5540
 
5541
static int
5542
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5543
{
5544
  eh_region old_r, new_r;
5545
  void **slot;
5546
 
5547
  old_r = get_eh_region_from_number (old_nr);
5548
  slot = pointer_map_contains (p->eh_map, old_r);
5549
  new_r = (eh_region) *slot;
5550
 
5551
  return new_r->index;
5552
}
5553
 
5554
/* Similar, but operate on INTEGER_CSTs.  */
5555
 
5556
static tree
5557
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5558
{
5559
  int old_nr, new_nr;
5560
 
5561
  old_nr = tree_low_cst (old_t_nr, 0);
5562
  new_nr = move_stmt_eh_region_nr (old_nr, p);
5563
 
5564
  return build_int_cst (NULL, new_nr);
5565
}
5566
 
5567
/* Like move_stmt_op, but for gimple statements.
5568
 
5569
   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
5570
   contained in the current statement in *GSI_P and change the
5571
   DECL_CONTEXT of every local variable referenced in the current
5572
   statement.  */
5573
 
5574
static tree
5575
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5576
             struct walk_stmt_info *wi)
5577
{
5578
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5579
  gimple stmt = gsi_stmt (*gsi_p);
5580
  tree block = gimple_block (stmt);
5581
 
5582
  if (p->orig_block == NULL_TREE
5583
      || block == p->orig_block
5584
      || block == NULL_TREE)
5585
    gimple_set_block (stmt, p->new_block);
5586
#ifdef ENABLE_CHECKING
5587
  else if (block != p->new_block)
5588
    {
5589
      while (block && block != p->orig_block)
5590
        block = BLOCK_SUPERCONTEXT (block);
5591
      gcc_assert (block);
5592
    }
5593
#endif
5594
 
5595
  switch (gimple_code (stmt))
5596
    {
5597
    case GIMPLE_CALL:
5598
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
5599
      {
5600
        tree r, fndecl = gimple_call_fndecl (stmt);
5601
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5602
          switch (DECL_FUNCTION_CODE (fndecl))
5603
            {
5604
            case BUILT_IN_EH_COPY_VALUES:
5605
              r = gimple_call_arg (stmt, 1);
5606
              r = move_stmt_eh_region_tree_nr (r, p);
5607
              gimple_call_set_arg (stmt, 1, r);
5608
              /* FALLTHRU */
5609
 
5610
            case BUILT_IN_EH_POINTER:
5611
            case BUILT_IN_EH_FILTER:
5612
              r = gimple_call_arg (stmt, 0);
5613
              r = move_stmt_eh_region_tree_nr (r, p);
5614
              gimple_call_set_arg (stmt, 0, r);
5615
              break;
5616
 
5617
            default:
5618
              break;
5619
            }
5620
      }
5621
      break;
5622
 
5623
    case GIMPLE_RESX:
5624
      {
5625
        int r = gimple_resx_region (stmt);
5626
        r = move_stmt_eh_region_nr (r, p);
5627
        gimple_resx_set_region (stmt, r);
5628
      }
5629
      break;
5630
 
5631
    case GIMPLE_EH_DISPATCH:
5632
      {
5633
        int r = gimple_eh_dispatch_region (stmt);
5634
        r = move_stmt_eh_region_nr (r, p);
5635
        gimple_eh_dispatch_set_region (stmt, r);
5636
      }
5637
      break;
5638
 
5639
    case GIMPLE_OMP_RETURN:
5640
    case GIMPLE_OMP_CONTINUE:
5641
      break;
5642
    default:
5643
      if (is_gimple_omp (stmt))
5644
        {
5645
          /* Do not remap variables inside OMP directives.  Variables
5646
             referenced in clauses and directive header belong to the
5647
             parent function and should not be moved into the child
5648
             function.  */
5649
          bool save_remap_decls_p = p->remap_decls_p;
5650
          p->remap_decls_p = false;
5651
          *handled_ops_p = true;
5652
 
5653
          walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5654
                           move_stmt_op, wi);
5655
 
5656
          p->remap_decls_p = save_remap_decls_p;
5657
        }
5658
      break;
5659
    }
5660
 
5661
  return NULL_TREE;
5662
}
5663
 
5664
/* Move basic block BB from function CFUN to function DEST_FN.  The
5665
   block is moved out of the original linked list and placed after
5666
   block AFTER in the new list.  Also, the block is removed from the
5667
   original array of blocks and placed in DEST_FN's array of blocks.
5668
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5669
   updated to reflect the moved edges.
5670
 
5671
   The local variables are remapped to new instances, VARS_MAP is used
5672
   to record the mapping.  */
5673
 
5674
static void
5675
move_block_to_fn (struct function *dest_cfun, basic_block bb,
5676
                  basic_block after, bool update_edge_count_p,
5677
                  struct move_stmt_d *d)
5678
{
5679
  struct control_flow_graph *cfg;
5680
  edge_iterator ei;
5681
  edge e;
5682
  gimple_stmt_iterator si;
5683
  unsigned old_len, new_len;
5684
 
5685
  /* Remove BB from dominance structures.  */
5686
  delete_from_dominance_info (CDI_DOMINATORS, bb);
5687
  if (current_loops)
5688
    remove_bb_from_loops (bb);
5689
 
5690
  /* Link BB to the new linked list.  */
5691
  move_block_after (bb, after);
5692
 
5693
  /* Update the edge count in the corresponding flowgraphs.  */
5694
  if (update_edge_count_p)
5695
    FOR_EACH_EDGE (e, ei, bb->succs)
5696
      {
5697
        cfun->cfg->x_n_edges--;
5698
        dest_cfun->cfg->x_n_edges++;
5699
      }
5700
 
5701
  /* Remove BB from the original basic block array.  */
5702
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5703
  cfun->cfg->x_n_basic_blocks--;
5704
 
5705
  /* Grow DEST_CFUN's basic block array if needed.  */
5706
  cfg = dest_cfun->cfg;
5707
  cfg->x_n_basic_blocks++;
5708
  if (bb->index >= cfg->x_last_basic_block)
5709
    cfg->x_last_basic_block = bb->index + 1;
5710
 
5711
  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5712
  if ((unsigned) cfg->x_last_basic_block >= old_len)
5713
    {
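      /* Grow to about 5/4 of the required size so that moving several
         blocks does not reallocate the array each time.  */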
5714
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5715
      VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5716
                             new_len);
5717
    }
5718
 
5719
  VEC_replace (basic_block, cfg->x_basic_block_info,
5720
               bb->index, bb);
5721
 
5722
  /* Remap the variables in phi nodes.  */
5723
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5724
    {
5725
      gimple phi = gsi_stmt (si);
5726
      use_operand_p use;
5727
      tree op = PHI_RESULT (phi);
5728
      ssa_op_iter oi;
5729
 
5730
      if (!is_gimple_reg (op))
5731
        {
5732
          /* Remove the phi nodes for virtual operands (alias analysis will be
5733
             run for the new function, anyway).  */
5734
          remove_phi_node (&si, true);
5735
          continue;
5736
        }
5737
 
5738
      SET_PHI_RESULT (phi,
5739
                      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5740
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5741
        {
5742
          op = USE_FROM_PTR (use);
5743
          if (TREE_CODE (op) == SSA_NAME)
5744
            SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5745
        }
5746
 
5747
      gsi_next (&si);
5748
    }
5749
 
5750
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5751
    {
5752
      gimple stmt = gsi_stmt (si);
5753
      struct walk_stmt_info wi;
5754
 
5755
      memset (&wi, 0, sizeof (wi));
5756
      wi.info = d;
5757
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5758
 
5759
      if (gimple_code (stmt) == GIMPLE_LABEL)
5760
        {
5761
          tree label = gimple_label_label (stmt);
5762
          int uid = LABEL_DECL_UID (label);
5763
 
5764
          gcc_assert (uid > -1);
5765
 
5766
          old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5767
          if (old_len <= (unsigned) uid)
5768
            {
5769
              new_len = 3 * uid / 2 + 1;
5770
              VEC_safe_grow_cleared (basic_block, gc,
5771
                                     cfg->x_label_to_block_map, new_len);
5772
            }
5773
 
5774
          VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5775
          VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5776
 
5777
          gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5778
 
5779
          if (uid >= dest_cfun->cfg->last_label_uid)
5780
            dest_cfun->cfg->last_label_uid = uid + 1;
5781
        }
5782
 
5783
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5784
      remove_stmt_from_eh_lp_fn (cfun, stmt);
5785
 
5786
      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5787
      gimple_remove_stmt_histograms (cfun, stmt);
5788
 
5789
      /* We cannot leave any operands allocated from the operand caches of
5790
         the current function.  */
5791
      free_stmt_operands (stmt);
5792
      push_cfun (dest_cfun);
5793
      update_stmt (stmt);
5794
      pop_cfun ();
5795
    }
5796
 
5797
  FOR_EACH_EDGE (e, ei, bb->succs)
5798
    if (e->goto_locus)
5799
      {
5800
        tree block = e->goto_block;
5801
        if (d->orig_block == NULL_TREE
5802
            || block == d->orig_block)
5803
          e->goto_block = d->new_block;
5804
#ifdef ENABLE_CHECKING
5805
        else if (block != d->new_block)
5806
          {
5807
            while (block && block != d->orig_block)
5808
              block = BLOCK_SUPERCONTEXT (block);
5809
            gcc_assert (block);
5810
          }
5811
#endif
5812
      }
5813
}
5814
 
5815
/* Examine the statements in BB (which is in SRC_CFUN); find and return
5816
   the outermost EH region.  Use REGION as the incoming base EH region.  */
5817
 
5818
static eh_region
5819
find_outermost_region_in_block (struct function *src_cfun,
5820
                                basic_block bb, eh_region region)
5821
{
5822
  gimple_stmt_iterator si;
5823
 
5824
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5825
    {
5826
      gimple stmt = gsi_stmt (si);
5827
      eh_region stmt_region;
5828
      int lp_nr;
5829
 
5830
      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5831
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5832
      if (stmt_region)
5833
        {
5834
          if (region == NULL)
5835
            region = stmt_region;
5836
          else if (stmt_region != region)
5837
            {
5838
              region = eh_region_outermost (src_cfun, stmt_region, region);
5839
              gcc_assert (region != NULL);
5840
            }
5841
        }
5842
    }
5843
 
5844
  return region;
5845
}
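/* Helper for move_sese_region_to_fn.  Create a fresh artificial label
   to replace the source function's label DECL, give it the same
   LABEL_DECL_UID, and record the mapping in the hash table DATA.  */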
5846
 
5847
static tree
5848
new_label_mapper (tree decl, void *data)
5849
{
5850
  htab_t hash = (htab_t) data;
5851
  struct tree_map *m;
5852
  void **slot;
5853
 
5854
  gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5855
 
5856
  m = XNEW (struct tree_map);
5857
  m->hash = DECL_UID (decl);
5858
  m->base.from = decl;
5859
  m->to = create_artificial_label (UNKNOWN_LOCATION);
5860
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5861
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5862
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5863
 
5864
  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5865
  gcc_assert (*slot == NULL);
5866
 
5867
  *slot = m;
5868
 
5869
  return m->to;
5870
}
5871
 
5872
/* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
5873
   subblocks.  */
5874
 
5875
static void
5876
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5877
                                  tree to_context)
5878
{
5879
  tree *tp, t;
5880
 
5881
  for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5882
    {
5883
      t = *tp;
5884
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5885
        continue;
5886
      replace_by_duplicate_decl (&t, vars_map, to_context);
5887
      if (t != *tp)
5888
        {
5889
          if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5890
            {
5891
              SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5892
              DECL_HAS_VALUE_EXPR_P (t) = 1;
5893
            }
5894
          TREE_CHAIN (t) = TREE_CHAIN (*tp);
5895
          *tp = t;
5896
        }
5897
    }
5898
 
5899
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5900
    replace_block_vars_by_duplicates (block, vars_map, to_context);
5901
}
5902
 
5903
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
5904
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
5905
   single basic block in the original CFG and the new basic block is
5906
   returned.  DEST_CFUN must not have a CFG yet.
5907
 
5908
   Note that the region need not be a pure SESE region.  Blocks inside
5909
   the region may contain calls to abort/exit.  The only restriction
5910
   is that ENTRY_BB should be the only entry point and it must
5911
   dominate EXIT_BB.
5912
 
5913
   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5914
   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
5915
   to the new function.
5916
 
5917
   All local variables referenced in the region are assumed to be in
5918
   the corresponding BLOCK_VARS and unexpanded variable lists
5919
   associated with DEST_CFUN.  */
5920
 
5921
basic_block
5922
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5923
                        basic_block exit_bb, tree orig_block)
5924
{
5925
  VEC(basic_block,heap) *bbs, *dom_bbs;
5926
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5927
  basic_block after, bb, *entry_pred, *exit_succ, abb;
5928
  struct function *saved_cfun = cfun;
5929
  int *entry_flag, *exit_flag;
5930
  unsigned *entry_prob, *exit_prob;
5931
  unsigned i, num_entry_edges, num_exit_edges;
5932
  edge e;
5933
  edge_iterator ei;
5934
  htab_t new_label_map;
5935
  struct pointer_map_t *vars_map, *eh_map;
5936
  struct loop *loop = entry_bb->loop_father;
5937
  struct move_stmt_d d;
5938
 
5939
  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5940
     region.  */
5941
  gcc_assert (entry_bb != exit_bb
5942
              && (!exit_bb
5943
                  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5944
 
5945
  /* Collect all the blocks in the region.  Manually add ENTRY_BB
5946
     because it won't be added by gather_blocks_in_sese_region.  */
5947
  bbs = NULL;
5948
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
5949
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5950
 
5951
  /* The blocks that used to be dominated by something in BBS will now be
5952
     dominated by the new block.  */
5953
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5954
                                     VEC_address (basic_block, bbs),
5955
                                     VEC_length (basic_block, bbs));
5956
 
5957
  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
5958
     the predecessor edges to ENTRY_BB and the successor edges to
5959
     EXIT_BB so that we can re-attach them to the new basic block that
5960
     will replace the region.  */
5961
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
5962
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5963
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5964
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
5965
  i = 0;
5966
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5967
    {
5968
      entry_prob[i] = e->probability;
5969
      entry_flag[i] = e->flags;
5970
      entry_pred[i++] = e->src;
5971
      remove_edge (e);
5972
    }
5973
 
5974
  if (exit_bb)
5975
    {
5976
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
5977
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
5978
                                           sizeof (basic_block));
5979
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
5980
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
5981
      i = 0;
5982
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5983
        {
5984
          exit_prob[i] = e->probability;
5985
          exit_flag[i] = e->flags;
5986
          exit_succ[i++] = e->dest;
5987
          remove_edge (e);
5988
        }
5989
    }
5990
  else
5991
    {
5992
      num_exit_edges = 0;
5993
      exit_succ = NULL;
5994
      exit_flag = NULL;
5995
      exit_prob = NULL;
5996
    }
5997
 
5998
  /* Switch context to the child function to initialize DEST_FN's CFG.  */
5999
  gcc_assert (dest_cfun->cfg == NULL);
6000
  push_cfun (dest_cfun);
6001
 
6002
  init_empty_tree_cfg ();
6003
 
6004
  /* Initialize EH information for the new function.  */
6005
  eh_map = NULL;
6006
  new_label_map = NULL;
6007
  if (saved_cfun->eh)
6008
    {
6009
      eh_region region = NULL;
6010
 
6011
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6012
        region = find_outermost_region_in_block (saved_cfun, bb, region);
6013
 
6014
      init_eh_for_function ();
6015
      if (region != NULL)
6016
        {
6017
          new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6018
          eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6019
                                         new_label_mapper, new_label_map);
6020
        }
6021
    }
6022
 
6023
  pop_cfun ();
6024
 
6025
  /* Move blocks from BBS into DEST_CFUN.  */
6026
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
6027
  after = dest_cfun->cfg->x_entry_block_ptr;
6028
  vars_map = pointer_map_create ();
6029
 
6030
  memset (&d, 0, sizeof (d));
6031
  d.orig_block = orig_block;
6032
  d.new_block = DECL_INITIAL (dest_cfun->decl);
6033
  d.from_context = cfun->decl;
6034
  d.to_context = dest_cfun->decl;
6035
  d.vars_map = vars_map;
6036
  d.new_label_map = new_label_map;
6037
  d.eh_map = eh_map;
6038
  d.remap_decls_p = true;
6039
 
6040
  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6041
    {
6042
      /* No need to update edge counts on the last block.  They have
6043
         already been updated earlier when we detached the region from
6044
         the original CFG.  */
6045
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6046
      after = bb;
6047
    }
6048
 
6049
  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
6050
  if (orig_block)
6051
    {
6052
      tree block;
6053
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6054
                  == NULL_TREE);
6055
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6056
        = BLOCK_SUBBLOCKS (orig_block);
6057
      for (block = BLOCK_SUBBLOCKS (orig_block);
6058
           block; block = BLOCK_CHAIN (block))
6059
        BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6060
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6061
    }
6062
 
6063
  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6064
                                    vars_map, dest_cfun->decl);
6065
 
6066
  if (new_label_map)
6067
    htab_delete (new_label_map);
6068
  if (eh_map)
6069
    pointer_map_destroy (eh_map);
6070
  pointer_map_destroy (vars_map);
6071
 
6072
  /* Rewire the entry and exit blocks.  The region's entry block becomes
6073
     the successor of DEST_FN's ENTRY_BLOCK_PTR in the child function.
6074
     Similarly, the region's exit block becomes the predecessor of
6075
     DEST_FN's EXIT_BLOCK_PTR.  We
6076
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6077
     various CFG manipulation functions get to the right CFG.
6078
 
6079
     FIXME, this is silly.  The CFG ought to become a parameter to
6080
     these helpers.  */
6081
  push_cfun (dest_cfun);
6082
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6083
  if (exit_bb)
6084
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6085
  pop_cfun ();
6086
 
6087
  /* Back in the original function, the SESE region has disappeared;
6088
     create a new basic block in its place.  */
6089
  bb = create_empty_bb (entry_pred[0]);
6090
  if (current_loops)
6091
    add_bb_to_loop (bb, loop);
6092
  for (i = 0; i < num_entry_edges; i++)
6093
    {
6094
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
6095
      e->probability = entry_prob[i];
6096
    }
6097
 
6098
  for (i = 0; i < num_exit_edges; i++)
6099
    {
6100
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
6101
      e->probability = exit_prob[i];
6102
    }
6103
 
6104
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6105
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6106
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6107
  VEC_free (basic_block, heap, dom_bbs);
6108
 
6109
  if (exit_bb)
6110
    {
6111
      free (exit_prob);
6112
      free (exit_flag);
6113
      free (exit_succ);
6114
    }
6115
  free (entry_prob);
6116
  free (entry_flag);
6117
  free (entry_pred);
6118
  VEC_free (basic_block, heap, bbs);
6119
 
6120
  return bb;
6121
}
6122
 
6123
 
6124
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6125
   tree-pass.h).  */
6126
 
6127
void
6128
dump_function_to_file (tree fn, FILE *file, int flags)
6129
{
6130
  tree arg, vars, var;
6131
  struct function *dsf;
6132
  bool ignore_topmost_bind = false, any_var = false;
6133
  basic_block bb;
6134
  tree chain;
6135
 
6136
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6137
 
6138
  arg = DECL_ARGUMENTS (fn);
6139
  while (arg)
6140
    {
6141
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6142
      fprintf (file, " ");
6143
      print_generic_expr (file, arg, dump_flags);
6144
      if (flags & TDF_VERBOSE)
6145
        print_node (file, "", arg, 4);
6146
      if (TREE_CHAIN (arg))
6147
        fprintf (file, ", ");
6148
      arg = TREE_CHAIN (arg);
6149
    }
6150
  fprintf (file, ")\n");
6151
 
6152
  if (flags & TDF_VERBOSE)
6153
    print_node (file, "", fn, 2);
6154
 
6155
  dsf = DECL_STRUCT_FUNCTION (fn);
6156
  if (dsf && (flags & TDF_EH))
6157
    dump_eh_tree (file, dsf);
6158
 
6159
  if (flags & TDF_RAW && !gimple_has_body_p (fn))
6160
    {
6161
      dump_node (fn, TDF_SLIM | flags, file);
6162
      return;
6163
    }
6164
 
6165
  /* Switch CFUN to point to FN.  */
6166
  push_cfun (DECL_STRUCT_FUNCTION (fn));
6167
 
6168
  /* When GIMPLE is lowered, the variables are no longer available in
6169
     BIND_EXPRs, so display them separately.  */
6170
  if (cfun && cfun->decl == fn && cfun->local_decls)
6171
    {
6172
      ignore_topmost_bind = true;
6173
 
6174
      fprintf (file, "{\n");
6175
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6176
        {
6177
          var = TREE_VALUE (vars);
6178
 
6179
          print_generic_decl (file, var, flags);
6180
          if (flags & TDF_VERBOSE)
6181
            print_node (file, "", var, 4);
6182
          fprintf (file, "\n");
6183
 
6184
          any_var = true;
6185
        }
6186
    }
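  /* Three cases follow: a CFG-based dump when the CFG exists, a dump of
     the lowered GIMPLE body when there is no CFG yet, and a GENERIC
     tree dump otherwise.  */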
6187
 
6188
  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6189
    {
6190
      /* If the CFG has been built, emit a CFG-based dump.  */
6191
      check_bb_profile (ENTRY_BLOCK_PTR, file);
6192
      if (!ignore_topmost_bind)
6193
        fprintf (file, "{\n");
6194
 
6195
      if (any_var && n_basic_blocks)
6196
        fprintf (file, "\n");
6197
 
6198
      FOR_EACH_BB (bb)
6199
        gimple_dump_bb (bb, file, 2, flags);
6200
 
6201
      fprintf (file, "}\n");
6202
      check_bb_profile (EXIT_BLOCK_PTR, file);
6203
    }
6204
  else if (DECL_SAVED_TREE (fn) == NULL)
6205
    {
6206
      /* The function is now in GIMPLE form but the CFG has not been
6207
         built yet.  Emit the single sequence of GIMPLE statements
6208
         that make up its body.  */
6209
      gimple_seq body = gimple_body (fn);
6210
 
6211
      if (gimple_seq_first_stmt (body)
6212
          && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6213
          && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6214
        print_gimple_seq (file, body, 0, flags);
6215
      else
6216
        {
6217
          if (!ignore_topmost_bind)
6218
            fprintf (file, "{\n");
6219
 
6220
          if (any_var)
6221
            fprintf (file, "\n");
6222
 
6223
          print_gimple_seq (file, body, 2, flags);
6224
          fprintf (file, "}\n");
6225
        }
6226
    }
6227
  else
6228
    {
6229
      int indent;
6230
 
6231
      /* Make a tree based dump.  */
6232
      chain = DECL_SAVED_TREE (fn);
6233
 
6234
      if (chain && TREE_CODE (chain) == BIND_EXPR)
6235
        {
6236
          if (ignore_topmost_bind)
6237
            {
6238
              chain = BIND_EXPR_BODY (chain);
6239
              indent = 2;
6240
            }
6241
          else
6242
            indent = 0;
6243
        }
6244
      else
6245
        {
6246
          if (!ignore_topmost_bind)
6247
            fprintf (file, "{\n");
6248
          indent = 2;
6249
        }
6250
 
6251
      if (any_var)
6252
        fprintf (file, "\n");
6253
 
6254
      print_generic_stmt_indented (file, chain, flags, indent);
6255
      if (ignore_topmost_bind)
6256
        fprintf (file, "}\n");
6257
    }
6258
 
6259
  fprintf (file, "\n\n");
6260
 
6261
  /* Restore CFUN.  */
6262
  pop_cfun ();
6263
}
6264
 
6265
 
6266
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
6267
 
6268
void
6269
debug_function (tree fn, int flags)
6270
{
6271
  dump_function_to_file (fn, stderr, flags);
6272
}
6273
 
6274
 
6275
/* Print on FILE the indexes for the predecessors of basic_block BB.  */
6276
 
6277
static void
6278
print_pred_bbs (FILE *file, basic_block bb)
6279
{
6280
  edge e;
6281
  edge_iterator ei;
6282
 
6283
  FOR_EACH_EDGE (e, ei, bb->preds)
6284
    fprintf (file, "bb_%d ", e->src->index);
6285
}
6286
 
6287
 
6288
/* Print on FILE the indexes for the successors of basic_block BB.  */
6289
 
6290
static void
6291
print_succ_bbs (FILE *file, basic_block bb)
6292
{
6293
  edge e;
6294
  edge_iterator ei;
6295
 
6296
  FOR_EACH_EDGE (e, ei, bb->succs)
6297
    fprintf (file, "bb_%d ", e->dest->index);
6298
}
6299
 
6300
/* Print to FILE the basic block BB according to the VERBOSITY level.  */
6301
 
6302
void
6303
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6304
{
6305
  char *s_indent = (char *) alloca ((size_t) indent + 1);
6306
  memset ((void *) s_indent, ' ', (size_t) indent);
6307
  s_indent[indent] = '\0';
6308
 
6309
  /* Print basic_block's header.  */
6310
  if (verbosity >= 2)
6311
    {
6312
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
6313
      print_pred_bbs (file, bb);
6314
      fprintf (file, "}, succs = {");
6315
      print_succ_bbs (file, bb);
6316
      fprintf (file, "})\n");
6317
    }
6318
 
6319
  /* Print basic_block's body.  */
6320
  if (verbosity >= 3)
6321
    {
6322
      fprintf (file, "%s  {\n", s_indent);
6323
      gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6324
      fprintf (file, "%s  }\n", s_indent);
6325
    }
6326
}
6327
 
6328
static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6329
 
6330
/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
6331
   the VERBOSITY level, this outputs either the contents of the loop
6332
   or just its structure.  */
6333
 
6334
static void
6335
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6336
{
6337
  char *s_indent;
6338
  basic_block bb;
6339
 
6340
  if (loop == NULL)
6341
    return;
6342
 
6343
  s_indent = (char *) alloca ((size_t) indent + 1);
6344
  memset ((void *) s_indent, ' ', (size_t) indent);
6345
  s_indent[indent] = '\0';
6346
 
6347
  /* Print loop's header.  */
6348
  fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6349
           loop->num, loop->header->index, loop->latch->index);
6350
  fprintf (file, ", niter = ");
6351
  print_generic_expr (file, loop->nb_iterations, 0);
6352
 
6353
  if (loop->any_upper_bound)
6354
    {
6355
      fprintf (file, ", upper_bound = ");
6356
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
6357
    }
6358
 
6359
  if (loop->any_estimate)
6360
    {
6361
      fprintf (file, ", estimate = ");
6362
      dump_double_int (file, loop->nb_iterations_estimate, true);
6363
    }
6364
  fprintf (file, ")\n");
6365
 
6366
  /* Print loop's body.  */
6367
  if (verbosity >= 1)
6368
    {
6369
      fprintf (file, "%s{\n", s_indent);
6370
      FOR_EACH_BB (bb)
6371
        if (bb->loop_father == loop)
6372
          print_loops_bb (file, bb, indent, verbosity);
6373
 
6374
      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6375
      fprintf (file, "%s}\n", s_indent);
6376
    }
6377
}
6378
 
6379
/* Print the LOOP and its sibling loops on FILE, indented INDENT
6380
   spaces.  Depending on the VERBOSITY level, this outputs the contents of the
6381
   loop, or just its structure.  */
6382
 
6383
static void
6384
print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6385
{
6386
  if (loop == NULL)
6387
    return;
6388
 
6389
  print_loop (file, loop, indent, verbosity);
6390
  print_loop_and_siblings (file, loop->next, indent, verbosity);
6391
}
6392
 
6393
/* Follow a CFG edge from the entry point of the program, and on entry
6394
   of a loop, pretty print the loop structure on FILE.  */
6395
 
6396
void
6397
print_loops (FILE *file, int verbosity)
6398
{
6399
  basic_block bb;
6400
 
6401
  bb = ENTRY_BLOCK_PTR;
6402
  if (bb && bb->loop_father)
6403
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6404
}
6405
 
6406
 
6407
/* Debug the loop structure at the tree level, at some VERBOSITY level.  */
6408
 
6409
void
6410
debug_loops (int verbosity)
6411
{
6412
  print_loops (stderr, verbosity);
6413
}
6414
 
6415
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */
6416
 
6417
void
6418
debug_loop (struct loop *loop, int verbosity)
6419
{
6420
  print_loop (stderr, loop, 0, verbosity);
6421
}
6422
 
6423
/* Print on stderr the code of loop number NUM, at some VERBOSITY
6424
   level.  */
6425
 
6426
void
6427
debug_loop_num (unsigned num, int verbosity)
6428
{
6429
  debug_loop (get_loop (num), verbosity);
6430
}
6431
 
6432
/* Return true if BB ends with a call, possibly followed by some
6433
   instructions that must stay with the call.  Return false
6434
   otherwise.  */
6435
 
6436
static bool
6437
gimple_block_ends_with_call_p (basic_block bb)
6438
{
6439
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6440
  return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6441
}
6442
 
6443
 
6444
/* Return true if BB ends with a conditional branch.  Return false
6445
   otherwise.  */
6446
 
6447
static bool
6448
gimple_block_ends_with_condjump_p (const_basic_block bb)
6449
{
6450
  gimple stmt = last_stmt (CONST_CAST_BB (bb));
6451
  return (stmt && gimple_code (stmt) == GIMPLE_COND);
6452
}
6453
 
6454
 
6455
/* Return true if we need to add a fake edge to the exit at statement T.
6456
   Helper function for gimple_flow_call_edges_add.  */
6457
 
6458
static bool
6459
need_fake_edge_p (gimple t)
6460
{
6461
  tree fndecl = NULL_TREE;
6462
  int call_flags = 0;
6463
 
6464
  /* NORETURN and LONGJMP calls already have an edge to exit.
6465
     CONST and PURE calls do not need one.
6466
     We don't currently check for CONST and PURE here, although
6467
     it would be a good idea, because those attributes are
6468
     figured out from the RTL in mark_constant_function, and
6469
     the counter incrementation code from -fprofile-arcs
6470
     leads to different results from -fbranch-probabilities.  */
6471
  if (is_gimple_call (t))
6472
    {
6473
      fndecl = gimple_call_fndecl (t);
6474
      call_flags = gimple_call_flags (t);
6475
    }
6476
 
6477
  if (is_gimple_call (t)
6478
      && fndecl
6479
      && DECL_BUILT_IN (fndecl)
6480
      && (call_flags & ECF_NOTHROW)
6481
      && !(call_flags & ECF_RETURNS_TWICE)
6482
      /* fork() doesn't really return twice, but the effect of
6483
         wrapping it in __gcov_fork() which calls __gcov_flush()
6484
         and clears the counters before forking has the same
6485
         effect as returning twice.  Force a fake edge.  */
6486
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6487
           && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6488
    return false;
6489
 
6490
  if (is_gimple_call (t)
6491
      && !(call_flags & ECF_NORETURN))
6492
    return true;
6493
 
6494
  if (gimple_code (t) == GIMPLE_ASM
6495
       && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6496
    return true;
6497
 
6498
  return false;
6499
}
6500
 
6501
 
6502
/* Add fake edges to the function exit for any non-constant and
6503
   non-noreturn calls and volatile inline assembly in the bitmap of blocks
6504
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
6505
   the number of blocks that were split.
6506
 
6507
   The goal is to expose cases in which entering a basic block does
6508
   not imply that all subsequent instructions must be executed.  */
6509
 
6510
static int
6511
gimple_flow_call_edges_add (sbitmap blocks)
6512
{
6513
  int i;
6514
  int blocks_split = 0;
6515
  int last_bb = last_basic_block;
6516
  bool check_last_block = false;
6517
 
6518
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
6519
    return 0;
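  /* Decide whether the block just before EXIT needs the special
     handling below: always if no block set was given, otherwise only
     if that block is in BLOCKS.  */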
6520
 
6521
  if (! blocks)
6522
    check_last_block = true;
6523
  else
6524
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6525
 
6526
  /* In the last basic block, before epilogue generation, there will be
6527
     a fallthru edge to EXIT.  Special care is required if the last insn
6528
     of the last basic block is a call because make_edge folds duplicate
6529
     edges, which would result in the fallthru edge also being marked
6530
     fake, which would result in the fallthru edge being removed by
6531
     remove_fake_edges, which would result in an invalid CFG.
6532
 
6533
     Moreover, we can't elide the outgoing fake edge, since the block
6534
     profiler needs to take this into account in order to solve the minimal
6535
     spanning tree in the case that the call doesn't return.
6536
 
6537
     Handle this by adding a dummy instruction in a new last basic block.  */
6538
  if (check_last_block)
6539
    {
6540
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6541
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
6542
      gimple t = NULL;
6543
 
6544
      if (!gsi_end_p (gsi))
6545
        t = gsi_stmt (gsi);
6546
 
6547
      if (t && need_fake_edge_p (t))
6548
        {
6549
          edge e;
6550
 
6551
          e = find_edge (bb, EXIT_BLOCK_PTR);
6552
          if (e)
6553
            {
6554
              gsi_insert_on_edge (e, gimple_build_nop ());
6555
              gsi_commit_edge_inserts ();
6556
            }
6557
        }
6558
    }
6559
 
6560
  /* Now add fake edges to the function exit for any non-constant
6561
     calls since there is no way that we can determine if they will
6562
     return or not...  */
6563
  for (i = 0; i < last_bb; i++)
6564
    {
6565
      basic_block bb = BASIC_BLOCK (i);
6566
      gimple_stmt_iterator gsi;
6567
      gimple stmt, last_stmt;
6568
 
6569
      if (!bb)
6570
        continue;
6571
 
6572
      if (blocks && !TEST_BIT (blocks, i))
6573
        continue;
6574
 
6575
      gsi = gsi_last_bb (bb);
6576
      if (!gsi_end_p (gsi))
6577
        {
6578
          last_stmt = gsi_stmt (gsi);
6579
          do
6580
            {
6581
              stmt = gsi_stmt (gsi);
6582
              if (need_fake_edge_p (stmt))
6583
                {
6584
                  edge e;
6585
 
6586
                  /* The handling above of the final block before the
6587
                     epilogue should be enough to verify that there is
6588
                     no edge to the exit block in CFG already.
6589
                     Calling make_edge in such case would cause us to
6590
                     mark that edge as fake and remove it later.  */
6591
#ifdef ENABLE_CHECKING
6592
                  if (stmt == last_stmt)
6593
                    {
6594
                      e = find_edge (bb, EXIT_BLOCK_PTR);
6595
                      gcc_assert (e == NULL);
6596
                    }
6597
#endif
6598
 
6599
                  /* Note that the following may create a new basic block
6600
                     and renumber the existing basic blocks.  */
6601
                  if (stmt != last_stmt)
6602
                    {
6603
                      e = split_block (bb, stmt);
6604
                      if (e)
6605
                        blocks_split++;
6606
                    }
6607
                  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6608
                }
6609
              gsi_prev (&gsi);
6610
            }
6611
          while (!gsi_end_p (gsi));
6612
        }
6613
    }
6614
 
6615
  if (blocks_split)
6616
    verify_flow_info ();
6617
 
6618
  return blocks_split;
6619
}

/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = gimple_purge_dead_eh_edges (bb);

  if (cfun->has_nonlocal_label)
    {
      gimple stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
        for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
          {
            if (e->flags & EDGE_ABNORMAL)
              {
                remove_edge (e);
                changed = true;
              }
            else
              ei_next (&ei);
          }

      /* See gimple_purge_dead_eh_edges below.  */
      if (changed)
        free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
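
/* Standalone GNU C illustration, not part of the original tree-cfg.c:
   the cfun->has_nonlocal_label case handled above arises from code like
   this.  The call to inner() gets an abnormal successor edge, because
   the callee may transfer control back to the nonlocal label "out"
   instead of returning normally.  Names are invented.  */
#if 0
void
example_outer (int bad)
{
  __label__ out;
  void inner (void) { if (bad) goto out; }

  inner ();   /* abnormal successor: the nonlocal label below  */
  return;

 out:
  return;
}
#endif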

/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
        bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
        {
          none_removed = true;
          break;
        }
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
                    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
        {
          FOR_EACH_EDGE (f, ei, bb->succs)
            {
              if (f->dest != EXIT_BLOCK_PTR)
                bitmap_set_bit (df, f->dest->index);
            }
        }
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
        bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
        {
          bb = BASIC_BLOCK (i);
          bitmap_set_bit (df_idom,
                          get_immediate_dominator (CDI_DOMINATORS, bb)->index);
        }
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
        delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be a block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
           dbb;
           dbb = next_dom_son (CDI_DOMINATORS, dbb))
        VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
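
/* Illustrative sketch, not part of the original tree-cfg.c: a typical
   caller.  After proving that the GIMPLE_COND ending BB always takes one
   arm, a pass can drop the other edge together with everything dominated
   by it.  The helper name is hypothetical; the two callees are the real
   routines defined elsewhere in this file.  */
#if 0
static void
example_discard_untaken_edge (basic_block bb, bool cond_is_true)
{
  edge true_e, false_e;

  extract_true_false_edges_from_block (bb, &true_e, &false_e);
  remove_edge_and_dominated_blocks (cond_is_true ? false_e : true_e);
}
#endif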

/* Purge dead EH edges from basic block BB.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
        {
          remove_edge_and_dominated_blocks (e);
          changed = true;
        }
      else
        ei_next (&ei);
    }

  return changed;
}
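
/* Illustrative sketch, not part of the original tree-cfg.c: the usual
   calling pattern.  After folding a statement in place, callers such as
   the CCP pass check whether it lost its EH side effects and, if so,
   purge the now-dead EH edges.  The helper name is hypothetical; fold_stmt
   and maybe_clean_or_replace_eh_stmt are assumed from the GCC 4.5 API.  */
#if 0
static bool
example_fold_and_purge_eh (basic_block bb, gimple_stmt_iterator *gsi)
{
  gimple old_stmt = gsi_stmt (*gsi);

  if (!fold_stmt (gsi))
    return false;

  /* If the replacement can no longer throw, the EH successor edges of BB
     are dead and must go, or the CFG would not verify.  */
  if (maybe_clean_or_replace_eh_stmt (old_stmt, gsi_stmt (*gsi)))
    return gimple_purge_dead_eh_edges (bb);

  return false;
}
#endif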

/* Purge dead EH edges from every basic block whose index is set in
   BLOCKS.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Earlier gimple_purge_dead_eh_edges could have removed
         this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
        changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}

/* This function is called whenever a new edge is created or
   redirected.  */

static void
gimple_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (!gimple_seq_empty_p (phi_nodes (bb)))
    reserve_phi_args_for_new_edge (bb);
}

/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
gimple_execute_on_shrinking_pred (edge e)
{
  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
    remove_phi_args (e);
}

/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust PHI nodes for the 'first' basic block.  'second' is a copy of
   'first' and both are dominated by the 'new_head' basic block.  When
   'new_head' was created by splitting 'second's incoming edge, the PHI
   nodes of 'second' received arguments on the new_head->second edge from
   split_edge().  Later, an additional edge 'e' was created to connect
   'new_head' and 'first'.  This routine now adds, on that edge 'e', the
   same PHI arguments that the new_head->second edge received as part of
   the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
                                  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}


/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
                               basic_block second_head ATTRIBUTE_UNUSED,
                               basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build the new conditional expression.  */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
                                               NULL_TREE, NULL_TREE);

  /* Add the new condition to COND_BB.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect the new head with the first
     head as well as the second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
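
/* Standalone C illustration, not part of the original tree-cfg.c: the
   shape that loop versioning builds with the two helpers above.  One loop
   becomes a runtime test selecting between two copies; the helpers insert
   the GIMPLE_COND into the new condition block and fix up the loop-header
   PHIs on the new edge.  Function and variable names are invented.  */
#if 0
void
example_axpy (float *x, float *y, int n, int stride)
{
  int i;

  /* Conceptually, after versioning on "stride == 1":  */
  if (stride == 1)
    for (i = 0; i < n; i++)            /* specialized copy of the loop  */
      y[i] += 2.0f * x[i];
  else
    for (i = 0; i < n; i++)            /* original, general copy  */
      y[i * stride] += 2.0f * x[i * stride];
}
#endif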

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,               /* dump_bb  */
  create_bb,                    /* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,   /* can_remove_branch_p  */
  remove_bb,                    /* delete_basic_block  */
  gimple_split_block,           /* split_block  */
  gimple_move_block_after,      /* move_block_after  */
  gimple_can_merge_blocks_p,    /* can_merge_blocks_p  */
  gimple_merge_blocks,          /* merge_blocks  */
  gimple_predict_edge,          /* predict_edge  */
  gimple_predicted_by_p,        /* predicted_by_p  */
  gimple_can_duplicate_bb_p,    /* can_duplicate_block_p  */
  gimple_duplicate_bb,          /* duplicate_block  */
  gimple_split_edge,            /* split_edge  */
  gimple_make_forwarder_block,  /* make_forwarder_block  */
  NULL,                         /* tidy_fallthru_edge  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,       /* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts           /* flush_pending_stmts */
};
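
/* Illustrative sketch, not part of the original tree-cfg.c: passes do not
   call the functions in this table directly; cfghooks.c dispatches through
   whichever table is currently registered.  The helper name is hypothetical;
   gimple_register_cfg_hooks, split_edge and delete_basic_block are assumed
   from the GCC 4.5 cfghooks API.  */
#if 0
static void
example_use_gimple_cfg_hooks (edge e, basic_block bb)
{
  gimple_register_cfg_hooks ();
  split_edge (e);           /* dispatches to gimple_split_edge above  */
  delete_basic_block (bb);  /* dispatches to remove_bb above  */
}
#endif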


/* Split all critical edges.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements on edges and expects that, because
             split_critical_edges was run beforehand, committing the edge
             insertions will not split any more edges.  In addition to
             critical edges we must therefore also split edges whose source
             block ends in a control flow statement such as RESX and whose
             destination cannot simply receive the insertion at its start.
             This matches the logic in gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
                    || e->dest == EXIT_BLOCK_PTR)
                   && e->src != ENTRY_BLOCK_PTR
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}
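
/* Standalone C illustration, not part of the original tree-cfg.c: an edge
   u->v is critical when u has more than one successor and v has more than
   one predecessor; such an edge has no block of its own to hold inserted
   code until it is split.  This toy program (invented for illustration)
   flags the critical edges of a small CFG.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Toy CFG: 0->1, 0->2, 1->3, 2->3, 2->4.  Only 2->3 is critical.  */
  static const int edges[][2] = { {0,1}, {0,2}, {1,3}, {2,3}, {2,4} };
  int nsucc[5] = { 0 }, npred[5] = { 0 };
  unsigned i, m = sizeof edges / sizeof edges[0];

  for (i = 0; i < m; i++)
    {
      nsucc[edges[i][0]]++;
      npred[edges[i][1]]++;
    }
  for (i = 0; i < m; i++)
    if (nsucc[edges[i][0]] > 1 && npred[edges[i][1]] > 1)
      printf ("critical edge: %d -> %d\n", edges[i][0], edges[i][1]);
  return 0;
}
#endif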

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",                          /* name */
  NULL,                          /* gate */
  split_critical_edges,          /* execute */
  NULL,                          /* sub */
  NULL,                          /* next */
  0,                             /* static_pass_number */
  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties_required */
  PROP_no_crit_edges,            /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow  /* todo_flags_finish */
 }
};


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
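
/* Illustrative sketch, not part of the original tree-cfg.c: how a GIMPLE
   pass uses the helpers above to materialize "a * b + c" in front of the
   statement at GSI.  The helper name is hypothetical; each call returns a
   gimple value (an SSA name or an invariant) ready to be used as an
   operand of the next statement.  */
#if 0
static tree
example_emit_madd_before (gimple_stmt_iterator *gsi, tree a, tree b, tree c)
{
  tree prod = gimplify_build2 (gsi, MULT_EXPR, TREE_TYPE (a), a, b);
  return gimplify_build2 (gsi, PLUS_EXPR, TREE_TYPE (a), prod, c);
}
#endif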


/* Emit return warnings.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}
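
/* Standalone C snippets, not part of the original tree-cfg.c, that trigger
   the two diagnostics emitted above (function names invented):  */
#if 0
#include <stdlib.h>

int
example_missing_return (int x)  /* "control reaches end of non-void function"
                                   with -Wreturn-type  */
{
  if (x > 0)
    return x;
  /* Falls off the end when x <= 0.  */
}

__attribute__ ((noreturn)) void
example_bogus_noreturn (int x)  /* "'noreturn' function does return"  */
{
  if (x)
    abort ();
  /* Returns normally when x == 0.  */
}
#endif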


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",              /* name */
  NULL,                                 /* gate */
  execute_warn_function_return,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Emit noreturn warnings.  */

static unsigned int
execute_warn_function_noreturn (void)
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
    warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
                "function might be possible candidate "
                "for attribute %<noreturn%>");
  return 0;
}

struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",            /* name */
  NULL,                                 /* gate */
  execute_warn_function_noreturn,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
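
/* Standalone C snippet, not part of the original tree-cfg.c: with
   -Wmissing-noreturn the function below is reported as a possible
   candidate for attribute noreturn, because no path through it reaches
   the exit block (the function name is invented).  */
#if 0
#include <stdlib.h>

void
example_die (const char *msg)
{
  (void) msg;
  exit (1);
}
#endif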


/* Walk a gimplified function and warn about calls whose return value is
   ignored and whose function type has attribute((warn_unused_result))
   set.  This is done before inlining, so we don't have to worry about
   inlined call sites.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
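
/* Standalone C snippet, not part of the original tree-cfg.c: the attribute
   the walker above looks for.  The "naked" call below becomes a GIMPLE_CALL
   without an LHS and is diagnosed; the second call stores its value and is
   not (declarations invented for illustration).  */
#if 0
__attribute__ ((warn_unused_result)) int example_must_check (void);

void
example_caller (void)
{
  int ok;

  example_must_check ();       /* value ignored: warning  */
  ok = example_must_check ();  /* value used: no warning  */
  (void) ok;
}
#endif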

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

struct gimple_opt_pass pass_warn_unused_result =
{
  {
    GIMPLE_PASS,
    "*warn_unused_result",              /* name */
    gate_warn_unused_result,            /* gate */
    run_warn_unused_result,             /* execute */
    NULL,                               /* sub */
    NULL,                               /* next */
    0,                                  /* static_pass_number */
    TV_NONE,                            /* tv_id */
    PROP_gimple_any,                    /* properties_required */
    0,                                  /* properties_provided */
    0,                                  /* properties_destroyed */
    0,                                  /* todo_flags_start */
    0,                                  /* todo_flags_finish */
  }
};