OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [tree-ssa-dom.c] - Blame information for rev 816

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* SSA Dominator optimizations for trees
2
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3
   Free Software Foundation, Inc.
4
   Contributed by Diego Novillo <dnovillo@redhat.com>
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "tree.h"
27
#include "flags.h"
28
#include "rtl.h"
29
#include "tm_p.h"
30
#include "ggc.h"
31
#include "basic-block.h"
32
#include "cfgloop.h"
33
#include "output.h"
34
#include "expr.h"
35
#include "function.h"
36
#include "diagnostic.h"
37
#include "timevar.h"
38
#include "tree-dump.h"
39
#include "tree-flow.h"
40
#include "domwalk.h"
41
#include "real.h"
42
#include "tree-pass.h"
43
#include "tree-ssa-propagate.h"
44
#include "langhooks.h"
45
#include "params.h"
46
 
47
/* This file implements optimizations on the dominator tree.  */
48
 
49
 
50
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge.  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  Entries are stored
     as (condition, value) pairs, so MAX_COND_EQUIVALENCES is always a
     multiple of two.  */
  tree *cond_equivalences;
  unsigned int max_cond_equivalences;
};
78
 
79
 
80
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
static VEC(tree,heap) *avail_exprs_stack;

/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(tree,heap) *stmts_to_rescan;
104
 
105
/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   eliminates 2 global hash tables and two block local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The stmt pointer if this element corresponds to a statement;
     NULL when the element was built directly from a conditional
     expression (see initialize_hash_element).  */
  tree stmt;

  /* The hash value for RHS/ann, cached so it need only be computed
     once per element.  */
  hashval_t hash;
};
130
 
131
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
143
 
144
/* Statistics for dominator optimizations.  Counters are accumulated
   over a whole invocation of the pass and reported by
   dump_dominator_optimization_stats.  */
struct opt_stats_d
{
  long num_stmts;             /* Total statements examined.  */
  long num_exprs_considered;  /* Expressions considered for elimination.  */
  long num_re;                /* Redundant expressions eliminated.  */
  long num_const_prop;        /* Constants propagated.  */
  long num_copy_prop;         /* Copies propagated.  */
};

static struct opt_stats_d opt_stats;
155
 
156
/* A simple source/destination equivalence pair.
   NOTE(review): no use of this structure is visible in this portion of
   the file — it may be a leftover from an earlier revision; confirm
   against the rest of the file before removing.  */
struct eq_expr_value
{
  tree src;
  tree dst;
};
161
 
162
/* Local functions.  Forward declarations for the statics defined
   later in this file.  */
static void optimize_stmt (struct dom_walk_data *,
                           basic_block bb,
                           block_stmt_iterator);
static tree lookup_avail_expr (tree, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (tree);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
185
 
186
 
187
/* Allocate an EDGE_INFO for edge E and attach it to E.
188
   Return the new EDGE_INFO structure.  */
189
 
190
static struct edge_info *
191
allocate_edge_info (edge e)
192
{
193
  struct edge_info *edge_info;
194
 
195
  edge_info = XCNEW (struct edge_info);
196
 
197
  e->aux = edge_info;
198
  return edge_info;
199
}
200
 
201
/* Free all EDGE_INFO structures associated with edges in the CFG.
202
   If a particular edge can be threaded, copy the redirection
203
   target from the EDGE_INFO structure into the edge's AUX field
204
   as required by code to update the CFG and SSA graph for
205
   jump threading.  */
206
 
207
static void
208
free_all_edge_infos (void)
209
{
210
  basic_block bb;
211
  edge_iterator ei;
212
  edge e;
213
 
214
  FOR_EACH_BB (bb)
215
    {
216
      FOR_EACH_EDGE (e, ei, bb->preds)
217
        {
218
         struct edge_info *edge_info = (struct edge_info *) e->aux;
219
 
220
          if (edge_info)
221
            {
222
              if (edge_info->cond_equivalences)
223
                free (edge_info->cond_equivalences);
224
              free (edge_info);
225
              e->aux = NULL;
226
            }
227
        }
228
    }
229
}
230
 
231
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.

   Entry point of the dominator optimizer: sets up the hash tables and
   unwind stacks, walks the dominator tree optimizing statements, then
   performs jump threading and cleans up.  Always returns 0 (no extra
   TODO flags).  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;
  struct loops loops_info;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  avail_exprs_stack = VEC_alloc (tree, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  walk_data.interesting_blocks = NULL;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  /* We need to know which edges exit loops so that we can
     aggressively thread through loop headers to an exit
     edge.  */
  flow_loops_find (&loops_info);
  mark_loop_exit_edges (&loops_info);
  flow_loops_free (&loops_info);

  /* Clean up the CFG so that any forwarder blocks created by loop
     canonicalization are removed.  */
  cleanup_tree_cfg ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* We need accurate information regarding back edges in the CFG
     for jump threading.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  /* Flush any deferred operand updates on statements the walk
     modified.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          update_stmt_if_modified (bsi_stmt (bsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks ();

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Finally, remove everything except invariants in SSA_NAME_VALUE.

     Long term we will be able to let everything in SSA_NAME_VALUE
     persist.  However, for now, we know this is the safe thing to do.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      tree value;

      if (!name)
        continue;

      value = SSA_NAME_VALUE (name);
      if (value && !is_gimple_min_invariant (value))
        SSA_NAME_VALUE (name) = NULL;
    }

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (tree, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (tree, heap, stmts_to_rescan);
  return 0;
}
362
 
363
/* Gate function: run the dominator optimizer only when
   -ftree-dominator-opts is enabled.  */

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
368
 
369
/* Pass descriptor for the dominator optimizer, registered with the
   pass manager.  */

struct tree_opt_pass pass_dominator =
{
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  PROP_smt_usage,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa
    | TODO_update_smt_usage,            /* todo_flags_finish */
  0                                     /* letter */
};
389
 
390
 
391
/* Given a stmt CONDSTMT containing a COND_EXPR, canonicalize the
392
   COND_EXPR into a canonical form.  */
393
 
394
static void
395
canonicalize_comparison (tree condstmt)
396
{
397
  tree cond = COND_EXPR_COND (condstmt);
398
  tree op0;
399
  tree op1;
400
  enum tree_code code = TREE_CODE (cond);
401
 
402
  if (!COMPARISON_CLASS_P (cond))
403
    return;
404
 
405
  op0 = TREE_OPERAND (cond, 0);
406
  op1 = TREE_OPERAND (cond, 1);
407
 
408
  /* If it would be profitable to swap the operands, then do so to
409
     canonicalize the statement, enabling better optimization.
410
 
411
     By placing canonicalization of such expressions here we
412
     transparently keep statements in canonical form, even
413
     when the statement is modified.  */
414
  if (tree_swap_operands_p (op0, op1, false))
415
    {
416
      /* For relationals we need to swap the operands
417
         and change the code.  */
418
      if (code == LT_EXPR
419
          || code == GT_EXPR
420
          || code == LE_EXPR
421
          || code == GE_EXPR)
422
        {
423
          TREE_SET_CODE (cond, swap_tree_comparison (code));
424
          swap_tree_operands (condstmt,
425
                              &TREE_OPERAND (cond, 0),
426
                              &TREE_OPERAND (cond, 1));
427
          /* If one operand was in the operand cache, but the other is
428
             not, because it is a constant, this is a case that the
429
             internal updating code of swap_tree_operands can't handle
430
             properly.  */
431
          if (TREE_CODE_CLASS (TREE_CODE (op0))
432
              != TREE_CODE_CLASS (TREE_CODE (op1)))
433
            update_stmt (condstmt);
434
        }
435
    }
436
}
437
 
438
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.

   Called by the dominator walker before any statements of BB are
   visited.  The markers MUST be pushed before any equivalences are
   recorded, since dom_opt_finalize_block unwinds to the markers.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                          basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);
}
459
 
460
/* Given an expression EXPR (a relational expression or a statement),
461
   initialize the hash table element pointed to by ELEMENT.  */
462
 
463
static void
464
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
465
{
466
  /* Hash table elements may be based on conditional expressions or statements.
467
 
468
     For the former case, we have no annotation and we want to hash the
469
     conditional expression.  In the latter case we have an annotation and
470
     we want to record the expression the statement evaluates.  */
471
  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
472
    {
473
      element->stmt = NULL;
474
      element->rhs = expr;
475
    }
476
  else if (TREE_CODE (expr) == COND_EXPR)
477
    {
478
      element->stmt = expr;
479
      element->rhs = COND_EXPR_COND (expr);
480
    }
481
  else if (TREE_CODE (expr) == SWITCH_EXPR)
482
    {
483
      element->stmt = expr;
484
      element->rhs = SWITCH_COND (expr);
485
    }
486
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
487
    {
488
      element->stmt = expr;
489
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
490
    }
491
  else if (TREE_CODE (expr) == GOTO_EXPR)
492
    {
493
      element->stmt = expr;
494
      element->rhs = GOTO_DESTINATION (expr);
495
    }
496
  else
497
    {
498
      element->stmt = expr;
499
      element->rhs = TREE_OPERAND (expr, 1);
500
    }
501
 
502
  element->lhs = lhs;
503
  element->hash = avail_expr_hash (element);
504
}
505
 
506
/* Remove all the expressions in LOCALS from TABLE, stopping when there are
507
   LIMIT entries left in LOCALs.  */
508
 
509
static void
510
remove_local_expressions_from_table (void)
511
{
512
  /* Remove all the expressions made available in this block.  */
513
  while (VEC_length (tree, avail_exprs_stack) > 0)
514
    {
515
      struct expr_hash_elt element;
516
      tree expr = VEC_pop (tree, avail_exprs_stack);
517
 
518
      if (expr == NULL_TREE)
519
        break;
520
 
521
      initialize_hash_element (expr, NULL, &element);
522
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
523
    }
524
}
525
 
526
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
527
   CONST_AND_COPIES to its original state, stopping when we hit a
528
   NULL marker.  */
529
 
530
static void
531
restore_vars_to_original_value (void)
532
{
533
  while (VEC_length (tree, const_and_copies_stack) > 0)
534
    {
535
      tree prev_value, dest;
536
 
537
      dest = VEC_pop (tree, const_and_copies_stack);
538
 
539
      if (dest == NULL)
540
        break;
541
 
542
      prev_value = VEC_pop (tree, const_and_copies_stack);
543
      SSA_NAME_VALUE (dest) =  prev_value;
544
    }
545
}
546
 
547
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.
   WITHIN_STMT is unused; STMT is looked up in the available
   expression table without inserting it.  */
static tree
simplify_stmt_for_jump_threading (tree stmt, tree within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
554
 
555
/* Wrapper for common code to attempt to thread an edge.  For example,
556
   it handles lazily building the dummy condition and the bookkeeping
557
   when jump threading is successful.  */
558
 
559
static void
560
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
561
{
562
  /* If we don't already have a dummy condition, build it now.  */
563
  if (! walk_data->global_data)
564
    {
565
      tree dummy_cond = build2 (NE_EXPR, boolean_type_node,
566
                                integer_zero_node, integer_zero_node);
567
      dummy_cond = build3 (COND_EXPR, void_type_node, dummy_cond, NULL, NULL);
568
      walk_data->global_data = dummy_cond;
569
    }
570
 
571
  thread_across_edge (walk_data->global_data, e, false,
572
                      &const_and_copies_stack,
573
                      simplify_stmt_for_jump_threading);
574
}
575
 
576
/* We have finished processing the dominator children of BB, perform
577
   any finalization actions in preparation for leaving this node in
578
   the dominator tree.  */
579
 
580
static void
581
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
582
{
583
  tree last;
584
 
585
 
586
  /* If we have an outgoing edge to a block with multiple incoming and
587
     outgoing edges, then we may be able to thread the edge.  ie, we
588
     may be able to statically determine which of the outgoing edges
589
     will be traversed when the incoming edge from BB is traversed.  */
590
  if (single_succ_p (bb)
591
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
592
      && potentially_threadable_block (single_succ (bb)))
593
    {
594
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
595
    }
596
  else if ((last = last_stmt (bb))
597
           && TREE_CODE (last) == COND_EXPR
598
           && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
599
               || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
600
           && EDGE_COUNT (bb->succs) == 2
601
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
602
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
603
    {
604
      edge true_edge, false_edge;
605
 
606
      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
607
 
608
      /* Only try to thread the edge if it reaches a target block with
609
         more than one predecessor and more than one successor.  */
610
      if (potentially_threadable_block (true_edge->dest))
611
        {
612
          struct edge_info *edge_info;
613
          unsigned int i;
614
 
615
          /* Push a marker onto the available expression stack so that we
616
             unwind any expressions related to the TRUE arm before processing
617
             the false arm below.  */
618
          VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
619
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
620
 
621
          edge_info = (struct edge_info *) true_edge->aux;
622
 
623
          /* If we have info associated with this edge, record it into
624
             our equivalency tables.  */
625
          if (edge_info)
626
            {
627
              tree *cond_equivalences = edge_info->cond_equivalences;
628
              tree lhs = edge_info->lhs;
629
              tree rhs = edge_info->rhs;
630
 
631
              /* If we have a simple NAME = VALUE equivalency record it.  */
632
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
633
                record_const_or_copy (lhs, rhs);
634
 
635
              /* If we have 0 = COND or 1 = COND equivalences, record them
636
                 into our expression hash tables.  */
637
              if (cond_equivalences)
638
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
639
                  {
640
                    tree expr = cond_equivalences[i];
641
                    tree value = cond_equivalences[i + 1];
642
 
643
                    record_cond (expr, value);
644
                  }
645
            }
646
 
647
          dom_thread_across_edge (walk_data, true_edge);
648
 
649
          /* And restore the various tables to their state before
650
             we threaded this edge.  */
651
          remove_local_expressions_from_table ();
652
        }
653
 
654
      /* Similarly for the ELSE arm.  */
655
      if (potentially_threadable_block (false_edge->dest))
656
        {
657
          struct edge_info *edge_info;
658
          unsigned int i;
659
 
660
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
661
          edge_info = (struct edge_info *) false_edge->aux;
662
 
663
          /* If we have info associated with this edge, record it into
664
             our equivalency tables.  */
665
          if (edge_info)
666
            {
667
              tree *cond_equivalences = edge_info->cond_equivalences;
668
              tree lhs = edge_info->lhs;
669
              tree rhs = edge_info->rhs;
670
 
671
              /* If we have a simple NAME = VALUE equivalency record it.  */
672
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
673
                record_const_or_copy (lhs, rhs);
674
 
675
              /* If we have 0 = COND or 1 = COND equivalences, record them
676
                 into our expression hash tables.  */
677
              if (cond_equivalences)
678
                for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
679
                  {
680
                    tree expr = cond_equivalences[i];
681
                    tree value = cond_equivalences[i + 1];
682
 
683
                    record_cond (expr, value);
684
                  }
685
            }
686
 
687
          /* Now thread the edge.  */
688
          dom_thread_across_edge (walk_data, false_edge);
689
 
690
          /* No need to remove local expressions from our tables
691
             or restore vars to their original value as that will
692
             be done immediately below.  */
693
        }
694
    }
695
 
696
  remove_local_expressions_from_table ();
697
  restore_vars_to_original_value ();
698
 
699
  /* If we queued any statements to rescan in this block, then
700
     go ahead and rescan them now.  */
701
  while (VEC_length (tree, stmts_to_rescan) > 0)
702
    {
703
      tree stmt = VEC_last (tree, stmts_to_rescan);
704
      basic_block stmt_bb = bb_for_stmt (stmt);
705
 
706
      if (stmt_bb != bb)
707
        break;
708
 
709
      VEC_pop (tree, stmts_to_rescan);
710
      mark_new_vars_to_rename (stmt);
711
    }
712
}
713
 
714
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
        {
          tree t = PHI_ARG_DEF (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop (i.e. I reached PHI_NUM_ARGS), then we
         have a PHI which may create a useful equivalence.  We do not need
         to record unwind data for this, since this is a true assignment
         and not an equivalence inferred from a comparison.  All uses of
         this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
          && may_propagate_copy (lhs, rhs))
        SSA_NAME_VALUE (lhs) = rhs;
    }
}
768
 
769
/* Ignoring loop backedges, if BB has precisely one incoming edge then
770
   return that edge.  Otherwise return NULL.  */
771
static edge
772
single_incoming_edge_ignoring_loop_edges (basic_block bb)
773
{
774
  edge retval = NULL;
775
  edge e;
776
  edge_iterator ei;
777
 
778
  FOR_EACH_EDGE (e, ei, bb->preds)
779
    {
780
      /* A loop back edge can be identified by the destination of
781
         the edge dominating the source of the edge.  */
782
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
783
        continue;
784
 
785
      /* If we have already seen a non-loop edge, then we must have
786
         multiple incoming non-loop edges and thus we return NULL.  */
787
      if (retval)
788
        return NULL;
789
 
790
      /* This is the first non-loop incoming edge we have found.  Record
791
         it.  */
792
      retval = e;
793
    }
794
 
795
  return retval;
796
}
797
 
798
/* Record any equivalences created by the incoming edge to BB.  If BB
799
   has more than one incoming edge, then no equivalence is created.  */
800
 
801
static void
802
record_equivalences_from_incoming_edge (basic_block bb)
803
{
804
  edge e;
805
  basic_block parent;
806
  struct edge_info *edge_info;
807
 
808
  /* If our parent block ended with a control statement, then we may be
809
     able to record some equivalences based on which outgoing edge from
810
     the parent was followed.  */
811
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);
812
 
813
  e = single_incoming_edge_ignoring_loop_edges (bb);
814
 
815
  /* If we had a single incoming edge from our parent block, then enter
816
     any data associated with the edge into our tables.  */
817
  if (e && e->src == parent)
818
    {
819
      unsigned int i;
820
 
821
      edge_info = (struct edge_info *) e->aux;
822
 
823
      if (edge_info)
824
        {
825
          tree lhs = edge_info->lhs;
826
          tree rhs = edge_info->rhs;
827
          tree *cond_equivalences = edge_info->cond_equivalences;
828
 
829
          if (lhs)
830
            record_equality (lhs, rhs);
831
 
832
          if (cond_equivalences)
833
            {
834
              for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
835
                {
836
                  tree expr = cond_equivalences[i];
837
                  tree value = cond_equivalences[i + 1];
838
 
839
                  record_cond (expr, value);
840
                }
841
            }
842
        }
843
    }
844
}
845
 
846
/* Dump SSA statistics on FILE.  Reports the counters accumulated in
   OPT_STATS plus hash table statistics for AVAIL_EXPRS.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs;

  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  /* Avoid division by zero in the PERCENT computation below.  */
  n_exprs = opt_stats.num_exprs_considered;
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, "    Redundant expressions eliminated:         %6ld (%.0f%%)\n",
           opt_stats.num_re, PERCENT (opt_stats.num_re,
                                      n_exprs));
  fprintf (file, "    Constants propagated:                     %6ld\n",
           opt_stats.num_const_prop);
  fprintf (file, "    Copies propagated:                        %6ld\n",
           opt_stats.num_copy_prop);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}
875
 
876
 
877
/* Dump SSA statistics on stderr.  */
878
 
879
void
880
debug_dominator_optimization_stats (void)
881
{
882
  dump_dominator_optimization_stats (stderr);
883
}
884
 
885
 
886
/* Dump statistics for the hash table HTAB.  */
887
 
888
static void
889
htab_statistics (FILE *file, htab_t htab)
890
{
891
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
892
           (long) htab_size (htab),
893
           (long) htab_elements (htab),
894
           htab_collisions (htab));
895
}
896
 
897
/* Enter a statement into the true/false expression hash table indicating
898
   that the condition COND has the value VALUE.  */
899
 
900
static void
901
record_cond (tree cond, tree value)
902
{
903
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
904
  void **slot;
905
 
906
  initialize_hash_element (cond, value, element);
907
 
908
  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
909
                                   element->hash, INSERT);
910
  if (*slot == NULL)
911
    {
912
      *slot = (void *) element;
913
      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
914
    }
915
  else
916
    free (element);
917
}
918
 
919
/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
920
   the new conditional into *p, then store a boolean_true_node
921
   into *(p + 1).  */
922
 
923
static void
924
build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
925
{
926
  *p = build2 (new_code, boolean_type_node, op0, op1);
927
  p++;
928
  *p = boolean_true_node;
929
}
930
 
931
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.

   The derived conditions are stored in EDGE_INFO->cond_equivalences as
   flat (expression, value) pairs: even slots hold a condition tree, the
   following odd slot holds its truth value.  Slots 0-3 always hold the
   original COND (true) and INVERTED (false); any additional pairs start
   at slot 4.  MAX_COND_EQUIVALENCES is the total number of slots.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  /* Only comparisons imply further conditions.  */
  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      /* a < b (or a > b) additionally implies a <= b (a >= b) and a != b.
         For floats it also implies the operands are ordered and a <> b.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 12;
          edge_info->cond_equivalences = XNEWVEC (tree, 12);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[8]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[10]);
        }
      else
        {
          edge_info->max_cond_equivalences = 8;
          edge_info->cond_equivalences = XNEWVEC (tree, 8);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      /* a <= b / a >= b implies only orderedness for floats.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (tree, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (tree, 4);
        }
      break;

    case EQ_EXPR:
      /* a == b implies a <= b and a >= b; for floats also orderedness.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 10;
          edge_info->cond_equivalences = XNEWVEC (tree, 10);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[8]);
        }
      else
        {
          edge_info->max_cond_equivalences = 8;
          edge_info->cond_equivalences = XNEWVEC (tree, 8);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case UNORDERED_EXPR:
      /* Unordered operands imply every unordered comparison as well as
         inequality.  */
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = XNEWVEC (tree, 16);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      /* a unlt b (ungt b) implies the non-strict form and inequality.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      /* a uneq b implies a unle b and a unge b.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      /* a <> b implies a != b and that the operands are ordered.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      break;

    default:
      /* No derived conditions; just room for COND and INVERTED.  */
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (tree, 4);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}
1068
 
1069
/* A helper function for record_const_or_copy and record_equality.
1070
   Do the work of recording the value and undo info.  */
1071
 
1072
static void
1073
record_const_or_copy_1 (tree x, tree y, tree prev_x)
1074
{
1075
  SSA_NAME_VALUE (x) = y;
1076
 
1077
  VEC_reserve (tree, heap, const_and_copies_stack, 2);
1078
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
1079
  VEC_quick_push (tree, const_and_copies_stack, x);
1080
}
1081
 
1082
 
1083
/* Return the loop depth of the basic block of the defining statement of X.
1084
   This number should not be treated as absolutely correct because the loop
1085
   information may not be completely up-to-date when dom runs.  However, it
1086
   will be relatively correct, and as more passes are taught to keep loop info
1087
   up to date, the result will become more and more accurate.  */
1088
 
1089
int
1090
loop_depth_of_name (tree x)
1091
{
1092
  tree defstmt;
1093
  basic_block defbb;
1094
 
1095
  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
1096
  if (TREE_CODE (x) != SSA_NAME)
1097
    return 0;
1098
 
1099
  /* Otherwise return the loop depth of the defining statement's bb.
1100
     Note that there may not actually be a bb for this statement, if the
1101
     ssa_name is live on entry.  */
1102
  defstmt = SSA_NAME_DEF_STMT (x);
1103
  defbb = bb_for_stmt (defstmt);
1104
  if (!defbb)
1105
    return 0;
1106
 
1107
  return defbb->loop_depth;
1108
}
1109
 
1110
 
1111
/* Record that X is equal to Y in const_and_copies.  Record undo
1112
   information in the block-local vector.  */
1113
 
1114
static void
1115
record_const_or_copy (tree x, tree y)
1116
{
1117
  tree prev_x = SSA_NAME_VALUE (x);
1118
 
1119
  if (TREE_CODE (y) == SSA_NAME)
1120
    {
1121
      tree tmp = SSA_NAME_VALUE (y);
1122
      if (tmp)
1123
        y = tmp;
1124
    }
1125
 
1126
  record_const_or_copy_1 (x, y, prev_x);
1127
}
1128
 
1129
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.

   Unlike record_const_or_copy, neither operand is known to be the
   "destination", so the pair is first canonicalized: prefer recording
   X = <invariant> when possible, otherwise the name defined at the
   shallower loop depth becomes the value.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (TREE_INVARIANT (y))
    ;
  /* X is invariant (or at least as loop-invariant as Y): swap the pair so
     the invariant ends up in Y.  PREV_X must track the new X.  */
  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  /* X already has an invariant value; record Y = that value instead.  */
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  /* Y has a prior (non-value-handle) value; canonicalize on it.  */
  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
1170
 
1171
/* Returns true when STMT is a simple iv increment.  It detects the
1172
   following situation:
1173
 
1174
   i_1 = phi (..., i_2)
1175
   i_2 = i_1 +/- ...  */
1176
 
1177
static bool
1178
simple_iv_increment_p (tree stmt)
1179
{
1180
  tree lhs, rhs, preinc, phi;
1181
  unsigned i;
1182
 
1183
  if (TREE_CODE (stmt) != MODIFY_EXPR)
1184
    return false;
1185
 
1186
  lhs = TREE_OPERAND (stmt, 0);
1187
  if (TREE_CODE (lhs) != SSA_NAME)
1188
    return false;
1189
 
1190
  rhs = TREE_OPERAND (stmt, 1);
1191
 
1192
  if (TREE_CODE (rhs) != PLUS_EXPR
1193
      && TREE_CODE (rhs) != MINUS_EXPR)
1194
    return false;
1195
 
1196
  preinc = TREE_OPERAND (rhs, 0);
1197
  if (TREE_CODE (preinc) != SSA_NAME)
1198
    return false;
1199
 
1200
  phi = SSA_NAME_DEF_STMT (preinc);
1201
  if (TREE_CODE (phi) != PHI_NODE)
1202
    return false;
1203
 
1204
  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
1205
    if (PHI_ARG_DEF (phi, i) == lhs)
1206
      return true;
1207
 
1208
  return false;
1209
}
1210
 
1211
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
1212
   known value for that SSA_NAME (or NULL if no value is known).
1213
 
1214
   Propagate values from CONST_AND_COPIES into the PHI nodes of the
1215
   successors of BB.  */
1216
 
1217
static void
1218
cprop_into_successor_phis (basic_block bb)
1219
{
1220
  edge e;
1221
  edge_iterator ei;
1222
 
1223
  FOR_EACH_EDGE (e, ei, bb->succs)
1224
    {
1225
      tree phi;
1226
      int indx;
1227
 
1228
      /* If this is an abnormal edge, then we do not want to copy propagate
1229
         into the PHI alternative associated with this edge.  */
1230
      if (e->flags & EDGE_ABNORMAL)
1231
        continue;
1232
 
1233
      phi = phi_nodes (e->dest);
1234
      if (! phi)
1235
        continue;
1236
 
1237
      indx = e->dest_idx;
1238
      for ( ; phi; phi = PHI_CHAIN (phi))
1239
        {
1240
          tree new;
1241
          use_operand_p orig_p;
1242
          tree orig;
1243
 
1244
          /* The alternative may be associated with a constant, so verify
1245
             it is an SSA_NAME before doing anything with it.  */
1246
          orig_p = PHI_ARG_DEF_PTR (phi, indx);
1247
          orig = USE_FROM_PTR (orig_p);
1248
          if (TREE_CODE (orig) != SSA_NAME)
1249
            continue;
1250
 
1251
          /* If we have *ORIG_P in our constant/copy table, then replace
1252
             ORIG_P with its value in our constant/copy table.  */
1253
          new = SSA_NAME_VALUE (orig);
1254
          if (new
1255
              && new != orig
1256
              && (TREE_CODE (new) == SSA_NAME
1257
                  || is_gimple_min_invariant (new))
1258
              && may_propagate_copy (orig, new))
1259
            propagate_value (orig_p, new);
1260
        }
1261
    }
1262
}
1263
 
1264
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.

   For a SWITCH_EXPR, an outgoing edge that is reached by exactly one
   single-valued case label implies "cond == case value" on that edge.
   For a COND_EXPR, equivalences are recorded for both the true and
   false edges via record_conditions.  */

static void
record_edge_info (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  struct edge_info *edge_info;

  if (! bsi_end_p (bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
        {
          tree cond = SWITCH_COND (stmt);

          if (TREE_CODE (cond) == SSA_NAME)
            {
              tree labels = SWITCH_LABELS (stmt);
              int i, n_labels = TREE_VEC_LENGTH (labels);
              /* Per-block map: the unique single-valued case label
                 targeting that block, or error_mark_node when no
                 equivalence can be derived.  */
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = TREE_VEC_ELT (labels, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));

                  /* Case ranges (CASE_HIGH), the default label (no
                     CASE_LOW), or multiple labels reaching the same
                     block all poison the entry.  */
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree node = info[target_bb->index];

                  if (node != NULL && node != error_mark_node)
                    {
                      tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = cond;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (stmt && TREE_CODE (stmt) == COND_EXPR)
        {
          tree cond = COND_EXPR_COND (stmt);
          edge true_edge;
          edge false_edge;

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* If the conditional is a single variable 'X', record 'X = 1'
             for the true edge and 'X = 0' on the false edge.  */
          if (SSA_VAR_P (cond))
            {
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              edge_info->lhs = cond;
              edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));

              edge_info = allocate_edge_info (false_edge);
              edge_info->lhs = cond;
              edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
            }
          /* Equality tests may create one or two equivalences.  */
          else if (COMPARISON_CLASS_P (cond))
            {
              tree op0 = TREE_OPERAND (cond, 0);
              tree op1 = TREE_OPERAND (cond, 1);

              /* Special case comparing booleans against a constant as we
                 know the value of OP0 on both arms of the branch.  i.e., we
                 can record an equivalence for OP0 rather than COND.  */
              if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
                  && TREE_CODE (op0) == SSA_NAME
                  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
                  && is_gimple_min_invariant (op1))
                {
                  if (TREE_CODE (cond) == EQ_EXPR)
                    {
                      edge_info = allocate_edge_info (true_edge);
                      edge_info->lhs = op0;
                      edge_info->rhs = (integer_zerop (op1)
                                            ? boolean_false_node
                                            : boolean_true_node);

                      edge_info = allocate_edge_info (false_edge);
                      edge_info->lhs = op0;
                      edge_info->rhs = (integer_zerop (op1)
                                            ? boolean_true_node
                                            : boolean_false_node);
                    }
                  else
                    {
                      edge_info = allocate_edge_info (true_edge);
                      edge_info->lhs = op0;
                      edge_info->rhs = (integer_zerop (op1)
                                            ? boolean_true_node
                                            : boolean_false_node);

                      edge_info = allocate_edge_info (false_edge);
                      edge_info->lhs = op0;
                      edge_info->rhs = (integer_zerop (op1)
                                            ? boolean_false_node
                                            : boolean_true_node);
                    }
                }

              /* Invariant compared against an SSA_NAME or invariant:
                 record the conditions, and for EQ on the true edge (NE
                 on the false edge) also record op1 = op0.  */
              else if (is_gimple_min_invariant (op0)
                       && (TREE_CODE (op1) == SSA_NAME
                           || is_gimple_min_invariant (op1)))
                {
                  tree inverted = invert_truthvalue (cond);
                  struct edge_info *edge_info;

                  edge_info = allocate_edge_info (true_edge);
                  record_conditions (edge_info, cond, inverted);

                  if (TREE_CODE (cond) == EQ_EXPR)
                    {
                      edge_info->lhs = op1;
                      edge_info->rhs = op0;
                    }

                  edge_info = allocate_edge_info (false_edge);
                  record_conditions (edge_info, inverted, cond);

                  if (TREE_CODE (cond) == NE_EXPR)
                    {
                      edge_info->lhs = op1;
                      edge_info->rhs = op0;
                    }
                }

              /* Mirror image: SSA_NAME on the left.  */
              else if (TREE_CODE (op0) == SSA_NAME
                       && (is_gimple_min_invariant (op1)
                           || TREE_CODE (op1) == SSA_NAME))
                {
                  tree inverted = invert_truthvalue (cond);
                  struct edge_info *edge_info;

                  edge_info = allocate_edge_info (true_edge);
                  record_conditions (edge_info, cond, inverted);

                  if (TREE_CODE (cond) == EQ_EXPR)
                    {
                      edge_info->lhs = op0;
                      edge_info->rhs = op1;
                    }

                  edge_info = allocate_edge_info (false_edge);
                  record_conditions (edge_info, inverted, cond);

                  if (TREE_CODE (cond) == NE_EXPR)
                    {
                      edge_info->lhs = op0;
                      edge_info->rhs = op1;
                    }
                }
            }

          /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
        }
    }
}
1443
 
1444
/* Propagate information from BB to its outgoing edges.
1445
 
1446
   This can include equivalency information implied by control statements
1447
   at the end of BB and const/copy propagation into PHIs in BB's
1448
   successor blocks.  */
1449
 
1450
static void
1451
propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
1452
                             basic_block bb)
1453
{
1454
  record_edge_info (bb);
1455
  cprop_into_successor_phis (bb);
1456
}
1457
 
1458
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.

   Returns true when the replacement value is an ADDR_EXPR or a pointer
   invariant, i.e. when the caller may have new symbols exposed for SSA
   renaming (matching the similar test in cprop_operand).  */

static bool
eliminate_redundant_computations (tree stmt)
{
  tree *expr_p, def = NULL_TREE;
  bool insert = true;
  tree cached_lhs;
  bool retval = false;
  bool modify_expr_p = false;

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    def = TREE_OPERAND (stmt, 0);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get a pointer to the expression we are trying to optimize.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    expr_p = &COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == SWITCH_EXPR)
    expr_p = &SWITCH_COND (stmt);
  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
    {
      /* return <expr>; -- the RETURN_EXPR wraps a MODIFY_EXPR.  */
      expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
      modify_expr_p = true;
    }
  else
    {
      expr_p = &TREE_OPERAND (stmt, 1);
      modify_expr_p = true;
    }

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into *EXPR_P.  */
  if (cached_lhs
      && ((TREE_CODE (cached_lhs) != SSA_NAME
           && (modify_expr_p
               || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
                                                      TREE_TYPE (cached_lhs))))
          || may_propagate_copy (*expr_p, cached_lhs)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_generic_expr (dump_file, *expr_p, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
           fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      /* Signal to the caller that new symbols may have been exposed.  */
      if (TREE_CODE (cached_lhs) == ADDR_EXPR
          || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
              && is_gimple_min_invariant (cached_lhs)))
        retval = true;

      /* Insert a conversion when propagating a constant of a different
         type into a MODIFY_EXPR/RETURN_EXPR right-hand side.  */
      if (modify_expr_p
          && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
                                                  TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);

      propagate_tree_value (expr_p, cached_lhs);
      mark_stmt_modified (stmt);
    }
  return retval;
}
1551
 
1552
/* STMT, a MODIFY_EXPR, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.

   MAY_OPTIMIZE_P gates the recording of const/copy values for the LHS.
   ANN is STMT's annotation, consulted for volatile operands.  */

static void
record_equivalences_from_stmt (tree stmt,
                               int may_optimize_p,
                               stmt_ann_t ann)
{
  tree lhs = TREE_OPERAND (stmt, 0);
  enum tree_code lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME)
    {
      tree rhs = TREE_OPERAND (stmt, 1);

      /* Strip away any useless type conversions.  */
      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        SSA_NAME_VALUE (lhs) = rhs;
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
  if (!ann->has_volatile_ops
      && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
          || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      tree new;

      /* FIXME: If the LHS of the assignment is a bitfield and the RHS
         is a constant, we need to adjust the constant to fit into the
         type of the LHS.  If the LHS is a bitfield and the RHS is not
         a constant, then we can not record any equivalences for this
         statement since we would need to represent the widening or
         narrowing of RHS.  This fixes gcc.c-torture/execute/921016-1.c
         and should not be necessary if GCC represented bitfields
         properly.  */
      if (lhs_code == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
        {
          if (TREE_CONSTANT (rhs))
            rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
          else
            rhs = NULL;

          /* If the value overflowed, then we can not use this equivalence.  */
          if (rhs && ! is_gimple_min_invariant (rhs))
            rhs = NULL;
        }

      if (rhs)
        {
          /* Build a new statement with the RHS and LHS exchanged.  */
          new = build2 (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);

          /* Give the artificial load statement virtual operands matching
             the store.  (NB: the misspelling "artficial" is the actual
             name of this internal function.)  */
          create_ssa_artficial_load_stmt (new, stmt);

          /* Finally enter the statement into the available expression
             table.  */
          lookup_avail_expr (new, true);
        }
    }
}
1629
 
1630
/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
   CONST_AND_COPIES.

   Returns true when the propagated value is an ADDR_EXPR or a pointer
   invariant, meaning new symbols may have been exposed for SSA
   renaming; false otherwise (including when no propagation happens).  */

static bool
cprop_operand (tree stmt, use_operand_p op_p)
{
  bool may_have_exposed_new_symbols = false;
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or it is known to be a
     copy of some other variable, use the value or copy stored in
     CONST_AND_COPIES.  */
  val = SSA_NAME_VALUE (op);
  if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
    {
      tree op_type, val_type;

      /* Do not change the base variable in the virtual operand
         tables.  That would make it impossible to reconstruct
         the renamed virtual operand if we later modify this
         statement.  Also only allow the new value to be an SSA_NAME
         for propagation into virtual operands.  */
      if (!is_gimple_reg (op)
          && (TREE_CODE (val) != SSA_NAME
              || is_gimple_reg (val)
              || get_virtual_var (val) != get_virtual_var (op)))
        return false;

      /* Do not replace hard register operands in asm statements.  */
      if (TREE_CODE (stmt) == ASM_EXPR
          && !may_propagate_copy_into_asm (op))
        return false;

      /* Get the toplevel type of each operand.  */
      op_type = TREE_TYPE (op);
      val_type = TREE_TYPE (val);

      /* While both types are pointers, get the type of the object
         pointed to.  */
      while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
        {
          op_type = TREE_TYPE (op_type);
          val_type = TREE_TYPE (val_type);
        }

      /* Make sure underlying types match before propagating a constant by
         converting the constant to the proper type.  Note that convert may
         return a non-gimple expression, in which case we ignore this
         propagation opportunity.  */
      if (TREE_CODE (val) != SSA_NAME)
        {
          if (!lang_hooks.types_compatible_p (op_type, val_type))
            {
              val = fold_convert (TREE_TYPE (op), val);
              if (!is_gimple_min_invariant (val))
                return false;
            }
        }

      /* Certain operands are not allowed to be copy propagated due
         to their interaction with exception handling and some GCC
         extensions.  */
      else if (!may_propagate_copy (op, val))
        return false;

      /* Do not propagate copies if the propagated value is at a deeper loop
         depth than the propagatee.  Otherwise, this may move loop variant
         variables outside of their loops and prevent coalescing
         opportunities.  If the value was loop invariant, it will be hoisted
         by LICM and exposed for copy propagation.  */
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
        return false;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, op, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, val, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* If VAL is an ADDR_EXPR or a constant of pointer type, note
         that we may have exposed a new symbol for SSA renaming.  */
      if (TREE_CODE (val) == ADDR_EXPR
          || (POINTER_TYPE_P (TREE_TYPE (op))
              && is_gimple_min_invariant (val)))
        may_have_exposed_new_symbols = true;

      if (TREE_CODE (val) != SSA_NAME)
        opt_stats.num_const_prop++;
      else
        opt_stats.num_copy_prop++;

      propagate_value (op_p, val);

      /* And note that we modified this statement.  This is now
         safe, even if we changed virtual operands since we will
         rescan the statement and rewrite its operands again.  */
      mark_stmt_modified (stmt);
    }
  return may_have_exposed_new_symbols;
}
1736
 
1737
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
1738
   known value for that SSA_NAME (or NULL if no value is known).
1739
 
1740
   Propagate values from CONST_AND_COPIES into the uses, vuses and
1741
   v_may_def_ops of STMT.  */
1742
 
1743
static bool
1744
cprop_into_stmt (tree stmt)
1745
{
1746
  bool may_have_exposed_new_symbols = false;
1747
  use_operand_p op_p;
1748
  ssa_op_iter iter;
1749
 
1750
  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
1751
    {
1752
      if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
1753
        may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
1754
    }
1755
 
1756
  return may_have_exposed_new_symbols;
1757
}
1758
 
1759
 
1760
/* Optimize the statement pointed to by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.

   WALK_DATA is the dominator-walk context (unused here); BB is the block
   containing the statement; SI is the iterator pointing at it.  Side
   effects: may fold or rewrite the statement in place, may set
   CFG_ALTERED, may flag BB in NEED_EH_CLEANUP, and may queue the
   statement on STMTS_TO_RESCAN for operand re-scanning.  */

static void
optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
               basic_block bb, block_stmt_iterator si)
{
  stmt_ann_t ann;
  tree stmt, old_stmt;
  bool may_optimize_p;
  bool may_have_exposed_new_symbols = false;

  /* Remember the original statement so EH information can be compared
     against the (possibly folded) statement later.  */
  old_stmt = stmt = bsi_stmt (si);

  if (TREE_CODE (stmt) == COND_EXPR)
    canonicalize_comparison (stmt);

  update_stmt_if_modified (stmt);
  ann = stmt_ann (stmt);
  opt_stats.num_stmts++;
  /* NOTE(review): redundant -- already initialized at declaration.  */
  may_have_exposed_new_symbols = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
    }

  /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs.  */
  may_have_exposed_new_symbols = cprop_into_stmt (stmt);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (ann->modified)
    {
      tree rhs;

      /* Try to fold the statement making sure that STMT is kept
         up to date.  */
      if (fold_stmt (bsi_stmt_ptr (si)))
        {
          /* Folding may have replaced the statement; refetch it and
             its annotation.  */
          stmt = bsi_stmt (si);
          ann = stmt_ann (stmt);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Folded to: ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }
        }

      /* Propagation may have turned a variable index into a constant
         inside an ADDR_EXPR; recompute its invariant flag.  */
      rhs = get_rhs (stmt);
      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (rhs);

      /* Constant/copy propagation above may change the set of
         virtual operands associated with this statement.  Folding
         may remove the need for some virtual operands.

         Indicate we will need to rescan and rewrite the statement.  */
      may_have_exposed_new_symbols = true;
    }

  /* Check for redundant computations.  Do this optimization only
     for assignments that have no volatile ops and conditionals.  */
  may_optimize_p = (!ann->has_volatile_ops
                    && ((TREE_CODE (stmt) == RETURN_EXPR
                         && TREE_OPERAND (stmt, 0)
                         && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
                         && ! (TREE_SIDE_EFFECTS
                               (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
                        || (TREE_CODE (stmt) == MODIFY_EXPR
                            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
                        || TREE_CODE (stmt) == COND_EXPR
                        || TREE_CODE (stmt) == SWITCH_EXPR));

  if (may_optimize_p)
    may_have_exposed_new_symbols |= eliminate_redundant_computations (stmt);

  /* Record any additional equivalences created by this statement.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    record_equivalences_from_stmt (stmt,
                                   may_optimize_p,
                                   ann);

  /* If STMT is a COND_EXPR and it was modified, then we may know
     where it goes.  If that is the case, then mark the CFG as altered.

     This will cause us to later call remove_unreachable_blocks and
     cleanup_tree_cfg when it is safe to do so.  It is not safe to
     clean things up here since removal of edges and such can trigger
     the removal of PHI nodes, which in turn can release SSA_NAMEs to
     the manager.

     That's all fine and good, except that once SSA_NAMEs are released
     to the manager, we must not call create_ssa_name until all references
     to released SSA_NAMEs have been eliminated.

     All references to the deleted SSA_NAMEs can not be eliminated until
     we remove unreachable blocks.

     We can not remove unreachable blocks until after we have completed
     any queued jump threading.

     We can not complete any queued jump threads until we have taken
     appropriate variables out of SSA form.  Taking variables out of
     SSA form can call create_ssa_name and thus we lose.

     Ultimately I suspect we're going to need to change the interface
     into the SSA_NAME manager.  */

  if (ann->modified)
    {
      tree val = NULL;

      if (TREE_CODE (stmt) == COND_EXPR)
        val = COND_EXPR_COND (stmt);
      else if (TREE_CODE (stmt) == SWITCH_EXPR)
        val = SWITCH_COND (stmt);

      /* A constant condition means only one outgoing edge can be taken;
         defer the actual CFG cleanup (see the comment above).  */
      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
        cfg_altered = true;

      /* If we simplified a statement in such a way as to be shown that it
         cannot trap, update the eh information and the cfg to match.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup, bb->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Flagged to clear EH edges.\n");
        }
    }

  /* Queue the statement for a later operand rescan rather than doing it
     now; rescanning mid-walk could invalidate iterator state.  */
  if (may_have_exposed_new_symbols)
    VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
}
1908
 
1909
/* Search for an existing instance of STMT in the AVAIL_EXPRS table.  If
   found, return its LHS. Otherwise insert STMT in the table and return
   NULL_TREE.

   Also, when an expression is first inserted in the AVAIL_EXPRS table, it
   is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
   can be removed when we finish processing this block and its children.

   NOTE: This function assumes that STMT is a MODIFY_EXPR node that
   contains no CALL_EXPR on its RHS and makes no volatile nor
   aliased references.

   INSERT selects between htab INSERT and NO_INSERT lookup modes: when
   false, a missing entry is simply reported (NULL slot) rather than
   created.  */

static tree
lookup_avail_expr (tree stmt, bool insert)
{
  void **slot;
  tree lhs;
  tree temp;
  struct expr_hash_elt *element = XNEW (struct expr_hash_elt);

  /* Only MODIFY_EXPRs have an LHS to record; conditionals and switches
     hash their expression with a NULL LHS.  */
  lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;

  initialize_hash_element (stmt, lhs, element);

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (TREE_CODE (element->rhs) == SSA_NAME
      || is_gimple_min_invariant (element->rhs))
    {
      free (element);
      return NULL_TREE;
    }

  /* Finally try to find the expression in the main expression hash table.  */
  slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
                                   (insert ? INSERT : NO_INSERT));
  /* NO_INSERT lookups return a NULL slot when the entry is absent.  */
  if (slot == NULL)
    {
      free (element);
      return NULL_TREE;
    }

  /* Empty slot: the expression was not previously available.  Record it
     and push it on AVAIL_EXPRS_STACK so it can be unwound when we leave
     this block.  Ownership of ELEMENT transfers to the hash table here,
     so it must NOT be freed on this path.  */
  if (*slot == NULL)
    {
      *slot = (void *) element;
      VEC_safe_push (tree, heap, avail_exprs_stack,
                     stmt ? stmt : element->rhs);
      return NULL_TREE;
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = ((struct expr_hash_elt *)*slot)->lhs;

  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
     use the value from the const_and_copies table.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      temp = SSA_NAME_VALUE (lhs);
      if (temp && TREE_CODE (temp) != VALUE_HANDLE)
        lhs = temp;
    }

  free (element);
  return lhs;
}
1976
 
1977
/* Hashing and equality functions for AVAIL_EXPRS.  The table stores
1978
   MODIFY_EXPR statements.  We compute a value number for expressions using
1979
   the code of the expression and the SSA numbers of its operands.  */
1980
 
1981
static hashval_t
1982
avail_expr_hash (const void *p)
1983
{
1984
  tree stmt = ((struct expr_hash_elt *)p)->stmt;
1985
  tree rhs = ((struct expr_hash_elt *)p)->rhs;
1986
  tree vuse;
1987
  ssa_op_iter iter;
1988
  hashval_t val = 0;
1989
 
1990
  /* iterative_hash_expr knows how to deal with any expression and
1991
     deals with commutative operators as well, so just use it instead
1992
     of duplicating such complexities here.  */
1993
  val = iterative_hash_expr (rhs, val);
1994
 
1995
  /* If the hash table entry is not associated with a statement, then we
1996
     can just hash the expression and not worry about virtual operands
1997
     and such.  */
1998
  if (!stmt || !stmt_ann (stmt))
1999
    return val;
2000
 
2001
  /* Add the SSA version numbers of every vuse operand.  This is important
2002
     because compound variables like arrays are not renamed in the
2003
     operands.  Rather, the rename is done on the virtual variable
2004
     representing all the elements of the array.  */
2005
  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
2006
    val = iterative_hash_expr (vuse, val);
2007
 
2008
  return val;
2009
}
2010
 
2011
static hashval_t
2012
real_avail_expr_hash (const void *p)
2013
{
2014
  return ((const struct expr_hash_elt *)p)->hash;
2015
}
2016
 
2017
static int
2018
avail_expr_eq (const void *p1, const void *p2)
2019
{
2020
  tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
2021
  tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
2022
  tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
2023
  tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
2024
 
2025
  /* If they are the same physical expression, return true.  */
2026
  if (rhs1 == rhs2 && stmt1 == stmt2)
2027
    return true;
2028
 
2029
  /* If their codes are not equal, then quit now.  */
2030
  if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
2031
    return false;
2032
 
2033
  /* In case of a collision, both RHS have to be identical and have the
2034
     same VUSE operands.  */
2035
  if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
2036
       || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
2037
      && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
2038
    {
2039
      bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
2040
      gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
2041
                  == ((struct expr_hash_elt *)p2)->hash);
2042
      return ret;
2043
    }
2044
 
2045
  return false;
2046
}
2047
 
2048
/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
2049
   up degenerate PHIs created by or exposed by jump threading.  */
2050
 
2051
/* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2052
   NULL.  */
2053
 
2054
static tree
2055
degenerate_phi_result (tree phi)
2056
{
2057
  tree lhs = PHI_RESULT (phi);
2058
  tree val = NULL;
2059
  int i;
2060
 
2061
  /* Ignoring arguments which are the same as LHS, if all the remaining
2062
     arguments are the same, then the PHI is a degenerate and has the
2063
     value of that common argument.  */
2064
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
2065
    {
2066
      tree arg = PHI_ARG_DEF (phi, i);
2067
 
2068
      if (arg == lhs)
2069
        continue;
2070
      else if (!val)
2071
        val = arg;
2072
      else if (!operand_equal_p (arg, val, 0))
2073
        break;
2074
    }
2075
  return (i == PHI_NUM_ARGS (phi) ? val : NULL);
2076
}
2077
 
2078
/* Given a tree node T, which is either a PHI_NODE or MODIFY_EXPR,
2079
   remove it from the IL.  */
2080
 
2081
static void
2082
remove_stmt_or_phi (tree t)
2083
{
2084
  if (TREE_CODE (t) == PHI_NODE)
2085
    remove_phi_node (t, NULL);
2086
  else
2087
    {
2088
      block_stmt_iterator bsi = bsi_for_stmt (t);
2089
      bsi_remove (&bsi, true);
2090
    }
2091
}
2092
 
2093
/* Given a tree node T, which is either a PHI_NODE or MODIFY_EXPR,
2094
   return the "rhs" of the node, in the case of a non-degenerate
2095
   PHI, NULL is returned.  */
2096
 
2097
static tree
2098
get_rhs_or_phi_arg (tree t)
2099
{
2100
  if (TREE_CODE (t) == PHI_NODE)
2101
    return degenerate_phi_result (t);
2102
  else if (TREE_CODE (t) == MODIFY_EXPR)
2103
    return TREE_OPERAND (t, 1);
2104
  gcc_unreachable ();
2105
}
2106
 
2107
 
2108
/* Given a tree node T, which is either a PHI_NODE or a MODIFY_EXPR,
2109
   return the "lhs" of the node.  */
2110
 
2111
static tree
2112
get_lhs_or_phi_result (tree t)
2113
{
2114
  if (TREE_CODE (t) == PHI_NODE)
2115
    return PHI_RESULT (t);
2116
  else if (TREE_CODE (t) == MODIFY_EXPR)
2117
    return TREE_OPERAND (t, 0);
2118
  gcc_unreachable ();
2119
}
2120
 
2121
/* Propagate RHS into all uses of LHS (when possible).

   RHS and LHS are derived from STMT, which is passed in solely so
   that we can remove it if propagation is successful.

   When propagating into a PHI node or into a statement which turns
   into a trivial copy or constant initialization, set the
   appropriate bit in INTERESTING_NAMEs so that we will visit those
   nodes as well in an effort to pick up secondary optimization
   opportunities.

   Side effects: may fold and rewrite use statements, remove CFG edges
   made unexecutable by propagation into control statements (setting
   CFG_ALTERED), flag blocks in NEED_EH_CLEANUP, and remove STMT itself
   once all uses of LHS are gone.  */

static void
propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
{
  /* First verify that propagation is valid and isn't going to move a
     loop variant variable outside its loop.  */
  if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      && (TREE_CODE (rhs) != SSA_NAME
          || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      && may_propagate_copy (lhs, rhs)
      && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      tree use_stmt;
      /* Tracks whether every use of LHS was successfully replaced.  */
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replacing '");
          print_generic_expr (dump_file, lhs, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Walk over every use of LHS and try to replace the use with RHS.
         At this point the only reason why such a propagation would not
         be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        {

          /* It's not always safe to propagate into an ASM_EXPR.  */
          if (TREE_CODE (use_stmt) == ASM_EXPR
              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
              print_generic_expr (dump_file, use_stmt, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Propagate the RHS into this use of the LHS.  */
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            propagate_value (use_p, rhs);

          /* Special cases to avoid useless calls into the folding
             routines, operand scanning, etc.

             First, propagation into a PHI may cause the PHI to become
             a degenerate, so mark the PHI as interesting.  No other
             actions are necessary.

             Second, if we're propagating a virtual operand and the
             propagation does not change the underlying _DECL node for
             the virtual operand, then no further actions are necessary.  */
          if (TREE_CODE (use_stmt) == PHI_NODE
              || (! is_gimple_reg (lhs)
                  && TREE_CODE (rhs) == SSA_NAME
                  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
            {
              /* Dump details.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
                  print_generic_expr (dump_file, use_stmt, dump_flags);
                  fprintf (dump_file, "\n");
                }

              /* Propagation into a PHI may expose new degenerate PHIs,
                 so mark the result of the PHI as interesting.  */
              if (TREE_CODE (use_stmt) == PHI_NODE)
                {
                  tree result = get_lhs_or_phi_result (use_stmt);
                  bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
                }
              continue;
            }

          /* From this point onward we are propagating into a
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
          fold_stmt_inplace (use_stmt);

          /* Sometimes propagation can expose new operands to the
             renamer.  Note this will call update_stmt at the
             appropriate time.  */
          mark_new_vars_to_rename (use_stmt);

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
              print_generic_expr (dump_file, use_stmt, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
          if (TREE_CODE (use_stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr (TREE_OPERAND (use_stmt, 1));

          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
              bitmap_set_bit (need_eh_cleanup, bb_for_stmt (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Flagged to clear EH edges.\n");
            }

          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
          if (TREE_CODE (use_stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (use_stmt, 0)) == SSA_NAME
              && (TREE_CODE (TREE_OPERAND (use_stmt, 1)) == SSA_NAME
                  || is_gimple_min_invariant (TREE_OPERAND (use_stmt, 1))))
            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }

          /* Propagation into these nodes may make certain edges in
             the CFG unexecutable.  We want to identify them as PHI nodes
             at the destination of those unexecutable edges may become
             degenerates.  */
          else if (TREE_CODE (use_stmt) == COND_EXPR
                   || TREE_CODE (use_stmt) == SWITCH_EXPR
                   || TREE_CODE (use_stmt) == GOTO_EXPR)
            {
              tree val;

              if (TREE_CODE (use_stmt) == COND_EXPR)
                val = COND_EXPR_COND (use_stmt);
              else if (TREE_CODE (use_stmt) == SWITCH_EXPR)
                val = SWITCH_COND (use_stmt);
              else
                val = GOTO_DESTINATION  (use_stmt);

              /* A constant condition/destination fixes the taken edge;
                 every other outgoing edge is now unexecutable.  */
              if (is_gimple_min_invariant (val))
                {
                  basic_block bb = bb_for_stmt (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  edge_iterator ei;
                  edge e;
                  block_stmt_iterator bsi;

                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
                    {
                      if (e != te)
                        {
                          tree phi;

                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
                          for (phi = phi_nodes (e->dest);
                               phi;
                               phi = PHI_CHAIN (phi))
                            {
                              tree result = PHI_RESULT (phi);
                              int version = SSA_NAME_VERSION (result);

                              bitmap_set_bit (interesting_names, version);
                            }

                          /* Fold the removed edge's profile data into the
                             surviving edge.  */
                          te->probability += e->probability;

                          te->count += e->count;
                          remove_edge (e);
                          cfg_altered = 1;
                        }
                      else
                        ei_next (&ei);
                    }

                  /* The control statement is now pointless; delete it.  */
                  bsi = bsi_last (bb_for_stmt (use_stmt));
                  bsi_remove (&bsi, true);

                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
                  te->flags &= ~EDGE_ABNORMAL;
                  te->flags |= EDGE_FALLTHRU;
                  if (te->probability > REG_BR_PROB_BASE)
                    te->probability = REG_BR_PROB_BASE;
                }
            }
        }

      /* Ensure there is nothing else to do. */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
         we can remove STMT.  */
      if (all)
        remove_stmt_or_phi (stmt);
    }
}
2339
 
2340
/* T is either a PHI node (potentially a degenerate PHI node) or
2341
   a statement that is a trivial copy or constant initialization.
2342
 
2343
   Attempt to eliminate T by propagating its RHS into all uses of
2344
   its LHS.  This may in turn set new bits in INTERESTING_NAMES
2345
   for nodes we want to revisit later.
2346
 
2347
   All exit paths should clear INTERESTING_NAMES for the result
2348
   of T.  */
2349
 
2350
static void
2351
eliminate_const_or_copy (tree t, bitmap interesting_names)
2352
{
2353
  tree lhs = get_lhs_or_phi_result (t);
2354
  tree rhs;
2355
  int version = SSA_NAME_VERSION (lhs);
2356
 
2357
  /* If the LHS of this statement or PHI has no uses, then we can
2358
     just eliminate it.  This can occur if, for example, the PHI
2359
     was created by block duplication due to threading and its only
2360
     use was in the conditional at the end of the block which was
2361
     deleted.  */
2362
  if (has_zero_uses (lhs))
2363
    {
2364
      bitmap_clear_bit (interesting_names, version);
2365
      remove_stmt_or_phi (t);
2366
      return;
2367
    }
2368
 
2369
  /* Get the RHS of the assignment or PHI node if the PHI is a
2370
     degenerate.  */
2371
  rhs = get_rhs_or_phi_arg (t);
2372
  if (!rhs)
2373
    {
2374
      bitmap_clear_bit (interesting_names, version);
2375
      return;
2376
    }
2377
 
2378
  propagate_rhs_into_lhs (t, lhs, rhs, interesting_names);
2379
 
2380
  /* Note that T may well have been deleted by now, so do
2381
     not access it, instead use the saved version # to clear
2382
     T's entry in the worklist.  */
2383
  bitmap_clear_bit (interesting_names, version);
2384
}
2385
 
2386
/* The first phase in degenerate PHI elimination.
2387
 
2388
   Eliminate the degenerate PHIs in BB, then recurse on the
2389
   dominator children of BB.  */
2390
 
2391
static void
2392
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2393
{
2394
  tree phi, next;
2395
  basic_block son;
2396
 
2397
  for (phi = phi_nodes (bb); phi; phi = next)
2398
    {
2399
      next = PHI_CHAIN (phi);
2400
      eliminate_const_or_copy (phi, interesting_names);
2401
    }
2402
 
2403
  /* Recurse into the dominator children of BB.  */
2404
  for (son = first_dom_son (CDI_DOMINATORS, bb);
2405
       son;
2406
       son = next_dom_son (CDI_DOMINATORS, son))
2407
    eliminate_degenerate_phis_1 (son, interesting_names);
2408
}
2409
 
2410
 
2411
/* A very simple pass to eliminate degenerate PHI nodes from the
2412
   IL.  This is meant to be fast enough to be able to be run several
2413
   times in the optimization pipeline.
2414
 
2415
   Certain optimizations, particularly those which duplicate blocks
2416
   or remove edges from the CFG can create or expose PHIs which are
2417
   trivial copies or constant initializations.
2418
 
2419
   While we could pick up these optimizations in DOM or with the
2420
   combination of copy-prop and CCP, those solutions are far too
2421
   heavy-weight for our needs.
2422
 
2423
   This implementation has two phases so that we can efficiently
2424
   eliminate the first order degenerate PHIs and second order
2425
   degenerate PHIs.
2426
 
2427
   The first phase performs a dominator walk to identify and eliminate
2428
   the vast majority of the degenerate PHIs.  When a degenerate PHI
2429
   is identified and eliminated any affected statements or PHIs
2430
   are put on a worklist.
2431
 
2432
   The second phase eliminates degenerate PHIs and trivial copies
2433
   or constant initializations using the worklist.  This is how we
2434
   pick up the secondary optimization opportunities with minimal
2435
   cost.  */
2436
 
2437
static unsigned int
2438
eliminate_degenerate_phis (void)
2439
{
2440
  bitmap interesting_names;
2441
  bitmap interesting_names1;
2442
 
2443
  /* Bitmap of blocks which need EH information updated.  We can not
2444
     update it on-the-fly as doing so invalidates the dominator tree.  */
2445
  need_eh_cleanup = BITMAP_ALLOC (NULL);
2446
 
2447
  /* INTERESTING_NAMES is effectively our worklist, indexed by
2448
     SSA_NAME_VERSION.
2449
 
2450
     A set bit indicates that the statement or PHI node which
2451
     defines the SSA_NAME should be (re)examined to determine if
2452
     it has become a degenerate PHI or trivial const/copy propagation
2453
     opportunity.
2454
 
2455
     Experiments have show we generally get better compilation
2456
     time behavior with bitmaps rather than sbitmaps.  */
2457
  interesting_names = BITMAP_ALLOC (NULL);
2458
  interesting_names1 = BITMAP_ALLOC (NULL);
2459
 
2460
  /* First phase.  Eliminate degenerate PHIs via a dominator
2461
     walk of the CFG.
2462
 
2463
     Experiments have indicated that we generally get better
2464
     compile-time behavior by visiting blocks in the first
2465
     phase in dominator order.  Presumably this is because walking
2466
     in dominator order leaves fewer PHIs for later examination
2467
     by the worklist phase.  */
2468
  calculate_dominance_info (CDI_DOMINATORS);
2469
  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2470
 
2471
  /* Second phase.  Eliminate second order degenerate PHIs as well
2472
     as trivial copies or constant initializations identified by
2473
     the first phase or this phase.  Basically we keep iterating
2474
     until our set of INTERESTING_NAMEs is empty.   */
2475
  while (!bitmap_empty_p (interesting_names))
2476
    {
2477
      unsigned int i;
2478
      bitmap_iterator bi;
2479
 
2480
      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2481
         changed during the loop.  Copy it to another bitmap and
2482
         use that.  */
2483
      bitmap_copy (interesting_names1, interesting_names);
2484
 
2485
      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2486
        {
2487
          tree name = ssa_name (i);
2488
 
2489
          /* Ignore SSA_NAMEs that have been released because
2490
             their defining statement was deleted (unreachable).  */
2491
          if (name)
2492
            eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2493
                                     interesting_names);
2494
        }
2495
    }
2496
 
2497
  /* Propagation of const and copies may make some EH edges dead.  Purge
2498
     such edges from the CFG as needed.  */
2499
  if (!bitmap_empty_p (need_eh_cleanup))
2500
    {
2501
      cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
2502
      BITMAP_FREE (need_eh_cleanup);
2503
    }
2504
 
2505
  BITMAP_FREE (interesting_names);
2506
  BITMAP_FREE (interesting_names1);
2507
  if (cfg_altered)
2508
    free_dominance_info (CDI_DOMINATORS);
2509
  return 0;
2510
}
2511
 
2512
/* Pass descriptor for the PHI-only const/copy propagation pass
   implemented by eliminate_degenerate_phis above.  Gated by the same
   predicate as the main dominator optimizer.  */
struct tree_opt_pass pass_phi_only_cprop =
{
  "phicprop",                           /* name */
  gate_dominator,                       /* gate */
  eliminate_degenerate_phis,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PHI_CPROP,                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  PROP_smt_usage,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts | TODO_update_smt_usage
    | TODO_update_ssa, /* todo_flags_finish */

};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.