OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [tree-ssa-pre.c] - Blame information for rev 308

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* SSA-PRE for trees.
2
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3
   Free Software Foundation, Inc.
4
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
5
   <stevenb@suse.de>
6
 
7
This file is part of GCC.
8
 
9
GCC is free software; you can redistribute it and/or modify
10
it under the terms of the GNU General Public License as published by
11
the Free Software Foundation; either version 3, or (at your option)
12
any later version.
13
 
14
GCC is distributed in the hope that it will be useful,
15
but WITHOUT ANY WARRANTY; without even the implied warranty of
16
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
17
GNU General Public License for more details.
18
 
19
You should have received a copy of the GNU General Public License
20
along with GCC; see the file COPYING3.  If not see
21
<http://www.gnu.org/licenses/>.  */
22
 
23
#include "config.h"
24
#include "system.h"
25
#include "coretypes.h"
26
#include "tm.h"
27
#include "ggc.h"
28
#include "tree.h"
29
#include "basic-block.h"
30
#include "diagnostic.h"
31
#include "tree-inline.h"
32
#include "tree-flow.h"
33
#include "gimple.h"
34
#include "tree-dump.h"
35
#include "timevar.h"
36
#include "fibheap.h"
37
#include "hashtab.h"
38
#include "tree-iterator.h"
39
#include "real.h"
40
#include "alloc-pool.h"
41
#include "obstack.h"
42
#include "tree-pass.h"
43
#include "flags.h"
44
#include "bitmap.h"
45
#include "langhooks.h"
46
#include "cfgloop.h"
47
#include "tree-ssa-sccvn.h"
48
#include "tree-scalar-evolution.h"
49
#include "params.h"
50
#include "dbgcnt.h"
51
 
52
/* TODO:
53
 
54
   1. Avail sets can be shared by making an avail_find_leader that
55
      walks up the dominator tree and looks in those avail sets.
56
      This might affect code optimality, it's unclear right now.
57
   2. Strength reduction can be performed by anticipating expressions
58
      we can repair later on.
59
   3. We can do back-substitution or smarter value numbering to catch
60
      commutative expressions split up over multiple statements.
61
*/
62
 
63
/* For ease of terminology, "expression node" in the below refers to
64
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
65
   represent the actual statement containing the expressions we care about,
66
   and we cache the value number by putting it in the expression.  */
67
 
68
/* Basic algorithm
69
 
70
   First we walk the statements to generate the AVAIL sets, the
71
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
72
   generation of values/expressions by a given block.  We use them
73
   when computing the ANTIC sets.  The AVAIL sets consist of
74
   SSA_NAME's that represent values, so we know what values are
75
   available in what blocks.  AVAIL is a forward dataflow problem.  In
76
   SSA, values are never killed, so we don't need a kill set, or a
77
   fixpoint iteration, in order to calculate the AVAIL sets.  In
78
   traditional parlance, AVAIL sets tell us the downsafety of the
79
   expressions/values.
80
 
81
   Next, we generate the ANTIC sets.  These sets represent the
82
   anticipatable expressions.  ANTIC is a backwards dataflow
83
   problem.  An expression is anticipatable in a given block if it could
84
   be generated in that block.  This means that if we had to perform
85
   an insertion in that block, of the value of that expression, we
86
   could.  Calculating the ANTIC sets requires phi translation of
87
   expressions, because the flow goes backwards through phis.  We must
88
   iterate to a fixpoint of the ANTIC sets, because we have a kill
89
   set.  Even in SSA form, values are not live over the entire
90
   function, only from their definition point onwards.  So we have to
91
   remove values from the ANTIC set once we go past the definition
92
   point of the leaders that make them up.
93
   compute_antic/compute_antic_aux performs this computation.
94
 
95
   Third, we perform insertions to make partially redundant
96
   expressions fully redundant.
97
 
98
   An expression is partially redundant (excluding partial
99
   anticipation) if:
100
 
101
   1. It is AVAIL in some, but not all, of the predecessors of a
102
      given block.
103
   2. It is ANTIC in all the predecessors.
104
 
105
   In order to make it fully redundant, we insert the expression into
106
   the predecessors where it is not available, but is ANTIC.
107
 
108
   For the partial anticipation case, we only perform insertion if it
109
   is partially anticipated in some block, and fully available in all
110
   of the predecessors.
111
 
112
   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
113
   performs these steps.
114
 
115
   Fourth, we eliminate fully redundant expressions.
116
   This is a simple statement walk that replaces redundant
117
   calculations with the now available values.  */
118
 
119
/* Representations of value numbers:
120
 
121
   Value numbers are represented by a representative SSA_NAME.  We
122
   will create fake SSA_NAME's in situations where we need a
123
   representative but do not have one (because it is a complex
124
   expression).  In order to facilitate storing the value numbers in
125
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
126
   associate a value_id with each value number, and create full blown
127
   ssa_name's only where we actually need them (IE in operands of
128
   existing expressions).
129
 
130
   Theoretically you could replace all the value_id's with
131
   SSA_NAME_VERSION, but this would allocate a large number of
132
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
133
   It would also require an additional indirection at each point we
134
   use the value id.  */
135
 
136
/* Representation of expressions on value numbers:
137
 
138
   Expressions consisting of value numbers are represented the same
139
   way as our VN internally represents them, with an additional
140
   "pre_expr" wrapping around them in order to facilitate storing all
141
   of the expressions in the same sets.  */
142
 
143
/* Representation of sets:
144
 
145
   The dataflow sets do not need to be sorted in any particular order
146
   for the majority of their lifetime, are simply represented as two
147
   bitmaps, one that keeps track of values present in the set, and one
148
   that keeps track of expressions present in the set.
149
 
150
   When we need them in topological order, we produce it on demand by
151
   transforming the bitmap into an array and sorting it into topo
152
   order.  */
153
 
154
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

/* Payload of a pre_expr; exactly one member is valid, selected by the
   pre_expr_kind stored next to it.  */

typedef union pre_expr_union_d
{
  tree name;			/* Valid when kind == NAME.  */
  tree constant;		/* Valid when kind == CONSTANT.  */
  vn_nary_op_t nary;		/* Valid when kind == NARY.  */
  vn_reference_t reference;	/* Valid when kind == REFERENCE.  */
} pre_expr_union;

/* An expression as tracked by PRE: a kind tag, a unique id used to
   index the expression bitmaps, and the kind-specific payload.  */

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;	/* Which union member is valid.  */
  unsigned int id;		/* Unique id, see alloc_expression_id.  */
  pre_expr_union u;		/* Kind-specific payload.  */
} *pre_expr;

/* Accessors for the union members; (e)->kind must match the accessor
   that is used.  */

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
184
 
185
static int
186
pre_expr_eq (const void *p1, const void *p2)
187
{
188
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
189
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;
190
 
191
  if (e1->kind != e2->kind)
192
    return false;
193
 
194
  switch (e1->kind)
195
    {
196
    case CONSTANT:
197
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
198
                                       PRE_EXPR_CONSTANT (e2));
199
    case NAME:
200
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
201
    case NARY:
202
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
203
    case REFERENCE:
204
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
205
                              PRE_EXPR_REFERENCE (e2));
206
    default:
207
      gcc_unreachable ();
208
    }
209
}
210
 
211
static hashval_t
212
pre_expr_hash (const void *p1)
213
{
214
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
215
  switch (e->kind)
216
    {
217
    case CONSTANT:
218
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
219
    case NAME:
220
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
221
    case NARY:
222
      return PRE_EXPR_NARY (e)->hashcode;
223
    case REFERENCE:
224
      return PRE_EXPR_REFERENCE (e)->hashcode;
225
    default:
226
      gcc_unreachable ();
227
    }
228
}
229
 
230
 
231
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
/* Reverse mapping for non-NAME expressions (see alloc_expression_id);
   NAME expressions instead use name_to_id below.  */
static htab_t expression_to_id;
/* Reverse mapping for NAME expressions, indexed by SSA name version.  */
static VEC(unsigned, heap) *name_to_id;
240
 
241
/* Allocate an expression id for EXPR.  Records EXPR in the expressions
   vector and in the appropriate reverse mapping (name_to_id for NAME
   expressions, expression_to_id for everything else), and returns the
   newly assigned id.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow. */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      /* The slot for this version must still be unused (zero).  */
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
270
 
271
/* Return the expression id for tree EXPR.  The id is assigned by
   alloc_expression_id and cached on the expression itself.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
278
 
279
static inline unsigned int
280
lookup_expression_id (const pre_expr expr)
281
{
282
  void **slot;
283
 
284
  if (expr->kind == NAME)
285
    {
286
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
287
      if (VEC_length (unsigned, name_to_id) <= version)
288
        return 0;
289
      return VEC_index (unsigned, name_to_id, version);
290
    }
291
  else
292
    {
293
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
294
      if (!slot)
295
        return 0;
296
      return ((pre_expr)*slot)->id;
297
    }
298
}
299
 
300
/* Return the existing expression id for EXPR, or create one if one
301
   does not exist yet.  */
302
 
303
static inline unsigned int
304
get_or_alloc_expression_id (pre_expr expr)
305
{
306
  unsigned int id = lookup_expression_id (expr);
307
  if (id == 0)
308
    return alloc_expression_id (expr);
309
  return expr->id = id;
310
}
311
 
312
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}
319
 
320
/* Destroy the expressions array.  Note this does not free the
   individual pre_exprs (those are pool-allocated) nor the
   expression_to_id / name_to_id reverse mappings.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

/* Allocation pool that pre_expr nodes are carved from.  */
static alloc_pool pre_expr_pool;
330
 
331
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */
332
 
333
static pre_expr
334
get_or_alloc_expr_for_name (tree name)
335
{
336
  struct pre_expr_d expr;
337
  pre_expr result;
338
  unsigned int result_id;
339
 
340
  expr.kind = NAME;
341
  expr.id = 0;
342
  PRE_EXPR_NAME (&expr) = name;
343
  result_id = lookup_expression_id (&expr);
344
  if (result_id != 0)
345
    return expression_for_id (result_id);
346
 
347
  result = (pre_expr) pool_alloc (pre_expr_pool);
348
  result->kind = NAME;
349
  PRE_EXPR_NAME (result) = name;
350
  alloc_expression_id (result);
351
  return result;
352
}
353
 
354
/* True while running FRE rather than full PRE -- assumed from the
   name; set outside this chunk (TODO confirm at the pass entry).  */
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;	/* Bits indexed by expression id.  */
  bitmap values;	/* Bits indexed by value id.  */
} *bitmap_set_t;

/* Iterate over each expression id ID in SET using iterator BI.  */
#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)            \
  EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))

/* Iterate over each value id ID in SET using iterator BI.  */
#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)           \
  EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of, one instance per basic block,
   reached through the block's aux pointer (see the accessors below).  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

/* Accessors for the per-basic-block sets above.  */
#define EXP_GEN(BB)     ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)     ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)     ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)   ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)    ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)       ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)    ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)   ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)  ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
432
 
433
 
434
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;
457
 
458
/* Whether to also perform insertion for partially anticipated values
   -- assumed from the name and do_partial_partial_insertion reference
   above; set elsewhere in the file (TODO confirm).  */
static bool do_partial_partial;

/* Forward declarations for routines defined later in the file.  */
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
473
 
474
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair. This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
516
 
517
/* Return the hash value for a phi translation table entry.  */
518
 
519
static hashval_t
520
expr_pred_trans_hash (const void *p)
521
{
522
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
523
  return ve->hashcode;
524
}
525
 
526
/* Return true if two phi translation table entries are the same.
527
   P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
528
 
529
static int
530
expr_pred_trans_eq (const void *p1, const void *p2)
531
{
532
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
533
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
534
  basic_block b1 = ve1->pred;
535
  basic_block b2 = ve2->pred;
536
 
537
  /* If they are not translations for the same basic block, they can't
538
     be equal.  */
539
  if (b1 != b2)
540
    return false;
541
  return pre_expr_eq (ve1->e, ve2->e);
542
}
543
 
544
/* Search in the phi translation table for the translation of
545
   expression E in basic block PRED.
546
   Return the translated value, if found, NULL otherwise.  */
547
 
548
static inline pre_expr
549
phi_trans_lookup (pre_expr e, basic_block pred)
550
{
551
  void **slot;
552
  struct expr_pred_trans_d ept;
553
 
554
  ept.e = e;
555
  ept.pred = pred;
556
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
557
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
558
                                   NO_INSERT);
559
  if (!slot)
560
    return NULL;
561
  else
562
    return ((expr_pred_trans_t) *slot)->v;
563
}
564
 
565
 
566
/* Add the tuple mapping from {expression E, basic block PRED} to
567
   value V, to the phi translation table.  */
568
 
569
static inline void
570
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
571
{
572
  void **slot;
573
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
574
  new_pair->e = e;
575
  new_pair->pred = pred;
576
  new_pair->v = v;
577
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
578
                                                 pred->index);
579
 
580
  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
581
                                   new_pair->hashcode, INSERT);
582
  if (*slot)
583
    free (*slot);
584
  *slot = (void *) new_pair;
585
}
586
 
587
 
588
/* Add expression E to the expression set of value id V.  */
589
 
590
void
591
add_to_value (unsigned int v, pre_expr e)
592
{
593
  bitmap_set_t set;
594
 
595
  gcc_assert (get_expr_value_id (e) == v);
596
 
597
  if (v >= VEC_length (bitmap_set_t, value_expressions))
598
    {
599
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
600
                             v + 1);
601
    }
602
 
603
  set = VEC_index (bitmap_set_t, value_expressions, v);
604
  if (!set)
605
    {
606
      set = bitmap_set_new ();
607
      VEC_replace (bitmap_set_t, value_expressions, v, set);
608
    }
609
 
610
  bitmap_insert_into_set_1 (set, e, v, true);
611
}
612
 
613
/* Create a new bitmap set and return it.  The structure comes from
   bitmap_set_pool and its two bitmaps from the PRE bitmap obstack.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}
623
 
624
/* Return the value id for a PRE expression EXPR.  */
625
 
626
static unsigned int
627
get_expr_value_id (pre_expr expr)
628
{
629
  switch (expr->kind)
630
    {
631
    case CONSTANT:
632
      {
633
        unsigned int id;
634
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
635
        if (id == 0)
636
          {
637
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
638
            add_to_value (id, expr);
639
          }
640
        return id;
641
      }
642
    case NAME:
643
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
644
    case NARY:
645
      return PRE_EXPR_NARY (expr)->value_id;
646
    case REFERENCE:
647
      return PRE_EXPR_REFERENCE (expr)->value_id;
648
    default:
649
      gcc_unreachable ();
650
    }
651
}
652
 
653
/* Remove an expression EXPR from a bitmapped set.  */
654
 
655
static void
656
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
657
{
658
  unsigned int val  = get_expr_value_id (expr);
659
  if (!value_id_constant_p (val))
660
    {
661
      bitmap_clear_bit (set->values, val);
662
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
663
    }
664
}
665
 
666
static void
667
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
668
                          unsigned int val, bool allow_constants)
669
{
670
  if (allow_constants || !value_id_constant_p (val))
671
    {
672
      /* We specifically expect this and only this function to be able to
673
         insert constants into a set.  */
674
      bitmap_set_bit (set->values, val);
675
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
676
    }
677
}
678
 
679
/* Insert an expression EXPR into a bitmapped set.  Constant-valued
   expressions are skipped by the worker.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
686
 
687
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  Both the
   expression and the value bitmaps are copied.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}
695
 
696
 
697
/* Free memory used up by SET's bitmaps.  The bitmap_set structure
   itself is not returned to its pool here.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}
704
 
705
 
706
/* Generate an topological-ordered array of bitmap set SET.  */
707
 
708
static VEC(pre_expr, heap) *
709
sorted_array_from_bitmap_set (bitmap_set_t set)
710
{
711
  unsigned int i, j;
712
  bitmap_iterator bi, bj;
713
  VEC(pre_expr, heap) *result;
714
 
715
  /* Pre-allocate roughly enough space for the array.  */
716
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
717
 
718
  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
719
    {
720
      /* The number of expressions having a given value is usually
721
         relatively small.  Thus, rather than making a vector of all
722
         the expressions and sorting it by value-id, we walk the values
723
         and check in the reverse mapping that tells us what expressions
724
         have a given value, to filter those in our set.  As a result,
725
         the expressions are inserted in value-id order, which means
726
         topological order.
727
 
728
         If this is somehow a significant lose for some cases, we can
729
         choose which set to walk based on the set size.  */
730
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
731
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
732
        {
733
          if (bitmap_bit_p (set->expressions, j))
734
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
735
        }
736
    }
737
 
738
  return result;
739
}
740
 
741
/* Perform bitmapped set operation DEST &= ORIG.  */
742
 
743
static void
744
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
745
{
746
  bitmap_iterator bi;
747
  unsigned int i;
748
 
749
  if (dest != orig)
750
    {
751
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
752
 
753
      bitmap_and_into (dest->values, orig->values);
754
      bitmap_copy (temp, dest->expressions);
755
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
756
        {
757
          pre_expr expr = expression_for_id (i);
758
          unsigned int value_id = get_expr_value_id (expr);
759
          if (!bitmap_bit_p (dest->values, value_id))
760
            bitmap_clear_bit (dest->expressions, i);
761
        }
762
      BITMAP_FREE (temp);
763
    }
764
}
765
 
766
/* Subtract all values and expressions contained in ORIG from DEST.  */
767
 
768
static bitmap_set_t
769
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
770
{
771
  bitmap_set_t result = bitmap_set_new ();
772
  bitmap_iterator bi;
773
  unsigned int i;
774
 
775
  bitmap_and_compl (result->expressions, dest->expressions,
776
                    orig->expressions);
777
 
778
  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
779
    {
780
      pre_expr expr = expression_for_id (i);
781
      unsigned int value_id = get_expr_value_id (expr);
782
      bitmap_set_bit (result->values, value_id);
783
    }
784
 
785
  return result;
786
}
787
 
788
/* Subtract all the values in bitmap set B from bitmap set A.  */
789
 
790
static void
791
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
792
{
793
  unsigned int i;
794
  bitmap_iterator bi;
795
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
796
 
797
  bitmap_copy (temp, a->expressions);
798
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
799
    {
800
      pre_expr expr = expression_for_id (i);
801
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
802
        bitmap_remove_from_set (a, expr);
803
    }
804
  BITMAP_FREE (temp);
805
}
806
 
807
 
808
/* Return true if bitmapped set SET contains the value VALUE_ID.  */
809
 
810
static bool
811
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
812
{
813
  if (value_id_constant_p (value_id))
814
    return true;
815
 
816
  if (!set || bitmap_empty_p (set->expressions))
817
    return false;
818
 
819
  return bitmap_bit_p (set->values, value_id);
820
}
821
 
822
/* Return true if SET contains the expression EXPR itself (by its id),
   as opposed to merely containing EXPR's value.  */
static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}
827
 
828
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */
829
 
830
static void
831
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
832
                          const pre_expr expr)
833
{
834
  bitmap_set_t exprset;
835
  unsigned int i;
836
  bitmap_iterator bi;
837
 
838
  if (value_id_constant_p (lookfor))
839
    return;
840
 
841
  if (!bitmap_set_contains_value (set, lookfor))
842
    return;
843
 
844
  /* The number of expressions having a given value is usually
845
     significantly less than the total number of expressions in SET.
846
     Thus, rather than check, for each expression in SET, whether it
847
     has the value LOOKFOR, we walk the reverse mapping that tells us
848
     what expressions have a given value, and see if any of those
849
     expressions are in our set.  For large testcases, this is about
850
     5-10x faster than walking the bitmap.  If this is somehow a
851
     significant lose for some cases, we can choose which set to walk
852
     based on the set size.  */
853
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
854
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
855
    {
856
      if (bitmap_bit_p (set->expressions, i))
857
        {
858
          bitmap_clear_bit (set->expressions, i);
859
          bitmap_set_bit (set->expressions, get_expression_id (expr));
860
          return;
861
        }
862
    }
863
}
864
 
865
/* Return true if two bitmap sets are equal.  Only the value bitmaps
   are compared; sets holding the same values are considered equal even
   if they contain different expressions for those values.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}
872
 
873
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
874
   and add it otherwise.  */
875
 
876
static void
877
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
878
{
879
  unsigned int val = get_expr_value_id (expr);
880
 
881
  if (bitmap_set_contains_value (set, val))
882
    bitmap_set_replace_value (set, val, expr);
883
  else
884
    bitmap_insert_into_set (set, expr);
885
}
886
 
887
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  /* EXPR must already have been assigned its id.  */
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (set->values, val))
    bitmap_set_bit (set->expressions, expr->id);
}
907
 
908
/* Print out EXPR to outfile.  */
909
 
910
static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        /* N-ary operations print as {OPCODE,op0,...,opN-1}.  */
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            /* SSA names and declarations print bare; every other
               reference operand gets a tree-code prefix, with its
               operands wrapped in <...> when present.  */
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
                fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        /* Append the virtual use, if any, as @VUSE.  */
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
986
void debug_pre_expr (pre_expr);
987
 
988
/* Like print_pre_expr but always prints to stderr.  */
989
void
990
debug_pre_expr (pre_expr e)
991
{
992
  print_pre_expr (stderr, e);
993
  fprintf (stderr, "\n");
994
}
995
 
996
/* Print out SET to OUTFILE.  */
997
 
998
static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  /* Output format: SETNAME[BLOCKINDEX] := { expr (vvvv), ... }.  */
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          /* Each expression is followed by its value id.  */
          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}
1023
 
1024
void debug_bitmap_set (bitmap_set_t);
1025
 
1026
/* Like print_bitmap_set, but always prints to stderr.  */

void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}
1031
 
1032
/* Print out the expressions that have VAL to OUTFILE.  */
1033
 
1034
void
1035
print_value_expressions (FILE *outfile, unsigned int val)
1036
{
1037
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
1038
  if (set)
1039
    {
1040
      char s[10];
1041
      sprintf (s, "%04d", val);
1042
      print_bitmap_set (outfile, set, s, 0);
1043
    }
1044
}
1045
 
1046
 
1047
/* Like print_value_expressions, but always prints to stderr.  */

void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
1052
 
1053
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1054
   represent it.  */
1055
 
1056
static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  /* First probe the expression table with a stack-allocated
     CONSTANT pre_expr; reuse the existing entry if found.  */
  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  /* Otherwise allocate a fresh CONSTANT pre_expr, give it an
     expression id and register it under its constant value id.  */
  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
1078
 
1079
/* Given a value id V, find the actual tree representing the constant
1080
   value if there is one, and return it. Return NULL if we can't find
1081
   a constant.  */
1082
 
1083
static tree
1084
get_constant_for_value_id (unsigned int v)
1085
{
1086
  if (value_id_constant_p (v))
1087
    {
1088
      unsigned int i;
1089
      bitmap_iterator bi;
1090
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);
1091
 
1092
      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1093
        {
1094
          pre_expr expr = expression_for_id (i);
1095
          if (expr->kind == CONSTANT)
1096
            return PRE_EXPR_CONSTANT (expr);
1097
        }
1098
    }
1099
  return NULL;
1100
}
1101
 
1102
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1103
   Currently only supports constants and SSA_NAMES.  */
1104
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As they all
         do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              /* An equivalent expression is already known; release
                 the freshly allocated one and return the old.  */
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  /* T is neither a name, a constant, nor a known nary op.  */
  return NULL;
}
1137
 
1138
/* Return the folded version of T if T, when folded, is a gimple
1139
   min_invariant.  Otherwise, return T.  */
1140
 
1141
static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_expression:
            if (nary->opcode == TRUTH_NOT_EXPR)
              goto do_unary;
            if (nary->opcode != TRUTH_AND_EXPR
                && nary->opcode != TRUTH_OR_EXPR
                && nary->opcode != TRUTH_XOR_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              /* Substitute each non-constant operand by the constant
                 known for its value id, if any.  */
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
do_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        vn_reference_op_t op;

        /* Try to simplify the translated expression if it is
           a call to a builtin function with at most two arguments.  */
        op = VEC_index (vn_reference_op_s, operands, 0);
        if (op->opcode == CALL_EXPR
            && TREE_CODE (op->op0) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
            && VEC_length (vn_reference_op_s, operands) >= 2
            && VEC_length (vn_reference_op_s, operands) <= 3)
          {
            vn_reference_op_t arg0, arg1 = NULL;
            bool anyconst = false;
            arg0 = VEC_index (vn_reference_op_s, operands, 1);
            if (VEC_length (vn_reference_op_s, operands) > 2)
              arg1 = VEC_index (vn_reference_op_s, operands, 2);
            /* Folding can only succeed when at least one argument is
               constant (or a constant address).  */
            if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
                || (arg0->opcode == ADDR_EXPR
                    && is_gimple_min_invariant (arg0->op0)))
              anyconst = true;
            if (arg1
                && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
                    || (arg1->opcode == ADDR_EXPR
                        && is_gimple_min_invariant (arg1->op0))))
              anyconst = true;
            if (anyconst)
              {
                tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                               arg1 ? 2 : 1,
                                               arg0->op0,
                                               arg1 ? arg1->op0 : NULL);
                /* Strip a wrapping conversion before testing for a
                   constant result.  */
                if (folded
                    && TREE_CODE (folded) == NOP_EXPR)
                  folded = TREE_OPERAND (folded, 0);
                if (folded
                    && is_gimple_min_invariant (folded))
                  return get_or_alloc_expr_for_constant (folded);
              }
          }
          return e;
        }
    default:
      return e;
    }
  return e;
}
1282
 
1283
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1284
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
1285
   in case the new vuse doesn't change the value id of the OPERANDS.  */
1286
 
1287
static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  /* If the VUSE is not defined in PHIBLOCK it is already valid in
     BLOCK and needs no translation.  */
  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    /* Walk the VUSE chain backwards past statements that cannot
       clobber the reference, stopping at a PHI or when we leave
       PHIBLOCK.  */
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
1356
 
1357
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1358
   SET2.  This is used to avoid making a set consisting of the union
1359
   of PA_IN and ANTIC_IN during insert.  */
1360
 
1361
static inline pre_expr
1362
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1363
{
1364
  pre_expr result;
1365
 
1366
  result = bitmap_find_leader (set1, val, NULL);
1367
  if (!result && set2)
1368
    result = bitmap_find_leader (set2, val, NULL);
1369
  return result;
1370
}
1371
 
1372
/* Get the tree type for our PRE expression e.  */
1373
 
1374
static tree
1375
get_expr_type (const pre_expr e)
1376
{
1377
  switch (e->kind)
1378
    {
1379
    case NAME:
1380
      return TREE_TYPE (PRE_EXPR_NAME (e));
1381
    case CONSTANT:
1382
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1383
    case REFERENCE:
1384
      return PRE_EXPR_REFERENCE (e)->type;
1385
    case NARY:
1386
      return PRE_EXPR_NARY (e)->type;
1387
    }
1388
  gcc_unreachable();
1389
}
1390
 
1391
/* Get a representative SSA_NAME for a given expression.
1392
   Since all of our sub-expressions are treated as values, we require
1393
   them to be SSA_NAME's for simplicity.
1394
   Prior versions of GVNPRE used to use "value handles" here, so that
1395
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
1396
   either case, the operands are really values (IE we do not expect
1397
   them to be usable without finding leaders).  */
1398
 
1399
static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  The "pretmp" variable is reused as long as
     consecutive requests are for the same type.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  /* A constant expression values to itself; everything else values
     to the new name.  */
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
1471
 
1472
 
1473
 
1474
static pre_expr
1475
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1476
               basic_block pred, basic_block phiblock);
1477
 
1478
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1479
   the phis in PRED.  Return NULL if we can't find a leader for each part
1480
   of the translated expression.  */
1481
 
1482
static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        struct vn_nary_op_s newnary;
        /* The NARY structure is only guaranteed to have been
           allocated to the nary->length operands.  */
        memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
                                 - sizeof (tree) * (4 - nary->length)));

        /* Translate every SSA_NAME operand through the PHIs; if a
           translation yields a different expression, use an SSA_NAME
           representative for it.  */
        for (i = 0; i < newnary.length; i++)
          {
            if (TREE_CODE (newnary.op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary.op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary.op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            /* See whether the translated operation is already value
               numbered; this also re-finds NARY for the new operands.  */
            tree result = vn_nary_op_lookup_pieces (newnary.length,
                                                    newnary.opcode,
                                                    newnary.type,
                                                    newnary.op[0],
                                                    newnary.op[1],
                                                    newnary.op[2],
                                                    newnary.op[3],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                /* Unknown operation: create a new value id and insert
                   the translated pieces into the value table.  */
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
                nary = vn_nary_op_insert_pieces (newnary.length,
                                                 newnary.opcode,
                                                 newnary.type,
                                                 newnary.op[0],
                                                 newnary.op[1],
                                                 newnary.op[2],
                                                 newnary.op[3],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j;
        vn_reference_op_t operand;
        vn_reference_t newref;

        /* Translate op0/op1/op2 of every reference operand; the loop
           breaks early (leaving i short) when a part can't be
           translated or inserted.  */
        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree oldop0 = operand->op0;
            tree oldop1 = operand->op1;
            tree oldop2 = operand->op2;
            tree op0 = oldop0;
            tree op1 = oldop1;
            tree op2 = oldop2;
            tree type = operand->type;
            vn_reference_op_s newop = *operand;

            if (op0 && TREE_CODE (op0) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op0)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op0 = name;
                  }
                else if (!opresult)
                  break;
              }
            changed |= op0 != oldop0;

            if (op1 && TREE_CODE (op1) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op1)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op1 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op1 && !is_gimple_min_invariant (op1))
              break;
            changed |= op1 != oldop1;
            if (op2 && TREE_CODE (op2) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op2)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op2 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op2 && !is_gimple_min_invariant (op2))
              break;
            changed |= op2 != oldop2;

            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant */
            if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
              newop.opcode = TREE_CODE (op0);
            newop.type = type;
            newop.op0 = op0;
            newop.op1 = op1;
            newop.op2 = op2;
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == INDIRECT_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        /* i falling short of the operand count means the loop above
           bailed out; the whole reference is untranslatable.  */
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, true);
            if (newref)
              VEC_free (vn_reference_op_s, heap, newoperands);

            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
                return get_or_alloc_expr_for_constant (result);
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                /* The value id can only be kept when only the vuse
                   changed and it still describes the same value.  */
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        /* Only names defined by a PHI in PHIBLOCK need translation.  */
        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant. */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
1797
 
1798
/* Wrapper around phi_translate_1 providing caching functionality.  */
1799
 
1800
static pre_expr
1801
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1802
               basic_block pred, basic_block phiblock)
1803
{
1804
  pre_expr phitrans;
1805
 
1806
  if (!expr)
1807
    return NULL;
1808
 
1809
  /* Constants contain no values that need translation.  */
1810
  if (expr->kind == CONSTANT)
1811
    return expr;
1812
 
1813
  if (value_id_constant_p (get_expr_value_id (expr)))
1814
    return expr;
1815
 
1816
  if (expr->kind != NAME)
1817
    {
1818
      phitrans = phi_trans_lookup (expr, pred);
1819
      if (phitrans)
1820
        return phitrans;
1821
    }
1822
 
1823
  /* Translate.  */
1824
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1825
 
1826
  /* Don't add empty translations to the cache.  Neither add
1827
     translations of NAMEs as those are cheap to translate.  */
1828
  if (phitrans
1829
      && expr->kind != NAME)
1830
    phi_trans_add (expr, phitrans, pred);
1831
 
1832
  return phitrans;
1833
}
1834
 
1835
 
1836
/* For each expression in SET, translate the values through phi nodes
1837
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1838
   expressions in DEST.  */
1839
 
1840
static void
1841
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1842
                   basic_block phiblock)
1843
{
1844
  VEC (pre_expr, heap) *exprs;
1845
  pre_expr expr;
1846
  int i;
1847
 
1848
  if (gimple_seq_empty_p (phi_nodes (phiblock)))
1849
    {
1850
      bitmap_set_copy (dest, set);
1851
      return;
1852
    }
1853
 
1854
  exprs = sorted_array_from_bitmap_set (set);
1855
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
1856
    {
1857
      pre_expr translated;
1858
      translated = phi_translate (expr, set, NULL, pred, phiblock);
1859
      if (!translated)
1860
        continue;
1861
 
1862
      /* We might end up with multiple expressions from SET being
1863
         translated to the same value.  In this case we do not want
1864
         to retain the NARY or REFERENCE expression but prefer a NAME
1865
         which would be the leader.  */
1866
      if (translated->kind == NAME)
1867
        bitmap_value_replace_in_set (dest, translated);
1868
      else
1869
        bitmap_value_insert_into_set (dest, translated);
1870
    }
1871
  VEC_free (pre_expr, heap, exprs);
1872
}
1873
 
1874
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      /* A constant value is its own leader; return the CONSTANT
         expression recorded for it, independent of SET.  */
      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant lose for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
                                set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              /* Skip a candidate whose non-PHI definition sits in the
                 same block at or after STMT (by statement uid) — such
                 a definition would not dominate STMT.  */
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  && gimple_uid (def_stmt) >= gimple_uid (stmt))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
1931
 
1932
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  /* Without a virtual use there is nothing a store could kill.  */
  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  EXPR_DIES keeps two bits
     per expression: bit 2*id means "result cached", bit 2*id+1 holds
     the cached answer.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  If constructing the
         ao_ref fails we must conservatively assume the value dies.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
2010
 
2011
 
2012
/* Return true if value VAL has a leader in SET1, or in the optional
   (possibly NULL) SET2.  */
#define union_contains_value(SET1, SET2, VAL)                   \
  (bitmap_set_contains_value ((SET1), (VAL))                    \
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
2015
 
2016
/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.
2017
 */
2018
static bool
2019
vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
2020
                   vn_reference_op_t vro)
2021
{
2022
  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
2023
    {
2024
      struct pre_expr_d temp;
2025
      temp.kind = NAME;
2026
      temp.id = 0;
2027
      PRE_EXPR_NAME (&temp) = vro->op0;
2028
      temp.id = lookup_expression_id (&temp);
2029
      if (temp.id == 0)
2030
        return false;
2031
      if (!union_contains_value (set1, set2,
2032
                                 get_expr_value_id (&temp)))
2033
        return false;
2034
    }
2035
  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
2036
    {
2037
      struct pre_expr_d temp;
2038
      temp.kind = NAME;
2039
      temp.id = 0;
2040
      PRE_EXPR_NAME (&temp) = vro->op1;
2041
      temp.id = lookup_expression_id (&temp);
2042
      if (temp.id == 0)
2043
        return false;
2044
      if (!union_contains_value (set1, set2,
2045
                                 get_expr_value_id (&temp)))
2046
        return false;
2047
    }
2048
 
2049
  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2050
    {
2051
      struct pre_expr_d temp;
2052
      temp.kind = NAME;
2053
      temp.id = 0;
2054
      PRE_EXPR_NAME (&temp) = vro->op2;
2055
      temp.id = lookup_expression_id (&temp);
2056
      if (temp.id == 0)
2057
        return false;
2058
      if (!union_contains_value (set1, set2,
2059
                                 get_expr_value_id (&temp)))
2060
        return false;
2061
    }
2062
 
2063
  return true;
2064
}
2065
 
2066
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
               basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      /* An SSA_NAME is valid iff it is available on exit from BLOCK.  */
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        /* Every SSA_NAME operand of the NARY needs a leader in
           SET1 U SET2.  */
        for (i = 0; i < nary->length; i++)
          {
            if (TREE_CODE (nary->op[i]) == SSA_NAME)
              {
                struct pre_expr_d temp;
                temp.kind = NAME;
                temp.id = 0;
                PRE_EXPR_NAME (&temp) = nary->op[i];
                temp.id = lookup_expression_id (&temp);
                if (temp.id == 0)
                  return false;
                if (!union_contains_value (set1, set2,
                                           get_expr_value_id (&temp)))
                  return false;
              }
          }
        /* If the NARY may trap make sure the block does not contain
           a possible exit point.
           ???  This is overly conservative if we translate AVAIL_OUT
           as the available expression might be after the exit point.  */
        if (BB_MAY_NOTRETURN (block)
            && vn_nary_may_trap (nary))
          return false;
        return true;
      }
      break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;
        unsigned int i;

        /* Each reference operand must itself be valid in the sets.  */
        for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
          {
            if (!vro_valid_in_sets (set1, set2, vro))
              return false;
          }
        if (ref->vuse)
          {
            gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
            /* The definition of the virtual operand must dominate
               BLOCK (or be in BLOCK itself) for the load to be valid
               at its top.  */
            if (!gimple_nop_p (def_stmt)
                && gimple_bb (def_stmt) != block
                && !dominated_by_p (CDI_DOMINATORS,
                                    block, gimple_bb (def_stmt)))
              return false;
          }
        /* Finally the load must not be killed by a store in BLOCK.  */
        return !value_dies_in_block_x (expr, block);
      }
    default:
      gcc_unreachable ();
    }
}
2136
 
2137
/* Clean the set of expressions that are no longer valid in SET1 or
2138
   SET2.  This means expressions that are made up of values we have no
2139
   leaders for in SET1 or SET2.  This version is used for partial
2140
   anticipation, which means it is not valid in either ANTIC_IN or
2141
   PA_IN.  */
2142
 
2143
static void
2144
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2145
{
2146
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2147
  pre_expr expr;
2148
  int i;
2149
 
2150
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2151
    {
2152
      if (!valid_in_sets (set1, set2, expr, block))
2153
        bitmap_remove_from_set (set1, expr);
2154
    }
2155
  VEC_free (pre_expr, heap, exprs);
2156
}
2157
 
2158
/* Clean the set of expressions that are no longer valid in SET.  This
2159
   means expressions that are made up of values we have no leaders for
2160
   in SET.  */
2161
 
2162
static void
2163
clean (bitmap_set_t set, basic_block block)
2164
{
2165
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2166
  pre_expr expr;
2167
  int i;
2168
 
2169
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2170
    {
2171
      if (!valid_in_sets (set, NULL, expr, block))
2172
        bitmap_remove_from_set (set, expr);
2173
    }
2174
  VEC_free (pre_expr, heap, exprs);
2175
}
2176
 
2177
static sbitmap has_abnormal_preds;
2178
 
2179
/* List of blocks that may have changed during ANTIC computation and
2180
   thus need to be iterated over.  */
2181
 
2182
static sbitmap changed_blocks;
2183
 
2184
/* Decide whether to defer a block for a later iteration, or PHI
2185
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
2186
   should defer the block, and true if we processed it.  */
2187
 
2188
static bool
2189
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2190
                              basic_block block, basic_block phiblock)
2191
{
2192
  if (!BB_VISITED (phiblock))
2193
    {
2194
      SET_BIT (changed_blocks, block->index);
2195
      BB_VISITED (block) = 0;
2196
      BB_DEFERRED (block) = 1;
2197
      return false;
2198
    }
2199
  else
2200
    phi_translate_set (dest, source, block, phiblock);
2201
  return true;
2202
}
2203
 
2204
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Returns true if ANTIC_IN (BLOCK) changed (or the block was
   deferred), which drives another iteration of the fixpoint loop in
   compute_antic.  */

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
         phi translate the maximal set, which is incredibly slow
         (since the maximal set often has 300+ members, even when you
         have a small number of blocks).
         Basically, we defer the computation of ANTIC for this block
         until we have processed it's successor, which will inevitably
         have a *much* smaller set of values to phi translate once
         clean has been run on it.
         The cost of doing this is that we technically perform more
         iterations, however, they are lower cost iterations.

         Timings for PRE on tramp3d-v4:
         without maximal set fix: 11 seconds
         with maximal set fix/without deferring: 26 seconds
         with maximal set fix/with deferring: 11 seconds
     */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
                                        block, succ_bb))
        {
          changed = true;
          goto maybe_dump_sets;
        }
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first = NULL;

      /* Pick the first already-visited successor as the seed of the
         intersection; queue the remaining visited ones.  */
      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (!first
              && BB_VISITED (e->dest))
            first = e->dest;
          else if (BB_VISITED (e->dest))
            VEC_quick_push (basic_block, worklist, e->dest);
        }

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
        {
          SET_BIT (changed_blocks, block->index);
          BB_VISITED (block) = 0;
          BB_DEFERRED (block) = 1;
          changed = true;
          VEC_free (basic_block, heap, worklist);
          goto maybe_dump_sets;
        }

      if (!gimple_seq_empty_p (phi_nodes (first)))
        phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
        bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      /* Intersect in the remaining visited successors, phi
         translating where needed.  */
      for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
        {
          if (!gimple_seq_empty_p (phi_nodes (bprime)))
            {
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
              bitmap_set_and (ANTIC_OUT, tmp);
              bitmap_set_free (tmp);
            }
          else
            bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
        }
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
                                          TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
                                  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  /* !old->expressions can happen when we deferred a block.  */
  if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      /* Predecessors read our ANTIC_IN, so they must be revisited.  */
      FOR_EACH_EDGE (e, ei, block->preds)
        SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
        {
          if (ANTIC_OUT)
            print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

          print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
                            block->index);

          if (S)
            print_bitmap_set (dump_file, S, "S", block->index);
        }
      else
        {
          fprintf (dump_file,
                   "Block %d was deferred for a future iteration.\n",
                   block->index);
        }
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
2373
 
2374
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
                                  - ANTIC_IN[BLOCK])

   Returns true if PA_IN (BLOCK) changed, driving another iteration of
   the fixpoint loop in compute_antic.  */
static bool
compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
        phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime;

      /* Collect all non-back-edge successors.  */
      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (e->flags & EDGE_DFS_BACK)
            continue;
          VEC_quick_push (basic_block, worklist, e->dest);
        }
      if (VEC_length (basic_block, worklist) > 0)
        {
          for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
            {
              /* Note: this inner I deliberately shadows the outer
                 loop index.  */
              unsigned int i;
              bitmap_iterator bi;

              /* Union in ANTIC_IN of the successor ...  */
              FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
                bitmap_value_insert_into_set (PA_OUT,
                                              expression_for_id (i));
              /* ... and its PA_IN, phi translated if the successor
                 has PHI nodes.  */
              if (!gimple_seq_empty_p (phi_nodes (bprime)))
                {
                  bitmap_set_t pa_in = bitmap_set_new ();
                  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
                  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
                    bitmap_value_insert_into_set (PA_OUT,
                                                  expression_for_id (i));
                  bitmap_set_free (pa_in);
                }
              else
                FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
                  bitmap_value_insert_into_set (PA_OUT,
                                                expression_for_id (i));
            }
        }
      VEC_free (basic_block, heap, worklist);
    }

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
  bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      /* Predecessors read our PA_IN, so they must be revisited.  */
      FOR_EACH_EDGE (e, ei, block->preds)
        SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
        print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
2511
 
2512
/* Compute ANTIC and partial ANTIC sets.  Iterates compute_antic_aux
   (and, when do_partial_partial, compute_partial_antic_aux) over all
   blocks in postorder until a fixpoint is reached, tracking the
   still-dirty blocks in CHANGED_BLOCKS.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  sbitmap_zero (has_abnormal_preds);

  FOR_EACH_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
        {
          /* Clear any stale DFS-back-edge marking; it is recomputed
             below for the partial-antic phase.  */
          e->flags &= ~EDGE_DFS_BACK;
          if (e->flags & EDGE_ABNORMAL)
            {
              SET_BIT (has_abnormal_preds, block->index);
              break;
            }
        }

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
  BB_VISITED (EXIT_BLOCK_PTR) = 1;
  PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();

  /* Start with every block dirty.  */
  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  sbitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      num_iterations++;
      changed = false;
      /* Walk blocks in reverse postorder-array order.  */
      for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
        {
          if (TEST_BIT (changed_blocks, postorder[i]))
            {
              basic_block block = BASIC_BLOCK (postorder[i]);
              changed |= compute_antic_aux (block,
                                            TEST_BIT (has_abnormal_preds,
                                                      block->index));
            }
        }
#ifdef ENABLE_CHECKING
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_assert (num_iterations < 500);
#endif
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
                              num_iterations);

  if (do_partial_partial)
    {
      /* Second fixpoint: partial anticipation.  Back edges must be
         identified since partial antic never propagates across them.  */
      sbitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Starting iteration %d\n", num_iterations);
          num_iterations++;
          changed = false;
          for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
            {
              if (TEST_BIT (changed_blocks, postorder[i]))
                {
                  basic_block block = BASIC_BLOCK (postorder[i]);
                  changed
                    |= compute_partial_antic_aux (block,
                                                  TEST_BIT (has_abnormal_preds,
                                                            block->index));
                }
            }
#ifdef ENABLE_CHECKING
          /* Theoretically possible, but *highly* unlikely.  */
          gcc_assert (num_iterations < 500);
#endif
        }
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
                                  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
2616
 
2617
/* Return true if we can value number the call in STMT.  This is true
2618
   if we have a pure or constant call.  */
2619
 
2620
static bool
2621
can_value_number_call (gimple stmt)
2622
{
2623
  if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2624
    return true;
2625
  return false;
2626
}
2627
 
2628
/* Return true if OP is a tree which we can perform PRE on.
   This may not match the operations we can value number, but in
   a perfect world would.  */

static bool
can_PRE_operation (tree op)
{
  /* Unary, binary and comparison expressions, plus the reference and
     call forms listed here, are the only trees insertion knows how to
     regenerate.  */
  return UNARY_CLASS_P (op)
    || BINARY_CLASS_P (op)
    || COMPARISON_CLASS_P (op)
    || TREE_CODE (op) == INDIRECT_REF
    || TREE_CODE (op) == COMPONENT_REF
    || TREE_CODE (op) == VIEW_CONVERT_EXPR
    || TREE_CODE (op) == CALL_EXPR
    || TREE_CODE (op) == ARRAY_REF;
}
2644
 
2645
 
2646
/* Inserted expressions are placed onto this worklist, which is used
2647
   for performing quick dead code elimination of insertions we made
2648
   that didn't turn out to be necessary.   */
2649
static VEC(gimple,heap) *inserted_exprs;
2650
static bitmap inserted_phi_names;
2651
 
2652
/* Pool allocated fake store expressions are placed onto this
2653
   worklist, which, after performing dead code elimination, is walked
2654
   to see which expressions need to be put into GC'able memory  */
2655
static VEC(gimple, heap) *need_creation;
2656
 
2657
/* The actual worker for create_component_ref_by_pieces.  */
2658
 
2659
static tree
2660
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2661
                                  unsigned int *operand, gimple_seq *stmts,
2662
                                  gimple domstmt)
2663
{
2664
  vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2665
                                        *operand);
2666
  tree genop;
2667
  ++*operand;
2668
  switch (currop->opcode)
2669
    {
2670
    case CALL_EXPR:
2671
      {
2672
        tree folded, sc = NULL_TREE;
2673
        unsigned int nargs = 0;
2674
        tree fn, *args;
2675
        if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2676
          fn = currop->op0;
2677
        else
2678
          {
2679
            pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2680
            fn = find_or_generate_expression (block, op0, stmts, domstmt);
2681
            if (!fn)
2682
              return NULL_TREE;
2683
          }
2684
        if (currop->op1)
2685
          {
2686
            pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2687
            sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2688
            if (!sc)
2689
              return NULL_TREE;
2690
          }
2691
        args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2692
                                          ref->operands) - 1);
2693
        while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2694
          {
2695
            args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2696
                                                            operand, stmts,
2697
                                                            domstmt);
2698
            if (!args[nargs])
2699
              {
2700
                free (args);
2701
                return NULL_TREE;
2702
              }
2703
            nargs++;
2704
          }
2705
        folded = build_call_array (currop->type,
2706
                                   (TREE_CODE (fn) == FUNCTION_DECL
2707
                                    ? build_fold_addr_expr (fn) : fn),
2708
                                   nargs, args);
2709
        free (args);
2710
        if (sc)
2711
          CALL_EXPR_STATIC_CHAIN (folded) = sc;
2712
        return folded;
2713
      }
2714
      break;
2715
    case TARGET_MEM_REF:
2716
      {
2717
        vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2718
                                              *operand);
2719
        pre_expr op0expr;
2720
        tree genop0 = NULL_TREE;
2721
        tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2722
                                                        stmts, domstmt);
2723
        if (!baseop)
2724
          return NULL_TREE;
2725
        if (currop->op0)
2726
          {
2727
            op0expr = get_or_alloc_expr_for (currop->op0);
2728
            genop0 = find_or_generate_expression (block, op0expr,
2729
                                                  stmts, domstmt);
2730
            if (!genop0)
2731
              return NULL_TREE;
2732
          }
2733
        if (DECL_P (baseop))
2734
          return build6 (TARGET_MEM_REF, currop->type,
2735
                         baseop, NULL_TREE,
2736
                         genop0, currop->op1, currop->op2,
2737
                         unshare_expr (nextop->op1));
2738
        else
2739
          return build6 (TARGET_MEM_REF, currop->type,
2740
                         NULL_TREE, baseop,
2741
                         genop0, currop->op1, currop->op2,
2742
                         unshare_expr (nextop->op1));
2743
      }
2744
      break;
2745
    case ADDR_EXPR:
2746
      if (currop->op0)
2747
        {
2748
          gcc_assert (is_gimple_min_invariant (currop->op0));
2749
          return currop->op0;
2750
        }
2751
      /* Fallthrough.  */
2752
    case REALPART_EXPR:
2753
    case IMAGPART_EXPR:
2754
    case VIEW_CONVERT_EXPR:
2755
      {
2756
        tree folded;
2757
        tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2758
                                                        operand,
2759
                                                        stmts, domstmt);
2760
        if (!genop0)
2761
          return NULL_TREE;
2762
        folded = fold_build1 (currop->opcode, currop->type,
2763
                              genop0);
2764
        return folded;
2765
      }
2766
      break;
2767
    case ALIGN_INDIRECT_REF:
2768
    case MISALIGNED_INDIRECT_REF:
2769
    case INDIRECT_REF:
2770
      {
2771
        tree folded;
2772
        tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2773
                                                        operand,
2774
                                                        stmts, domstmt);
2775
        if (!genop1)
2776
          return NULL_TREE;
2777
        genop1 = fold_convert (build_pointer_type (currop->type),
2778
                               genop1);
2779
 
2780
        if (currop->opcode == MISALIGNED_INDIRECT_REF)
2781
          folded = fold_build2 (currop->opcode, currop->type,
2782
                                genop1, currop->op1);
2783
        else
2784
          folded = fold_build1 (currop->opcode, currop->type,
2785
                                genop1);
2786
        return folded;
2787
      }
2788
      break;
2789
    case BIT_FIELD_REF:
2790
      {
2791
        tree folded;
2792
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2793
                                                        stmts, domstmt);
2794
        pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2795
        pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2796
        tree genop1;
2797
        tree genop2;
2798
 
2799
        if (!genop0)
2800
          return NULL_TREE;
2801
        genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2802
        if (!genop1)
2803
          return NULL_TREE;
2804
        genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2805
        if (!genop2)
2806
          return NULL_TREE;
2807
        folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2808
                              genop2);
2809
        return folded;
2810
      }
2811
 
2812
      /* For array ref vn_reference_op's, operand 1 of the array ref
2813
         is op0 of the reference op and operand 3 of the array ref is
2814
         op1.  */
2815
    case ARRAY_RANGE_REF:
2816
    case ARRAY_REF:
2817
      {
2818
        tree genop0;
2819
        tree genop1 = currop->op0;
2820
        pre_expr op1expr;
2821
        tree genop2 = currop->op1;
2822
        pre_expr op2expr;
2823
        tree genop3 = currop->op2;
2824
        pre_expr op3expr;
2825
        genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2826
                                                   stmts, domstmt);
2827
        if (!genop0)
2828
          return NULL_TREE;
2829
        op1expr = get_or_alloc_expr_for (genop1);
2830
        genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2831
        if (!genop1)
2832
          return NULL_TREE;
2833
        if (genop2)
2834
          {
2835
            /* Drop zero minimum index.  */
2836
            if (tree_int_cst_equal (genop2, integer_zero_node))
2837
              genop2 = NULL_TREE;
2838
            else
2839
              {
2840
                op2expr = get_or_alloc_expr_for (genop2);
2841
                genop2 = find_or_generate_expression (block, op2expr, stmts,
2842
                                                      domstmt);
2843
                if (!genop2)
2844
                  return NULL_TREE;
2845
              }
2846
          }
2847
        if (genop3)
2848
          {
2849
            tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2850
            /* We can't always put a size in units of the element alignment
2851
               here as the element alignment may be not visible.  See
2852
               PR43783.  Simply drop the element size for constant
2853
               sizes.  */
2854
            if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2855
              genop3 = NULL_TREE;
2856
            else
2857
              {
2858
                genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2859
                                     size_int (TYPE_ALIGN_UNIT (elmt_type)));
2860
                op3expr = get_or_alloc_expr_for (genop3);
2861
                genop3 = find_or_generate_expression (block, op3expr, stmts,
2862
                                                      domstmt);
2863
                if (!genop3)
2864
                  return NULL_TREE;
2865
              }
2866
          }
2867
        return build4 (currop->opcode, currop->type, genop0, genop1,
2868
                       genop2, genop3);
2869
      }
2870
    case COMPONENT_REF:
2871
      {
2872
        tree op0;
2873
        tree op1;
2874
        tree genop2 = currop->op1;
2875
        pre_expr op2expr;
2876
        op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2877
                                                stmts, domstmt);
2878
        if (!op0)
2879
          return NULL_TREE;
2880
        /* op1 should be a FIELD_DECL, which are represented by
2881
           themselves.  */
2882
        op1 = currop->op0;
2883
        if (genop2)
2884
          {
2885
            op2expr = get_or_alloc_expr_for (genop2);
2886
            genop2 = find_or_generate_expression (block, op2expr, stmts,
2887
                                                  domstmt);
2888
            if (!genop2)
2889
              return NULL_TREE;
2890
          }
2891
 
2892
        return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2893
                            genop2);
2894
      }
2895
      break;
2896
    case SSA_NAME:
2897
      {
2898
        pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2899
        genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2900
        return genop;
2901
      }
2902
    case STRING_CST:
2903
    case INTEGER_CST:
2904
    case COMPLEX_CST:
2905
    case VECTOR_CST:
2906
    case REAL_CST:
2907
    case CONSTRUCTOR:
2908
    case VAR_DECL:
2909
    case PARM_DECL:
2910
    case CONST_DECL:
2911
    case RESULT_DECL:
2912
    case FUNCTION_DECL:
2913
      return currop->op0;
2914
 
2915
    default:
2916
      gcc_unreachable ();
2917
    }
2918
}
2919
 
2920
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2921
   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
2922
   trying to rename aggregates into ssa form directly, which is a no no.
2923
 
2924
   Thus, this routine doesn't create temporaries, it just builds a
2925
   single access expression for the array, calling
2926
   find_or_generate_expression to build the innermost pieces.
2927
 
2928
   This function is a subroutine of create_expression_by_pieces, and
2929
   should not be called on it's own unless you really know what you
2930
   are doing.  */
2931
 
2932
static tree
2933
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2934
                                gimple_seq *stmts, gimple domstmt)
2935
{
2936
  unsigned int op = 0;
2937
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2938
}
2939
 
2940
/* Find a leader for an expression, or generate one using
2941
   create_expression_by_pieces if it's ANTIC but
2942
   complex.
2943
   BLOCK is the basic_block we are looking for leaders in.
2944
   EXPR is the expression to find a leader or generate for.
2945
   STMTS is the statement list to put the inserted expressions on.
2946
   Returns the SSA_NAME of the LHS of the generated expression or the
2947
   leader.
2948
   DOMSTMT if non-NULL is a statement that should be dominated by
2949
   all uses in the generated expression.  If DOMSTMT is non-NULL this
2950
   routine can fail and return NULL_TREE.  Otherwise it will assert
2951
   on failure.  */
2952
 
2953
static tree
2954
find_or_generate_expression (basic_block block, pre_expr expr,
2955
                             gimple_seq *stmts, gimple domstmt)
2956
{
2957
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2958
                                        get_expr_value_id (expr), domstmt);
2959
  tree genop = NULL;
2960
  if (leader)
2961
    {
2962
      if (leader->kind == NAME)
2963
        genop = PRE_EXPR_NAME (leader);
2964
      else if (leader->kind == CONSTANT)
2965
        genop = PRE_EXPR_CONSTANT (leader);
2966
    }
2967
 
2968
  /* If it's still NULL, it must be a complex expression, so generate
2969
     it recursively.  Not so for FRE though.  */
2970
  if (genop == NULL
2971
      && !in_fre)
2972
    {
2973
      bitmap_set_t exprset;
2974
      unsigned int lookfor = get_expr_value_id (expr);
2975
      bool handled = false;
2976
      bitmap_iterator bi;
2977
      unsigned int i;
2978
 
2979
      exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2980
      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2981
        {
2982
          pre_expr temp = expression_for_id (i);
2983
          if (temp->kind != NAME)
2984
            {
2985
              handled = true;
2986
              genop = create_expression_by_pieces (block, temp, stmts,
2987
                                                   domstmt,
2988
                                                   get_expr_type (expr));
2989
              break;
2990
            }
2991
        }
2992
      if (!handled && domstmt)
2993
        return NULL_TREE;
2994
 
2995
      gcc_assert (handled);
2996
    }
2997
  return genop;
2998
}
2999
 
3000
#define NECESSARY GF_PLF_1
3001
 
3002
/* Create an expression in pieces, so that we can handle very complex
3003
   expressions that may be ANTIC, but not necessary GIMPLE.
3004
   BLOCK is the basic block the expression will be inserted into,
3005
   EXPR is the expression to insert (in value form)
3006
   STMTS is a statement list to append the necessary insertions into.
3007
 
3008
   This function will die if we hit some value that shouldn't be
3009
   ANTIC but is (IE there is no leader for it, or its components).
3010
   This function may also generate expressions that are themselves
3011
   partially or fully redundant.  Those that are will be either made
3012
   fully redundant during the next iteration of insert (for partially
3013
   redundant ones), or eliminated by eliminate (for fully redundant
3014
   ones).
3015
 
3016
   If DOMSTMT is non-NULL then we make sure that all uses in the
3017
   expressions dominate that statement.  In this case the function
3018
   can return NULL_TREE to signal failure.  */
3019
 
3020
static tree
3021
create_expression_by_pieces (basic_block block, pre_expr expr,
3022
                             gimple_seq *stmts, gimple domstmt, tree type)
3023
{
3024
  tree temp, name;
3025
  tree folded;
3026
  gimple_seq forced_stmts = NULL;
3027
  unsigned int value_id;
3028
  gimple_stmt_iterator gsi;
3029
  tree exprtype = type ? type : get_expr_type (expr);
3030
  pre_expr nameexpr;
3031
  gimple newstmt;
3032
 
3033
  switch (expr->kind)
3034
    {
3035
      /* We may hit the NAME/CONSTANT case if we have to convert types
3036
         that value numbering saw through.  */
3037
    case NAME:
3038
      folded = PRE_EXPR_NAME (expr);
3039
      break;
3040
    case CONSTANT:
3041
      folded = PRE_EXPR_CONSTANT (expr);
3042
      break;
3043
    case REFERENCE:
3044
      {
3045
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3046
        folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3047
      }
3048
      break;
3049
    case NARY:
3050
      {
3051
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3052
        switch (nary->length)
3053
          {
3054
          case 2:
3055
            {
3056
              pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3057
              pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3058
              tree genop1 = find_or_generate_expression (block, op1,
3059
                                                         stmts, domstmt);
3060
              tree genop2 = find_or_generate_expression (block, op2,
3061
                                                         stmts, domstmt);
3062
              if (!genop1 || !genop2)
3063
                return NULL_TREE;
3064
              /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
3065
                 may be a constant with the wrong type.  */
3066
              if (nary->opcode == POINTER_PLUS_EXPR)
3067
                {
3068
                  genop1 = fold_convert (nary->type, genop1);
3069
                  genop2 = fold_convert (sizetype, genop2);
3070
                }
3071
              else
3072
                {
3073
                  genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3074
                  genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3075
                }
3076
 
3077
              folded = fold_build2 (nary->opcode, nary->type,
3078
                                    genop1, genop2);
3079
            }
3080
            break;
3081
          case 1:
3082
            {
3083
              pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3084
              tree genop1 = find_or_generate_expression (block, op1,
3085
                                                         stmts, domstmt);
3086
              if (!genop1)
3087
                return NULL_TREE;
3088
              genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3089
 
3090
              folded = fold_build1 (nary->opcode, nary->type,
3091
                                    genop1);
3092
            }
3093
            break;
3094
          default:
3095
            return NULL_TREE;
3096
          }
3097
      }
3098
      break;
3099
    default:
3100
      return NULL_TREE;
3101
    }
3102
 
3103
  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3104
    folded = fold_convert (exprtype, folded);
3105
 
3106
  /* Force the generated expression to be a sequence of GIMPLE
3107
     statements.
3108
     We have to call unshare_expr because force_gimple_operand may
3109
     modify the tree we pass to it.  */
3110
  folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3111
                                 false, NULL);
3112
 
3113
  /* If we have any intermediate expressions to the value sets, add them
3114
     to the value sets and chain them in the instruction stream.  */
3115
  if (forced_stmts)
3116
    {
3117
      gsi = gsi_start (forced_stmts);
3118
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
3119
        {
3120
          gimple stmt = gsi_stmt (gsi);
3121
          tree forcedname = gimple_get_lhs (stmt);
3122
          pre_expr nameexpr;
3123
 
3124
          VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3125
          if (TREE_CODE (forcedname) == SSA_NAME)
3126
            {
3127
              VN_INFO_GET (forcedname)->valnum = forcedname;
3128
              VN_INFO (forcedname)->value_id = get_next_value_id ();
3129
              nameexpr = get_or_alloc_expr_for_name (forcedname);
3130
              add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3131
              if (!in_fre)
3132
                bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3133
              bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3134
            }
3135
          mark_symbols_for_renaming (stmt);
3136
        }
3137
      gimple_seq_add_seq (stmts, forced_stmts);
3138
    }
3139
 
3140
  /* Build and insert the assignment of the end result to the temporary
3141
     that we will return.  */
3142
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
3143
    {
3144
      pretemp = create_tmp_var (exprtype, "pretmp");
3145
      get_var_ann (pretemp);
3146
    }
3147
 
3148
  temp = pretemp;
3149
  add_referenced_var (temp);
3150
 
3151
  if (TREE_CODE (exprtype) == COMPLEX_TYPE
3152
      || TREE_CODE (exprtype) == VECTOR_TYPE)
3153
    DECL_GIMPLE_REG_P (temp) = 1;
3154
 
3155
  newstmt = gimple_build_assign (temp, folded);
3156
  name = make_ssa_name (temp, newstmt);
3157
  gimple_assign_set_lhs (newstmt, name);
3158
  gimple_set_plf (newstmt, NECESSARY, false);
3159
 
3160
  gimple_seq_add_stmt (stmts, newstmt);
3161
  VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
3162
 
3163
  /* All the symbols in NEWEXPR should be put into SSA form.  */
3164
  mark_symbols_for_renaming (newstmt);
3165
 
3166
  /* Add a value number to the temporary.
3167
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3168
     we are creating the expression by pieces, and this particular piece of
3169
     the expression may have been represented.  There is no harm in replacing
3170
     here.  */
3171
  VN_INFO_GET (name)->valnum = name;
3172
  value_id = get_expr_value_id (expr);
3173
  VN_INFO (name)->value_id = value_id;
3174
  nameexpr = get_or_alloc_expr_for_name (name);
3175
  add_to_value (value_id, nameexpr);
3176
  if (!in_fre)
3177
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3178
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3179
 
3180
  pre_stats.insertions++;
3181
  if (dump_file && (dump_flags & TDF_DETAILS))
3182
    {
3183
      fprintf (dump_file, "Inserted ");
3184
      print_gimple_stmt (dump_file, newstmt, 0, 0);
3185
      fprintf (dump_file, " in predecessor %d\n", block->index);
3186
    }
3187
 
3188
  return name;
3189
}
3190
 
3191
 
3192
/* Returns true if we want to inhibit the insertions of PHI nodes
3193
   for the given EXPR for basic block BB (a member of a loop).
3194
   We want to do this, when we fear that the induction variable we
3195
   create might inhibit vectorization.  */
3196
 
3197
static bool
3198
inhibit_phi_insertion (basic_block bb, pre_expr expr)
3199
{
3200
  vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3201
  VEC (vn_reference_op_s, heap) *ops = vr->operands;
3202
  vn_reference_op_t op;
3203
  unsigned i;
3204
 
3205
  /* If we aren't going to vectorize we don't inhibit anything.  */
3206
  if (!flag_tree_vectorize)
3207
    return false;
3208
 
3209
  /* Otherwise we inhibit the insertion when the address of the
3210
     memory reference is a simple induction variable.  In other
3211
     cases the vectorizer won't do anything anyway (either it's
3212
     loop invariant or a complicated expression).  */
3213
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3214
    {
3215
      switch (op->opcode)
3216
        {
3217
        case ARRAY_REF:
3218
        case ARRAY_RANGE_REF:
3219
          if (TREE_CODE (op->op0) != SSA_NAME)
3220
            break;
3221
          /* Fallthru.  */
3222
        case SSA_NAME:
3223
          {
3224
            basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3225
            affine_iv iv;
3226
            /* Default defs are loop invariant.  */
3227
            if (!defbb)
3228
              break;
3229
            /* Defined outside this loop, also loop invariant.  */
3230
            if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3231
              break;
3232
            /* If it's a simple induction variable inhibit insertion,
3233
               the vectorizer might be interested in this one.  */
3234
            if (simple_iv (bb->loop_father, bb->loop_father,
3235
                           op->op0, &iv, true))
3236
              return true;
3237
            /* No simple IV, vectorizer can't do anything, hence no
3238
               reason to inhibit the transformation for this operand.  */
3239
            break;
3240
          }
3241
        default:
3242
          break;
3243
        }
3244
    }
3245
  return false;
3246
}
3247
 
3248
/* Insert the to-be-made-available values of expression EXPRNUM for each
3249
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3250
   merge the result with a phi node, given the same value number as
3251
   NODE.  Return true if we have inserted new stuff.  */
3252
 
3253
static bool
3254
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3255
                            pre_expr *avail)
3256
{
3257
  pre_expr expr = expression_for_id (exprnum);
3258
  pre_expr newphi;
3259
  unsigned int val = get_expr_value_id (expr);
3260
  edge pred;
3261
  bool insertions = false;
3262
  bool nophi = false;
3263
  basic_block bprime;
3264
  pre_expr eprime;
3265
  edge_iterator ei;
3266
  tree type = get_expr_type (expr);
3267
  tree temp;
3268
  gimple phi;
3269
 
3270
  if (dump_file && (dump_flags & TDF_DETAILS))
3271
    {
3272
      fprintf (dump_file, "Found partial redundancy for expression ");
3273
      print_pre_expr (dump_file, expr);
3274
      fprintf (dump_file, " (%04d)\n", val);
3275
    }
3276
 
3277
  /* Make sure we aren't creating an induction variable.  */
3278
  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3279
    {
3280
      bool firstinsideloop = false;
3281
      bool secondinsideloop = false;
3282
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3283
                                               EDGE_PRED (block, 0)->src);
3284
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3285
                                                EDGE_PRED (block, 1)->src);
3286
      /* Induction variables only have one edge inside the loop.  */
3287
      if ((firstinsideloop ^ secondinsideloop)
3288
          && (expr->kind != REFERENCE
3289
              || inhibit_phi_insertion (block, expr)))
3290
        {
3291
          if (dump_file && (dump_flags & TDF_DETAILS))
3292
            fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3293
          nophi = true;
3294
        }
3295
    }
3296
 
3297
  /* Make the necessary insertions.  */
3298
  FOR_EACH_EDGE (pred, ei, block->preds)
3299
    {
3300
      gimple_seq stmts = NULL;
3301
      tree builtexpr;
3302
      bprime = pred->src;
3303
      eprime = avail[bprime->index];
3304
 
3305
      if (eprime->kind != NAME && eprime->kind != CONSTANT)
3306
        {
3307
          builtexpr = create_expression_by_pieces (bprime,
3308
                                                   eprime,
3309
                                                   &stmts, NULL,
3310
                                                   type);
3311
          gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3312
          gsi_insert_seq_on_edge (pred, stmts);
3313
          avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3314
          insertions = true;
3315
        }
3316
      else if (eprime->kind == CONSTANT)
3317
        {
3318
          /* Constants may not have the right type, fold_convert
3319
             should give us back a constant with the right type.
3320
          */
3321
          tree constant = PRE_EXPR_CONSTANT (eprime);
3322
          if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3323
            {
3324
              tree builtexpr = fold_convert (type, constant);
3325
              if (!is_gimple_min_invariant (builtexpr))
3326
                {
3327
                  tree forcedexpr = force_gimple_operand (builtexpr,
3328
                                                          &stmts, true,
3329
                                                          NULL);
3330
                  if (!is_gimple_min_invariant (forcedexpr))
3331
                    {
3332
                      if (forcedexpr != builtexpr)
3333
                        {
3334
                          VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3335
                          VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3336
                        }
3337
                      if (stmts)
3338
                        {
3339
                          gimple_stmt_iterator gsi;
3340
                          gsi = gsi_start (stmts);
3341
                          for (; !gsi_end_p (gsi); gsi_next (&gsi))
3342
                            {
3343
                              gimple stmt = gsi_stmt (gsi);
3344
                              VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3345
                              gimple_set_plf (stmt, NECESSARY, false);
3346
                            }
3347
                          gsi_insert_seq_on_edge (pred, stmts);
3348
                        }
3349
                      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3350
                    }
3351
                }
3352
            }
3353
        }
3354
      else if (eprime->kind == NAME)
3355
        {
3356
          /* We may have to do a conversion because our value
3357
             numbering can look through types in certain cases, but
3358
             our IL requires all operands of a phi node have the same
3359
             type.  */
3360
          tree name = PRE_EXPR_NAME (eprime);
3361
          if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3362
            {
3363
              tree builtexpr;
3364
              tree forcedexpr;
3365
              builtexpr = fold_convert (type, name);
3366
              forcedexpr = force_gimple_operand (builtexpr,
3367
                                                 &stmts, true,
3368
                                                 NULL);
3369
 
3370
              if (forcedexpr != name)
3371
                {
3372
                  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3373
                  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3374
                }
3375
 
3376
              if (stmts)
3377
                {
3378
                  gimple_stmt_iterator gsi;
3379
                  gsi = gsi_start (stmts);
3380
                  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3381
                    {
3382
                      gimple stmt = gsi_stmt (gsi);
3383
                      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3384
                      gimple_set_plf (stmt, NECESSARY, false);
3385
                    }
3386
                  gsi_insert_seq_on_edge (pred, stmts);
3387
                }
3388
              avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3389
            }
3390
        }
3391
    }
3392
  /* If we didn't want a phi node, and we made insertions, we still have
3393
     inserted new stuff, and thus return true.  If we didn't want a phi node,
3394
     and didn't make insertions, we haven't added anything new, so return
3395
     false.  */
3396
  if (nophi && insertions)
3397
    return true;
3398
  else if (nophi && !insertions)
3399
    return false;
3400
 
3401
  /* Now build a phi for the new variable.  */
3402
  if (!prephitemp || TREE_TYPE (prephitemp) != type)
3403
    {
3404
      prephitemp = create_tmp_var (type, "prephitmp");
3405
      get_var_ann (prephitemp);
3406
    }
3407
 
3408
  temp = prephitemp;
3409
  add_referenced_var (temp);
3410
 
3411
  if (TREE_CODE (type) == COMPLEX_TYPE
3412
      || TREE_CODE (type) == VECTOR_TYPE)
3413
    DECL_GIMPLE_REG_P (temp) = 1;
3414
  phi = create_phi_node (temp, block);
3415
 
3416
  gimple_set_plf (phi, NECESSARY, false);
3417
  VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3418
  VN_INFO (gimple_phi_result (phi))->value_id = val;
3419
  VEC_safe_push (gimple, heap, inserted_exprs, phi);
3420
  bitmap_set_bit (inserted_phi_names,
3421
                  SSA_NAME_VERSION (gimple_phi_result (phi)));
3422
  FOR_EACH_EDGE (pred, ei, block->preds)
3423
    {
3424
      pre_expr ae = avail[pred->src->index];
3425
      gcc_assert (get_expr_type (ae) == type
3426
                  || useless_type_conversion_p (type, get_expr_type (ae)));
3427
      if (ae->kind == CONSTANT)
3428
        add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3429
      else
3430
        add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3431
                     UNKNOWN_LOCATION);
3432
    }
3433
 
3434
  newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3435
  add_to_value (val, newphi);
3436
 
3437
  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3438
     this insertion, since we test for the existence of this value in PHI_GEN
3439
     before proceeding with the partial redundancy checks in insert_aux.
3440
 
3441
     The value may exist in AVAIL_OUT, in particular, it could be represented
3442
     by the expression we are trying to eliminate, in which case we want the
3443
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
3444
     inserted there.
3445
 
3446
     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3447
     this block, because if it did, it would have existed in our dominator's
3448
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
3449
  */
3450
 
3451
  bitmap_insert_into_set (PHI_GEN (block), newphi);
3452
  bitmap_value_replace_in_set (AVAIL_OUT (block),
3453
                               newphi);
3454
  bitmap_insert_into_set (NEW_SETS (block),
3455
                          newphi);
3456
 
3457
  if (dump_file && (dump_flags & TDF_DETAILS))
3458
    {
3459
      fprintf (dump_file, "Created phi ");
3460
      print_gimple_stmt (dump_file, phi, 0, 0);
3461
      fprintf (dump_file, " in block %d\n", block->index);
3462
    }
3463
  pre_stats.phis++;
3464
  return true;
3465
}
3466
 
3467
 
3468
 
3469
/* Perform insertion of partially redundant values.
3470
   For BLOCK, do the following:
3471
   1.  Propagate the NEW_SETS of the dominator into the current block.
3472
   If the block has multiple predecessors,
3473
       2a. Iterate over the ANTIC expressions for the block to see if
3474
           any of them are partially redundant.
3475
       2b. If so, insert them into the necessary predecessors to make
3476
           the expression fully redundant.
3477
       2c. Insert a new PHI merging the values of the predecessors.
3478
       2d. Insert the new PHI, and the new expressions, into the
3479
           NEW_SETS set.
3480
   3. Recursively call ourselves on the dominator children of BLOCK.
3481
 
3482
   Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3483
   do_regular_insertion and do_partial_insertion.
3484
 
3485
*/
3486
 
3487
/* Perform regular (fully anticipatable) insertion for BLOCK, whose
   immediate dominator is DOM.  For every non-NAME expression in
   ANTIC_IN (BLOCK) that is not already fully redundant, check whether
   its value is available in some (but not all) predecessors; if so,
   insert computations into the predecessors that lack it and create a
   PHI merging them.  Returns true if anything new was inserted.  */
static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (expr->kind != NAME)
        {
          /* Per-predecessor leader (or translated expression), indexed
             by predecessor block index.  */
          pre_expr *avail;
          unsigned int val;
          bool by_some = false;
          bool cant_insert = false;
          bool all_same = true;
          pre_expr first_s = NULL;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;
          pre_expr edoubleprime = NULL;
          bool do_insertion = false;

          val = get_expr_value_id (expr);
          /* Already merged by a PHI in this block: nothing to do.  */
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          /* Available out of the dominator: fully redundant, handled
             by elimination, not insertion.  */
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Found fully redundant value\n");
              continue;
            }

          avail = XCNEWVEC (pre_expr, last_basic_block);
          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              eprime = phi_translate (expr, ANTIC_IN (block), NULL,
                                      bprime, block);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  cant_insert = true;
                  break;
                }

              eprime = fully_constant_expression (eprime);
              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
                                                 vprime, NULL);
              if (edoubleprime == NULL)
                {
                  /* Not available in this predecessor: remember the
                     translated expression so it can be inserted there.  */
                  avail[bprime->index] = eprime;
                  all_same = false;
                }
              else
                {
                  avail[bprime->index] = edoubleprime;
                  by_some = true;
                  /* We want to perform insertions to remove a redundancy on
                     a path in the CFG we want to optimize for speed.  */
                  if (optimize_edge_for_speed_p (pred))
                    do_insertion = true;
                  if (first_s == NULL)
                    first_s = edoubleprime;
                  else if (!pre_expr_eq (first_s, edoubleprime))
                    all_same = false;
                }
            }
          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && !all_same && by_some && do_insertion
              && dbg_cnt (treepre_insert))
            {
              if (insert_into_preds_of_block (block, get_expression_id (expr),
                                              avail))
                new_stuff = true;
            }
          /* If all edges produce the same value and that value is
             an invariant, then the PHI has the same value on all
             edges.  Note this.  */
          else if (!cant_insert && all_same && eprime
                   && (edoubleprime->kind == CONSTANT
                       || edoubleprime->kind == NAME)
                   && !value_id_constant_p (val))
            {
              unsigned int j;
              bitmap_iterator bi;
              bitmap_set_t exprset = VEC_index (bitmap_set_t,
                                                value_expressions, val);

              unsigned int new_val = get_expr_value_id (edoubleprime);
              FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
                {
                  /* NOTE(review): this inner `expr' shadows the outer
                     loop variable of the same name; intentional here,
                     but confirm before renaming either.  */
                  pre_expr expr = expression_for_id (j);

                  if (expr->kind == NAME)
                    {
                      vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
                      /* Just reset the value id and valnum so it is
                         the same as the constant we have discovered.  */
                      if (edoubleprime->kind == CONSTANT)
                        {
                          info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
                          pre_stats.constified++;
                        }
                      else
                        info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
                      info->value_id = new_val;
                    }
                }
            }
          free (avail);
        }
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
3624
 
3625
 
3626
/* Perform insertion for partially anticipatable expressions.  There
3627
   is only one case we will perform insertion for these.  This case is
3628
   if the expression is partially anticipatable, and fully available.
3629
   In this case, we know that putting it earlier will enable us to
3630
   remove the later computation.  */
3631
 
3632
 
3633
static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (expr->kind != NAME)
        {
          /* Leader expression for each predecessor, indexed by
             predecessor block index.  */
          pre_expr *avail;
          unsigned int val;
          /* Unlike regular insertion, we only insert when the value is
             available along EVERY predecessor edge.  */
          bool by_all = true;
          bool cant_insert = false;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;

          val = get_expr_value_id (expr);
          /* Already merged by a PHI in this block: nothing to do.  */
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          /* Fully redundant via the dominator: handled by elimination.  */
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            continue;

          avail = XCNEWVEC (pre_expr, last_basic_block);
          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;
              pre_expr edoubleprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              eprime = phi_translate (expr, ANTIC_IN (block),
                                      PA_IN (block),
                                      bprime, block);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  cant_insert = true;
                  break;
                }

              eprime = fully_constant_expression (eprime);
              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
                                                 vprime, NULL);
              if (edoubleprime == NULL)
                {
                  /* Missing along this edge: inserting would add a
                     computation to this path, so give up.  */
                  by_all = false;
                  break;
                }
              else
                avail[bprime->index] = edoubleprime;

            }

          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && by_all && dbg_cnt (treepre_insert))
            {
              pre_stats.pa_insert++;
              if (insert_into_preds_of_block (block, get_expression_id (expr),
                                              avail))
                new_stuff = true;
            }
          free (avail);
        }
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
3721
 
3722
static bool
3723
insert_aux (basic_block block)
3724
{
3725
  basic_block son;
3726
  bool new_stuff = false;
3727
 
3728
  if (block)
3729
    {
3730
      basic_block dom;
3731
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
3732
      if (dom)
3733
        {
3734
          unsigned i;
3735
          bitmap_iterator bi;
3736
          bitmap_set_t newset = NEW_SETS (dom);
3737
          if (newset)
3738
            {
3739
              /* Note that we need to value_replace both NEW_SETS, and
3740
                 AVAIL_OUT. For both the case of NEW_SETS, the value may be
3741
                 represented by some non-simple expression here that we want
3742
                 to replace it with.  */
3743
              FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3744
                {
3745
                  pre_expr expr = expression_for_id (i);
3746
                  bitmap_value_replace_in_set (NEW_SETS (block), expr);
3747
                  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3748
                }
3749
            }
3750
          if (!single_pred_p (block))
3751
            {
3752
              new_stuff |= do_regular_insertion (block, dom);
3753
              if (do_partial_partial)
3754
                new_stuff |= do_partial_partial_insertion (block, dom);
3755
            }
3756
        }
3757
    }
3758
  for (son = first_dom_son (CDI_DOMINATORS, block);
3759
       son;
3760
       son = next_dom_son (CDI_DOMINATORS, son))
3761
    {
3762
      new_stuff |= insert_aux (son);
3763
    }
3764
 
3765
  return new_stuff;
3766
}
3767
 
3768
/* Perform insertion of partially redundant values.  */
3769
 
3770
static void
3771
insert (void)
3772
{
3773
  bool new_stuff = true;
3774
  basic_block bb;
3775
  int num_iterations = 0;
3776
 
3777
  FOR_ALL_BB (bb)
3778
    NEW_SETS (bb) = bitmap_set_new ();
3779
 
3780
  while (new_stuff)
3781
    {
3782
      num_iterations++;
3783
      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3784
    }
3785
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
3786
}
3787
 
3788
 
3789
/* Add OP to EXP_GEN (block), and possibly to the maximal set.  */
3790
 
3791
static void
3792
add_to_exp_gen (basic_block block, tree op)
3793
{
3794
  if (!in_fre)
3795
    {
3796
      pre_expr result;
3797
      if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3798
        return;
3799
      result = get_or_alloc_expr_for_name (op);
3800
      bitmap_value_insert_into_set (EXP_GEN (block), result);
3801
    }
3802
}
3803
 
3804
/* Create value ids for PHI in BLOCK.  */
3805
 
3806
static void
3807
make_values_for_phi (gimple phi, basic_block block)
3808
{
3809
  tree result = gimple_phi_result (phi);
3810
 
3811
  /* We have no need for virtual phis, as they don't represent
3812
     actual computations.  */
3813
  if (is_gimple_reg (result))
3814
    {
3815
      pre_expr e = get_or_alloc_expr_for_name (result);
3816
      add_to_value (get_expr_value_id (e), e);
3817
      bitmap_insert_into_set (PHI_GEN (block), e);
3818
      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3819
      if (!in_fre)
3820
        {
3821
          unsigned i;
3822
          for (i = 0; i < gimple_phi_num_args (phi); ++i)
3823
            {
3824
              tree arg = gimple_phi_arg_def (phi, i);
3825
              if (TREE_CODE (arg) == SSA_NAME)
3826
                {
3827
                  e = get_or_alloc_expr_for_name (arg);
3828
                  add_to_value (get_expr_value_id (e), e);
3829
                }
3830
            }
3831
        }
3832
    }
3833
}
3834
 
3835
/* Compute the AVAIL set for all basic blocks.
3836
 
3837
   This function performs value numbering of the statements in each basic
3838
   block.  The AVAIL sets are built from information we glean while doing
3839
   this value numbering, since the AVAIL sets contain only one entry per
3840
   value.
3841
 
3842
   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3843
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
3844
 
3845
static void
compute_avail (void)
{

  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
          || !SSA_NAME_IS_DEFAULT_DEF (name)
          || has_zero_uses (name)
          || !is_gimple_reg (name))
        continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      if (!in_fre)
        bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  Blocks are processed in
     dominator-tree order, so a block's dominator is always done
     before the block itself.  */
  while (sp)
    {
      gimple_stmt_iterator gsi;
      gimple stmt;
      basic_block dom;
      /* Statement uids restart at 1 in every block.  */
      unsigned int stmt_uid = 1;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
         its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
        bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      /* Generate values for PHI nodes.  */
      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
        make_values_for_phi (gsi_stmt (gsi), block);

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);
          gimple_set_uid (stmt, stmt_uid++);

          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
          if (is_gimple_call (stmt)
              && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return or have side-effects
                 that forbids hoisting possibly trapping expressions
                 before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }

          /* Every SSA name defined here becomes available on exit.  */
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              if (!in_fre)
                bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          /* Volatile or throwing statements are never candidates for
             PRE of their computed expression.  */
          if (gimple_has_volatile_ops (stmt)
              || stmt_could_throw_p (stmt))
            continue;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                add_to_exp_gen (block, op);
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                unsigned int i;
                vn_reference_op_t vro;
                pre_expr result = NULL;
                VEC(vn_reference_op_s, heap) *ops = NULL;

                if (!can_value_number_call (stmt))
                  continue;

                /* Value-number the call as a reference expression.  */
                copy_reference_ops_from_call (stmt, &ops);
                vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
                                            gimple_expr_type (stmt),
                                            ops, &ref, false);
                VEC_free (vn_reference_op_s, heap, ops);
                if (!ref)
                  continue;

                /* All SSA-name operands of the reference are used here.  */
                for (i = 0; VEC_iterate (vn_reference_op_s,
                                         ref->operands, i,
                                         vro); i++)
                  {
                    if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
                      add_to_exp_gen (block, vro->op0);
                    if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
                      add_to_exp_gen (block, vro->op1);
                    if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                      add_to_exp_gen (block, vro->op2);
                  }
                result = (pre_expr) pool_alloc (pre_expr_pool);
                result->kind = REFERENCE;
                result->id = 0;
                PRE_EXPR_REFERENCE (result) = ref;

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                if (!in_fre)
                  bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                  {
                  case tcc_unary:
                  case tcc_binary:
                  case tcc_comparison:
                    {
                      vn_nary_op_t nary;
                      unsigned int i;

                      vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
                                                gimple_assign_rhs_code (stmt),
                                                gimple_expr_type (stmt),
                                                gimple_assign_rhs1 (stmt),
                                                gimple_assign_rhs2 (stmt),
                                                NULL_TREE, NULL_TREE, &nary);

                      if (!nary)
                        continue;

                      for (i = 0; i < nary->length; i++)
                        if (TREE_CODE (nary->op[i]) == SSA_NAME)
                          add_to_exp_gen (block, nary->op[i]);

                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }

                  case tcc_declaration:
                  case tcc_reference:
                    {
                      vn_reference_t ref;
                      unsigned int i;
                      vn_reference_op_t vro;

                      vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                           gimple_vuse (stmt),
                                           true, &ref);
                      if (!ref)
                        continue;

                      for (i = 0; VEC_iterate (vn_reference_op_s,
                                               ref->operands, i,
                                               vro); i++)
                        {
                          if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
                            add_to_exp_gen (block, vro->op0);
                          if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
                            add_to_exp_gen (block, vro->op1);
                          if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                            add_to_exp_gen (block, vro->op2);
                        }
                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    /* For any other statement that we don't
                       recognize, simply add all referenced
                       SSA_NAMEs to EXP_GEN.  */
                    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                      add_to_exp_gen (block, op);
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                if (!in_fre)
                  bitmap_value_insert_into_set (EXP_GEN (block), result);

                continue;
              }
            default:
              break;
            }
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}
4091
 
4092
/* Insert the expression for SSA_VN that SCCVN thought would be simpler
4093
   than the available expressions for it.  The insertion point is
4094
   right before the first use in STMT.  Returns the SSA_NAME that should
4095
   be used for replacement.  */
4096
 
4097
static tree
4098
do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4099
{
4100
  basic_block bb = gimple_bb (stmt);
4101
  gimple_stmt_iterator gsi;
4102
  gimple_seq stmts = NULL;
4103
  tree expr;
4104
  pre_expr e;
4105
 
4106
  /* First create a value expression from the expression we want
4107
     to insert and associate it with the value handle for SSA_VN.  */
4108
  e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4109
  if (e == NULL)
4110
    return NULL_TREE;
4111
 
4112
  /* Then use create_expression_by_pieces to generate a valid
4113
     expression to insert at this point of the IL stream.  */
4114
  expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4115
  if (expr == NULL_TREE)
4116
    return NULL_TREE;
4117
  gsi = gsi_for_stmt (stmt);
4118
  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4119
 
4120
  return expr;
4121
}
4122
 
4123
/* Eliminate fully redundant computations.  */
4124
 
4125
static unsigned int
4126
eliminate (void)
4127
{
4128
  VEC (gimple, heap) *to_remove = NULL;
4129
  basic_block b;
4130
  unsigned int todo = 0;
4131
  gimple_stmt_iterator gsi;
4132
  gimple stmt;
4133
  unsigned i;
4134
 
4135
  FOR_EACH_BB (b)
4136
    {
4137
      for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4138
        {
4139
          stmt = gsi_stmt (gsi);
4140
 
4141
          /* Lookup the RHS of the expression, see if we have an
4142
             available computation for it.  If so, replace the RHS with
4143
             the available computation.  */
4144
          if (gimple_has_lhs (stmt)
4145
              && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4146
              && !gimple_assign_ssa_name_copy_p (stmt)
4147
              && (!gimple_assign_single_p (stmt)
4148
                  || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4149
              && !gimple_has_volatile_ops  (stmt)
4150
              && !has_zero_uses (gimple_get_lhs (stmt)))
4151
            {
4152
              tree lhs = gimple_get_lhs (stmt);
4153
              tree rhs = NULL_TREE;
4154
              tree sprime = NULL;
4155
              pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4156
              pre_expr sprimeexpr;
4157
 
4158
              if (gimple_assign_single_p (stmt))
4159
                rhs = gimple_assign_rhs1 (stmt);
4160
 
4161
              sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4162
                                               get_expr_value_id (lhsexpr),
4163
                                               NULL);
4164
 
4165
              if (sprimeexpr)
4166
                {
4167
                  if (sprimeexpr->kind == CONSTANT)
4168
                    sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4169
                  else if (sprimeexpr->kind == NAME)
4170
                    sprime = PRE_EXPR_NAME (sprimeexpr);
4171
                  else
4172
                    gcc_unreachable ();
4173
                }
4174
 
4175
              /* If there is no existing leader but SCCVN knows this
4176
                 value is constant, use that constant.  */
4177
              if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4178
                {
4179
                  sprime = VN_INFO (lhs)->valnum;
4180
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
4181
                                                  TREE_TYPE (sprime)))
4182
                    sprime = fold_convert (TREE_TYPE (lhs), sprime);
4183
 
4184
                  if (dump_file && (dump_flags & TDF_DETAILS))
4185
                    {
4186
                      fprintf (dump_file, "Replaced ");
4187
                      print_gimple_expr (dump_file, stmt, 0, 0);
4188
                      fprintf (dump_file, " with ");
4189
                      print_generic_expr (dump_file, sprime, 0);
4190
                      fprintf (dump_file, " in ");
4191
                      print_gimple_stmt (dump_file, stmt, 0, 0);
4192
                    }
4193
                  pre_stats.eliminations++;
4194
                  propagate_tree_value_into_stmt (&gsi, sprime);
4195
                  stmt = gsi_stmt (gsi);
4196
                  update_stmt (stmt);
4197
                  continue;
4198
                }
4199
 
4200
              /* If there is no existing usable leader but SCCVN thinks
4201
                 it has an expression it wants to use as replacement,
4202
                 insert that.  */
4203
              if (!sprime || sprime == lhs)
4204
                {
4205
                  tree val = VN_INFO (lhs)->valnum;
4206
                  if (val != VN_TOP
4207
                      && TREE_CODE (val) == SSA_NAME
4208
                      && VN_INFO (val)->needs_insertion
4209
                      && can_PRE_operation (vn_get_expr_for (val)))
4210
                    sprime = do_SCCVN_insertion (stmt, val);
4211
                }
4212
              if (sprime
4213
                  && sprime != lhs
4214
                  && (rhs == NULL_TREE
4215
                      || TREE_CODE (rhs) != SSA_NAME
4216
                      || may_propagate_copy (rhs, sprime)))
4217
                {
4218
                  gcc_assert (sprime != rhs);
4219
 
4220
                  if (dump_file && (dump_flags & TDF_DETAILS))
4221
                    {
4222
                      fprintf (dump_file, "Replaced ");
4223
                      print_gimple_expr (dump_file, stmt, 0, 0);
4224
                      fprintf (dump_file, " with ");
4225
                      print_generic_expr (dump_file, sprime, 0);
4226
                      fprintf (dump_file, " in ");
4227
                      print_gimple_stmt (dump_file, stmt, 0, 0);
4228
                    }
4229
 
4230
                  if (TREE_CODE (sprime) == SSA_NAME)
4231
                    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4232
                                    NECESSARY, true);
4233
                  /* We need to make sure the new and old types actually match,
4234
                     which may require adding a simple cast, which fold_convert
4235
                     will do for us.  */
4236
                  if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4237
                      && !useless_type_conversion_p (gimple_expr_type (stmt),
4238
                                                     TREE_TYPE (sprime)))
4239
                    sprime = fold_convert (gimple_expr_type (stmt), sprime);
4240
 
4241
                  pre_stats.eliminations++;
4242
                  propagate_tree_value_into_stmt (&gsi, sprime);
4243
                  stmt = gsi_stmt (gsi);
4244
                  update_stmt (stmt);
4245
 
4246
                  /* If we removed EH side effects from the statement, clean
4247
                     its EH information.  */
4248
                  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4249
                    {
4250
                      bitmap_set_bit (need_eh_cleanup,
4251
                                      gimple_bb (stmt)->index);
4252
                      if (dump_file && (dump_flags & TDF_DETAILS))
4253
                        fprintf (dump_file, "  Removed EH side effects.\n");
4254
                    }
4255
                }
4256
            }
4257
          /* If the statement is a scalar store, see if the expression
4258
             has the same value number as its rhs.  If so, the store is
4259
             dead.  */
4260
          else if (gimple_assign_single_p (stmt)
4261
                   && !is_gimple_reg (gimple_assign_lhs (stmt))
4262
                   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4263
                       || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4264
            {
4265
              tree rhs = gimple_assign_rhs1 (stmt);
4266
              tree val;
4267
              val = vn_reference_lookup (gimple_assign_lhs (stmt),
4268
                                         gimple_vuse (stmt), true, NULL);
4269
              if (TREE_CODE (rhs) == SSA_NAME)
4270
                rhs = VN_INFO (rhs)->valnum;
4271
              if (val
4272
                  && operand_equal_p (val, rhs, 0))
4273
                {
4274
                  if (dump_file && (dump_flags & TDF_DETAILS))
4275
                    {
4276
                      fprintf (dump_file, "Deleted redundant store ");
4277
                      print_gimple_stmt (dump_file, stmt, 0, 0);
4278
                    }
4279
 
4280
                  /* Queue stmt for removal.  */
4281
                  VEC_safe_push (gimple, heap, to_remove, stmt);
4282
                }
4283
            }
4284
          /* Visit COND_EXPRs and fold the comparison with the
4285
             available value-numbers.  */
4286
          else if (gimple_code (stmt) == GIMPLE_COND)
4287
            {
4288
              tree op0 = gimple_cond_lhs (stmt);
4289
              tree op1 = gimple_cond_rhs (stmt);
4290
              tree result;
4291
 
4292
              if (TREE_CODE (op0) == SSA_NAME)
4293
                op0 = VN_INFO (op0)->valnum;
4294
              if (TREE_CODE (op1) == SSA_NAME)
4295
                op1 = VN_INFO (op1)->valnum;
4296
              result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4297
                                    op0, op1);
4298
              if (result && TREE_CODE (result) == INTEGER_CST)
4299
                {
4300
                  if (integer_zerop (result))
4301
                    gimple_cond_make_false (stmt);
4302
                  else
4303
                    gimple_cond_make_true (stmt);
4304
                  update_stmt (stmt);
4305
                  todo = TODO_cleanup_cfg;
4306
                }
4307
            }
4308
          /* Visit indirect calls and turn them into direct calls if
4309
             possible.  */
4310
          if (gimple_code (stmt) == GIMPLE_CALL
4311
              && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4312
            {
4313
              tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4314
              if (TREE_CODE (fn) == ADDR_EXPR
4315
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4316
                {
4317
                  if (dump_file && (dump_flags & TDF_DETAILS))
4318
                    {
4319
                      fprintf (dump_file, "Replacing call target with ");
4320
                      print_generic_expr (dump_file, fn, 0);
4321
                      fprintf (dump_file, " in ");
4322
                      print_gimple_stmt (dump_file, stmt, 0, 0);
4323
                    }
4324
 
4325
                  gimple_call_set_fn (stmt, fn);
4326
                  update_stmt (stmt);
4327
                  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4328
                    {
4329
                      bitmap_set_bit (need_eh_cleanup,
4330
                                      gimple_bb (stmt)->index);
4331
                      if (dump_file && (dump_flags & TDF_DETAILS))
4332
                        fprintf (dump_file, "  Removed EH side effects.\n");
4333
                    }
4334
 
4335
                  /* Changing an indirect call to a direct call may
4336
                     have exposed different semantics.  This may
4337
                     require an SSA update.  */
4338
                  todo |= TODO_update_ssa_only_virtuals;
4339
                }
4340
            }
4341
        }
4342
 
4343
      for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4344
        {
4345
          gimple stmt, phi = gsi_stmt (gsi);
4346
          tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4347
          pre_expr sprimeexpr, resexpr;
4348
          gimple_stmt_iterator gsi2;
4349
 
4350
          /* We want to perform redundant PHI elimination.  Do so by
4351
             replacing the PHI with a single copy if possible.
4352
             Do not touch inserted, single-argument or virtual PHIs.  */
4353
          if (gimple_phi_num_args (phi) == 1
4354
              || !is_gimple_reg (res)
4355
              || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
4356
            {
4357
              gsi_next (&gsi);
4358
              continue;
4359
            }
4360
 
4361
          resexpr = get_or_alloc_expr_for_name (res);
4362
          sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4363
                                           get_expr_value_id (resexpr), NULL);
4364
          if (sprimeexpr)
4365
            {
4366
              if (sprimeexpr->kind == CONSTANT)
4367
                sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4368
              else if (sprimeexpr->kind == NAME)
4369
                sprime = PRE_EXPR_NAME (sprimeexpr);
4370
              else
4371
                gcc_unreachable ();
4372
            }
4373
          if (!sprimeexpr
4374
              || sprime == res)
4375
            {
4376
              gsi_next (&gsi);
4377
              continue;
4378
            }
4379
 
4380
          if (dump_file && (dump_flags & TDF_DETAILS))
4381
            {
4382
              fprintf (dump_file, "Replaced redundant PHI node defining ");
4383
              print_generic_expr (dump_file, res, 0);
4384
              fprintf (dump_file, " with ");
4385
              print_generic_expr (dump_file, sprime, 0);
4386
              fprintf (dump_file, "\n");
4387
            }
4388
 
4389
          remove_phi_node (&gsi, false);
4390
 
4391
          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4392
            sprime = fold_convert (TREE_TYPE (res), sprime);
4393
          stmt = gimple_build_assign (res, sprime);
4394
          SSA_NAME_DEF_STMT (res) = stmt;
4395
          if (TREE_CODE (sprime) == SSA_NAME)
4396
            gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4397
                            NECESSARY, true);
4398
          gsi2 = gsi_after_labels (b);
4399
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4400
          /* Queue the copy for eventual removal.  */
4401
          VEC_safe_push (gimple, heap, to_remove, stmt);
4402
          pre_stats.eliminations++;
4403
        }
4404
    }
4405
 
4406
  /* We cannot remove stmts during BB walk, especially not release SSA
4407
     names there as this confuses the VN machinery.  The stmts ending
4408
     up in to_remove are either stores or simple copies.  */
4409
  for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4410
    {
4411
      tree lhs = gimple_assign_lhs (stmt);
4412
      tree rhs = gimple_assign_rhs1 (stmt);
4413
      use_operand_p use_p;
4414
      gimple use_stmt;
4415
 
4416
      /* If there is a single use only, propagate the equivalency
4417
         instead of keeping the copy.  */
4418
      if (TREE_CODE (lhs) == SSA_NAME
4419
          && TREE_CODE (rhs) == SSA_NAME
4420
          && single_imm_use (lhs, &use_p, &use_stmt)
4421
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4422
        {
4423
          SET_USE (use_p, gimple_assign_rhs1 (stmt));
4424
          update_stmt (use_stmt);
4425
        }
4426
 
4427
      /* If this is a store or a now unused copy, remove it.  */
4428
      if (TREE_CODE (lhs) != SSA_NAME
4429
          || has_zero_uses (lhs))
4430
        {
4431
          gsi = gsi_for_stmt (stmt);
4432
          unlink_stmt_vdef (stmt);
4433
          gsi_remove (&gsi, true);
4434
          release_defs (stmt);
4435
        }
4436
    }
4437
  VEC_free (gimple, heap, to_remove);
4438
 
4439
  return todo;
4440
}
4441
 
4442
/* Borrow a bit of tree-ssa-dce.c for the moment.
4443
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4444
   this may be a bit faster, and we may want critical edges kept split.  */
4445
 
4446
/* If OP's defining statement has not already been determined to be necessary,
4447
   mark that statement necessary. Return the stmt, if it is newly
4448
   necessary.  */
4449
 
4450
static inline gimple
4451
mark_operand_necessary (tree op)
4452
{
4453
  gimple stmt;
4454
 
4455
  gcc_assert (op);
4456
 
4457
  if (TREE_CODE (op) != SSA_NAME)
4458
    return NULL;
4459
 
4460
  stmt = SSA_NAME_DEF_STMT (op);
4461
  gcc_assert (stmt);
4462
 
4463
  if (gimple_plf (stmt, NECESSARY)
4464
      || gimple_nop_p (stmt))
4465
    return NULL;
4466
 
4467
  gimple_set_plf (stmt, NECESSARY, true);
4468
  return stmt;
4469
}
4470
 
4471
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.   This simple DCE-like
   pass removes any insertions we made that weren't actually used.

   Works backwards from the statements in INSERTED_EXPRS already marked
   NECESSARY (by eliminate), transitively marking the definitions they
   use, then deletes every inserted statement left unmarked.  */

static void
remove_dead_inserted_code (void)
{
  VEC(gimple,heap) *worklist = NULL;
  int i;
  gimple t;

  /* Seed the worklist with the inserted statements eliminate already
     marked as needed.  */
  worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (gimple_plf (t, NECESSARY))
	VEC_quick_push (gimple, worklist, t);
    }
  /* Transitively mark the definitions feeding necessary statements.  */
  while (VEC_length (gimple, worklist) > 0)
    {
      t = VEC_pop (gimple, worklist);

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary. */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  /* Reserve so VEC_quick_push below cannot overflow.  */
	  VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    VEC_quick_push (gimple, worklist, n);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		VEC_safe_push (gimple, heap, worklist, n);
	    }
	}
    }

  /* Delete every insertion that never got marked necessary.  */
  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  VEC_free (gimple, heap, worklist);
}
4558
 
4559
/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
4560
   true, then then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
4561
   the number of visited blocks.  */
4562
 
4563
static int
4564
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4565
{
4566
  edge_iterator *stack;
4567
  int sp;
4568
  int post_order_num = 0;
4569
  sbitmap visited;
4570
  int count;
4571
 
4572
  if (include_entry_exit)
4573
    post_order[post_order_num++] = EXIT_BLOCK;
4574
 
4575
  /* Allocate stack for back-tracking up CFG.  */
4576
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4577
  sp = 0;
4578
 
4579
  /* Allocate bitmap to track nodes that have been visited.  */
4580
  visited = sbitmap_alloc (last_basic_block);
4581
 
4582
  /* None of the nodes in the CFG have been visited yet.  */
4583
  sbitmap_zero (visited);
4584
 
4585
  /* Push the last edge on to the stack.  */
4586
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4587
 
4588
  while (sp)
4589
    {
4590
      edge_iterator ei;
4591
      basic_block src;
4592
      basic_block dest;
4593
 
4594
      /* Look at the edge on the top of the stack.  */
4595
      ei = stack[sp - 1];
4596
      src = ei_edge (ei)->src;
4597
      dest = ei_edge (ei)->dest;
4598
 
4599
      /* Check if the edge destination has been visited yet.  */
4600
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4601
        {
4602
          /* Mark that we have visited the destination.  */
4603
          SET_BIT (visited, src->index);
4604
 
4605
          if (EDGE_COUNT (src->preds) > 0)
4606
            /* Since the DEST node has been visited for the first
4607
               time, check its successors.  */
4608
            stack[sp++] = ei_start (src->preds);
4609
          else
4610
            post_order[post_order_num++] = src->index;
4611
        }
4612
      else
4613
        {
4614
          if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4615
            post_order[post_order_num++] = dest->index;
4616
 
4617
          if (!ei_one_before_end_p (ei))
4618
            ei_next (&stack[sp - 1]);
4619
          else
4620
            sp--;
4621
        }
4622
    }
4623
 
4624
  if (include_entry_exit)
4625
    {
4626
      post_order[post_order_num++] = ENTRY_BLOCK;
4627
      count = post_order_num;
4628
    }
4629
  else
4630
    count = post_order_num + 2;
4631
 
4632
  free (stack);
4633
  sbitmap_free (visited);
4634
  return post_order_num;
4635
}
4636
 
4637
 
4638
/* Initialize data structures used by PRE.  DO_FRE is true when only
   full redundancy elimination is requested; it is latched into the
   global IN_FRE.  Allocates the per-block bitmap sets, the expression
   and value tables, the phi-translation cache and the post-order used
   by the ANTIC computation.  fini_pre undoes all of this.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  /* Expression id 0 is reserved; slot 0 of EXPRESSIONS is a NULL
     placeholder so ids index directly into the vector.  */
  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			 get_max_value_id() + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  /* Fake edges from infinite loops to EXIT make every block reachable
     in the reverse walk done by my_rev_post_order_compute.  */
  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));


  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  /* Hang the per-block bitmap sets off bb->aux.  */
  FOR_ALL_BB (bb)
    bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
				  pre_expr_hash,
				  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
4695
 
4696
 
4697
/* Deallocate data structures used by PRE.  Mirror of init_pre: frees
   the expression/value tables, per-block aux data and dominance info,
   purges dead EH edges recorded in NEED_EH_CLEANUP, and, when DO_FRE
   is false, tears down the loop optimizer that execute_pre set up.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  VEC_free (gimple, heap, inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  /* Releasing the obstack frees all bitmaps allocated on it at once.  */
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);

  /* Eliminations may have removed statements with EH side effects;
     purge the now-dead EH edges and clean up the CFG if so.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  if (!do_fre)
    loop_optimizer_finalize ();
}
4734
 
4735
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.

   Pipeline: run SCC value numbering, collect available expressions per
   block (compute_avail), then -- for full PRE only -- compute ANTIC
   sets and insert partially redundant expressions, and finally
   eliminate redundancies.  Returns TODO_* flags for the pass manager.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  /* Partial-partial elimination is only worth it at -O3 when
     optimizing this function for speed.  */
  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  /* run_scc_vn returning false means the function contains constructs
     value numbering cannot handle; bail out without doing PRE.  */
  if (!run_scc_vn (do_fre))
    {
      if (!do_fre)
	{
	  remove_dead_inserted_code ();
	  loop_optimizer_finalize ();
	}

      return 0;
    }
  init_pre (do_fre);
  scev_initialize ();


  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  clear_expression_ids ();
  free_scc_vn ();
  if (!do_fre)
    remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre (do_fre);

  return todo;
}
4816
 
4817
/* Gate and execute functions for PRE.  */
4818
 
4819
static unsigned int
4820
do_pre (void)
4821
{
4822
  return execute_pre (false);
4823
}
4824
 
4825
static bool
4826
gate_pre (void)
4827
{
4828
  return flag_tree_pre != 0;
4829
}
4830
 
4831
/* Pass descriptor registering full PRE with the pass manager.  The
   pass requires SSA form with critical edges already split and asks
   for alias information to be rebuilt before it runs.  */

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa /* todo_flags_finish */
 }
};
4851
 
4852
 
4853
/* Gate and execute functions for FRE.  */
4854
 
4855
static unsigned int
4856
execute_fre (void)
4857
{
4858
  return execute_pre (true);
4859
}
4860
 
4861
static bool
4862
gate_fre (void)
4863
{
4864
  return flag_tree_fre != 0;
4865
}
4866
 
4867
/* Pass descriptor registering full redundancy elimination with the
   pass manager.  Unlike pass_pre it does not require critical edges
   to be split, since FRE never inserts on edges.  */

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.