/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"

/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
         {
           a = inv;
           something;
         }
     }

   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
         something;
     }  */

/* A type for the list of statements that have to be moved in order to be
   able to hoist an invariant computation.  */

struct depend
{
  gimple stmt;
  struct depend *next;
};
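
/* Illustration (a sketch): if the loop body contains

     a_1 = n_2 * 4;
     b_3 = a_1 + 1;

   and the statement computing B_3 is hoisted, the statement computing A_1
   must be hoisted as well; the depend list of B_3's statement therefore
   records the statement defining A_1.  */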

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;        /* The outermost loop in which the statement
                                   is invariant.  */

  struct loop *tgt_loop;        /* The loop out of which we want to move the
                                   invariant.  */

  struct loop *always_executed_in;
                                /* The outermost loop for which we are sure
                                   the statement is executed if the loop
                                   is entered.  */

  unsigned cost;                /* Cost of the computation performed by the
                                   statement.  */

  struct depend *depends;       /* List of statements that must also be hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside of the
                                   MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static struct pointer_map_t *lim_aux_data_map;

/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;                    /* The reference itself.  */
  gimple stmt;                  /* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);

/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;                     /* The memory itself.  */
  unsigned id;                  /* ID assigned to the memory reference
                                   (its index in memory_accesses.refs_list)  */
  hashval_t hash;               /* Its hash value.  */
  bitmap stored;                /* The set of loops in which this memory
                                   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
                                /* The locations of the accesses.  Vector
                                   indexed by the loop number.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;            /* The set of loops in which the memory
                                   reference is independent, meaning:
                                   If it is stored in the loop, this store
                                     is independent of all other loads and
                                     stores.
                                   If it is only loaded, then it is independent
                                     of all stores in the loop.  */
  bitmap dep_loop;              /* The complement of INDEP_LOOP.  */

  bitmap indep_ref;             /* The set of memory references of which
                                   this reference is independent.  */
  bitmap dep_ref;               /* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of memory references stored in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_stored_in_loop;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
#define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL))

/* Whether the reference was analyzable.  */
#define MEM_ANALYZABLE(REF) ((REF)->mem != error_mark_node)

/* Allocates and returns new LIM auxiliary data for statement STMT.  */

static struct lim_aux_data *
init_lim_data (gimple stmt)
{
  void **p = pointer_map_insert (lim_aux_data_map, stmt);

  *p = XCNEW (struct lim_aux_data);
  return (struct lim_aux_data *) *p;
}

/* Returns the LIM auxiliary data for statement STMT, or NULL if none
   was recorded.  */

static struct lim_aux_data *
get_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return NULL;

  return (struct lim_aux_data *) *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

/* Releases and clears the LIM auxiliary data for statement STMT.  */

static void
clear_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return;

  free_lim_aux_data ((struct lim_aux_data *) *p);
  *p = NULL;
}

/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is
   returned.  Otherwise the function returns true after traversing through
   the whole reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
        {
        case SSA_NAME:
          return cbck (*addr_p, addr_p, data);

        case MEM_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          return cbck (*addr_p, nxt, data);

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case COMPONENT_REF:
          /* If the component has varying offset, it behaves like an index
             as well.  */
          idx = &TREE_OPERAND (*addr_p, 2);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;

          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
            return false;
          break;

        case VAR_DECL:
        case PARM_DECL:
        case STRING_CST:
        case RESULT_DECL:
        case VECTOR_CST:
        case COMPLEX_CST:
        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
          return true;

        case ADDR_EXPR:
          gcc_assert (is_gimple_min_invariant (*addr_p));
          return true;

        case TARGET_MEM_REF:
          idx = &TMR_BASE (*addr_p);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;
          idx = &TMR_INDEX (*addr_p);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;
          idx = &TMR_INDEX2 (*addr_p);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;
          return true;

        default:
          gcc_unreachable ();
        }
    }
}

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_code (stmt) == GIMPLE_PHI
      && gimple_phi_num_args (stmt) <= 2
      && is_gimple_reg (gimple_phi_result (stmt))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt)))
    return MOVE_POSSIBLE;

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (gimple_vdef (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
         cannot move it arbitrarily.  Consider code like

         char *s = something ();

         while (1)
           {
             if (s)
               t = strlen (s);
             else
               t = 0;
           }

         Here the strlen call cannot be moved out of the loop, even though
         s is invariant.  In addition to possibly creating a call with
         invalid arguments, moving out a function call that is not executed
         may cause performance regressions in case the call is costly and
         not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  /* Non-local loads in a transaction cannot be hoisted out.  Well,
     unless the load happens on every path out of the loop, but we
     don't take this into account yet.  */
  if (flag_tm
      && gimple_in_transaction (stmt)
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (DECL_P (rhs) && is_global_var (rhs))
        {
          if (dump_file)
            {
              fprintf (dump_file, "Cannot hoist conditional load of ");
              print_generic_expr (dump_file, rhs, TDF_SLIM);
              fprintf (dump_file, " because it is in a transaction.\n");
            }
          return MOVE_IMPOSSIBLE;
        }
    }

  return ret;
}
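
/* Illustration (a sketch): given

     while (--n)
       {
         x_1 = a_2 + b_3;    <- MOVE_POSSIBLE
         y_4 = 100 / b_3;    <- MOVE_PRESERVE_EXECUTION (division may trap)
         *p_5 = x_1;         <- MOVE_IMPOSSIBLE (the store creates a vdef)
       }

   the addition may be hoisted unconditionally, the division only if we can
   preserve whether it is executed, and the store not at all (store motion
   is handled separately).  */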

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  gimple def_stmt;
  basic_block def_bb;
  struct loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
                                 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
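
/* Illustration (a sketch): in the nest

     k_1 = ...;          <- defined before the nest
     for (...)           <- loop 1
       for (...)         <- loop 2
         ... = k_1 + 1;

   outermost_invariant_loop (k_1, loop 2) returns loop 1, since the value
   of K_1 does not change anywhere inside the nest.  NULL would be returned
   if K_1 were defined inside loop 2 itself.  */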

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined in the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
                bool add_cost)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *max_loop;
  struct depend *dep;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
         i.e. if it is likely that by moving the invariants dependent
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}

/* Returns an estimate for the cost of statement STMT.  The values here
   are just ad-hoc constants, similar to costs for inlining.  */

static unsigned
stmt_cost (gimple stmt)
{
  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_PHI)
    return LIM_EXPENSIVE;

  /* We should be hoisting calls if possible.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;

      /* Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
        return 0;

      return LIM_EXPENSIVE;
    }

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    return LIM_EXPENSIVE;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return 1;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
    case FMA_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      return LIM_EXPENSIVE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case WIDEN_LSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Shifts and rotates are usually expensive.  */
      return LIM_EXPENSIVE;

    case CONSTRUCTOR:
      /* Make vector construction cost proportional to the number
         of elements.  */
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    case SSA_NAME:
    case PAREN_EXPR:
      /* Whether or not something is wrapped inside a PAREN_EXPR
         should not change move cost.  Nor should an intermediate
         unpropagated SSA name copy.  */
      return 0;

    default:
      return 1;
    }
}

/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL
   is returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
        && ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to
   whether it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
  tree *lhs;
  enum tree_code code;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return NULL;

  code = gimple_assign_rhs_code (stmt);

  lhs = gimple_assign_lhs_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
          || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
        return NULL;

      *is_store = false;
      return gimple_assign_rhs1_ptr (stmt);
    }
  else if (code == SSA_NAME
           || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
               && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}

/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (gimple stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}

/* From a controlling predicate in DOM determine the arguments from
   the PHI node PHI that are chosen if the predicate evaluates to
   true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
   they are non-NULL.  Returns true if the arguments can be determined,
   otherwise returns false.  */

static bool
extract_true_false_args_from_phi (basic_block dom, gimple phi,
                                  tree *true_arg_p, tree *false_arg_p)
{
  basic_block bb = gimple_bb (phi);
  edge true_edge, false_edge, tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  if (!arg0 || !arg1)
    return false;

  if (true_arg_p)
    *true_arg_p = arg0;
  if (false_arg_p)
    *false_arg_p = arg1;

  return true;
}
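
/* Illustration (a sketch) of the shape this function expects:

     dom:  if (cond_1) goto bb1; else goto bb2;
     bb1:  ...
     bb2:  ...
     bb:   x_3 = PHI <a_4 (bb1), b_5 (bb2)>

   Here A_4 is stored to *TRUE_ARG_P and B_5 to *FALSE_ARG_P; the edges may
   also pass through single-predecessor forwarder blocks.  */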

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose the loop so that we
   preserve whether STMT is executed.  It also fills in other related
   information in LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop
   it is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      use_operand_p use_p;
      unsigned min_cost = UINT_MAX;
      unsigned total_cost = 0;
      struct lim_aux_data *def_data;

      /* We will end up promoting dependencies to be unconditionally
         evaluated.  For this reason the PHI cost (and thus the
         cost we remove from the loop by doing the invariant motion)
         is that of the cheapest PHI argument dependency chain.  */
      FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE)
        {
          val = USE_FROM_PTR (use_p);
          if (TREE_CODE (val) != SSA_NAME)
            continue;
          if (!add_dependency (val, lim_data, loop, false))
            return false;
          def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
          if (def_data)
            {
              min_cost = MIN (min_cost, def_data->cost);
              total_cost += def_data->cost;
            }
        }

      lim_data->cost += min_cost;

      if (gimple_phi_num_args (stmt) > 1)
        {
          basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
          gimple cond;
          if (gsi_end_p (gsi_last_bb (dom)))
            return false;
          cond = gsi_stmt (gsi_last_bb (dom));
          if (gimple_code (cond) != GIMPLE_COND)
            return false;
          /* Verify that this is an extended form of a diamond and
             the PHI arguments are completely controlled by the
             predicate in DOM.  */
          if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL))
            return false;

          /* Fold in dependencies and cost of the condition.  */
          FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE)
            {
              if (!add_dependency (val, lim_data, loop, false))
                return false;
              def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
              if (def_data)
                total_cost += def_data->cost;
            }

          /* We want to avoid unconditionally executing very expensive
             operations.  As costs for our dependencies cannot be
             negative, just claim we are not invariant for this case.
             We also are not sure whether the control-flow inside the
             loop will vanish.  */
          if (total_cost - min_cost >= 2 * LIM_EXPENSIVE
              && !(min_cost != 0
                   && total_cost / min_cost <= 2))
            return false;

          /* Assume that the control-flow in the loop will vanish.
             ???  We should verify this and not artificially increase
             the cost if that is not the case.  */
          lim_data->cost += stmt_cost (stmt);
        }

      return true;
    }
  else
    FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
      if (!add_dependency (val, lim_data, loop, true))
        return false;

  if (gimple_vuse (stmt))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
        {
          lim_data->max_loop
                  = outermost_indep_loop (lim_data->max_loop, loop, ref);
          if (!lim_data->max_loop)
            return false;
        }
      else
        {
          if ((val = gimple_vuse (stmt)) != NULL_TREE)
            {
              if (!add_dependency (val, lim_data, loop, false))
                return false;
            }
        }
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}
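
/* Illustration (a sketch) of the PHI case above: a PHI node

     x_1 = PHI <a_2 (true edge), b_3 (false edge)>

   whose arguments are controlled by an invariant condition can later be
   rewritten as x_1 = cond ? a_2 : b_3 and hoisted; its cost is accounted
   as the cheapest argument dependency chain plus the cost of the
   condition itself.  */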

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct depend *dep;
  struct lim_aux_data *lim_data;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
                                  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
              || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  for (dep = lim_data->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

/* Determines an outermost loop out of which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use
   profiling information to set it more sanely.  */

static void
set_profitable_level (gimple stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}
934
 
935
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */
936
 
937
static gimple
938
rewrite_reciprocal (gimple_stmt_iterator *bsi)
939
{
940
  gimple stmt, stmt1, stmt2;
941
  tree var, name, lhs, type;
942
  tree real_one;
943
  gimple_stmt_iterator gsi;
944
 
945
  stmt = gsi_stmt (*bsi);
946
  lhs = gimple_assign_lhs (stmt);
947
  type = TREE_TYPE (lhs);
948
 
949
  var = create_tmp_var (type, "reciptmp");
950
  add_referenced_var (var);
951
  DECL_GIMPLE_REG_P (var) = 1;
952
 
953
  real_one = build_one_cst (type);
954
 
955
  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
956
                var, real_one, gimple_assign_rhs2 (stmt));
957
  name = make_ssa_name (var, stmt1);
958
  gimple_assign_set_lhs (stmt1, name);
959
 
960
  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
961
                                        gimple_assign_rhs1 (stmt));
962
 
963
  /* Replace division stmt with reciprocal and multiply stmts.
964
     The multiply stmt is not invariant, so update iterator
965
     and avoid rescanning.  */
966
  gsi = *bsi;
967
  gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
968
  gsi_replace (&gsi, stmt2, true);
969
 
970
  /* Continue processing with invariant reciprocal statement.  */
971
  return stmt1;
972
}
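
/* Illustration (a sketch, valid only under -funsafe-math-optimizations):

     for (i = 0; i < n; i++)
       a[i] = b[i] / c;

   is rewritten so that the loop multiplies by a reciprocal,

     recip = 1.0 / c;
     for (i = 0; i < n; i++)
       a[i] = b[i] * recip;

   and the reciprocal statement is the invariant one that gets hoisted.  */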

/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gimple stmt, use_stmt, stmt1, stmt2;
  tree lhs, var, name, t, a, b;
  use_operand_p use;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || gimple_code (use_stmt) != GIMPLE_COND)
    return stmt;
  if (gimple_cond_lhs (use_stmt) != lhs
      || (gimple_cond_code (use_stmt) != NE_EXPR
          && gimple_cond_code (use_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
          || !has_single_use (t))
        return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
        return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      gimple_stmt_iterator rsi;

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
                       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt1);
      gimple_assign_set_lhs (stmt1, name);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt2);
      gimple_assign_set_lhs (stmt2, name);

      /* Replace the SSA_NAME we compare against zero.  Adjust
         the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));

      /* Don't use gsi_replace here, none of the new assignments sets
         the variable originally set in stmt.  Move bsi to stmt1, and
         then remove the original stmt, so that we get a chance to
         retain debug info for it.  */
      rsi = *bsi;
      gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
      gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
      gsi_remove (&rsi, true);

      return stmt1;
    }

  return stmt;
}
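
/* Illustration (a sketch): with B invariant in the loop but A not,

     for (...)
       if ((a >> b) & 1)
         ...

   becomes

     tmp = 1 << b;      <- invariant, hoisted later
     for (...)
       if (a & tmp)
         ...

   trading the per-iteration shift for a single shift outside the loop.  */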

/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                              basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
             bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  /* Look at PHI nodes, but only if there are at most two.
     ???  We could relax this further by post-processing the inserted
     code and transforming adjacent cond-exprs with the same predicate
     to control flow again.  */
  bsi = gsi_start_phis (bb);
  if (!gsi_end_p (bsi)
      && ((gsi_next (&bsi), gsi_end_p (bsi))
          || (gsi_next (&bsi), gsi_end_p (bsi))))
    for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
      {
        stmt = gsi_stmt (bsi);

        pos = movement_possibility (stmt);
        if (pos == MOVE_IMPOSSIBLE)
          continue;

        lim_data = init_lim_data (stmt);
        lim_data->always_executed_in = outermost;

        if (!determine_max_movement (stmt, false))
          {
            lim_data->max_loop = NULL;
            continue;
          }

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            print_gimple_stmt (dump_file, stmt, 2, 0);
            fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
                     loop_depth (lim_data->max_loop),
                     lim_data->cost);
          }

        if (lim_data->cost >= LIM_EXPENSIVE)
          set_profitable_level (stmt);
      }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
        {
          if (nonpure_call_p (stmt))
            {
              maybe_never = true;
              outermost = NULL;
            }
          /* Make sure to note always_executed_in for stores to make
             store-motion work.  */
          else if (stmt_makes_single_store (stmt))
            {
              struct lim_aux_data *lim_data = init_lim_data (stmt);
              lim_data->always_executed_in = outermost;
            }
          continue;
        }

      if (is_gimple_assign (stmt)
          && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
              == GIMPLE_BINARY_RHS))
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree op1 = gimple_assign_rhs2 (stmt);
          struct loop *ol1 = outermost_invariant_loop (op1,
                                        loop_containing_stmt (stmt));

          /* If the divisor is invariant, convert a/b to a*(1/b), allowing
             the reciprocal to be hoisted out of the loop and saving an
             expensive divide.  */
          if (pos == MOVE_POSSIBLE
              && gimple_assign_rhs_code (stmt) == RDIV_EXPR
              && flag_unsafe_math_optimizations
              && !flag_trapping_math
              && ol1 != NULL
              && outermost_invariant_loop (op0, ol1) == NULL)
            stmt = rewrite_reciprocal (&bsi);

          /* If the shift count is invariant, convert (A >> B) & 1 to
             A & (1 << B), allowing the bit mask to be hoisted out of the
             loop and saving an expensive shift.  */
          if (pos == MOVE_POSSIBLE
              && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
              && integer_onep (op1)
              && TREE_CODE (op0) == SSA_NAME
              && has_single_use (op0))
            stmt = rewrite_bittest (&bsi);
        }

      lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
        continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
        {
          lim_data->max_loop = NULL;
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_gimple_stmt (dump_file, stmt, 2, 0);
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
                   loop_depth (lim_data->max_loop),
                   lim_data->cost);
        }

      if (lim_data->cost >= LIM_EXPENSIVE)
        set_profitable_level (stmt);
    }
}

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data,
                        basic_block bb)
{
  struct loop *level;
  gimple_stmt_iterator bsi;
  gimple stmt;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
    {
      gimple new_stmt;
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
        {
          gsi_next (&bsi);
          continue;
        }

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
        {
          gsi_next (&bsi);
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving PHI node\n");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }

      if (gimple_phi_num_args (stmt) == 1)
        {
          tree arg = PHI_ARG_DEF (stmt, 0);
          new_stmt = gimple_build_assign_with_ops (TREE_CODE (arg),
                                                   gimple_phi_result (stmt),
                                                   arg, NULL_TREE);
          SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
        }
      else
        {
          basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
          gimple cond = gsi_stmt (gsi_last_bb (dom));
          tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
          /* Get the PHI arguments corresponding to the true and false
             edges of COND.  */
          extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1);
          gcc_assert (arg0 && arg1);
          t = build2 (gimple_cond_code (cond), boolean_type_node,
                      gimple_cond_lhs (cond), gimple_cond_rhs (cond));
          new_stmt = gimple_build_assign_with_ops3 (COND_EXPR,
                                                    gimple_phi_result (stmt),
                                                    t, arg0, arg1);
          SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
          *((unsigned int *)(dw_data->global_data)) |= TODO_cleanup_cfg;
        }
      gsi_insert_on_edge (loop_preheader_edge (level), new_stmt);
      remove_phi_node (&bsi, false);
    }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
        {
          gsi_next (&bsi);
          continue;
        }

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
        {
          gsi_next (&bsi);
          continue;
        }

      /* We do not really want to move conditionals out of the loop; we just
         placed them here to force their operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving statement\n");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }

      mark_virtual_ops_for_renaming (stmt);
      gsi_insert_on_edge (loop_preheader_edge (level), stmt);
      gsi_remove (&bsi, false);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static unsigned int
move_computations (void)
{
  struct dom_walk_data walk_data;
  unsigned int todo = 0;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.global_data = &todo;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p (cfun))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  return todo;
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      max_loop = outermost_invariant_loop (step, loop);
      if (!max_loop)
        return false;

      max_loop = outermost_invariant_loop (lbound, loop);
      if (!max_loop)
        return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* If OP is an SSA_NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */

static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
  gimple stmt;

  if (!op
      || is_gimple_min_invariant (op))
    return;

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  stmt = SSA_NAME_DEF_STMT (op);
  if (gimple_nop_p (stmt))
    return;

  set_level (stmt, orig_loop, loop);
}

/* Forces statements defining invariants in REF (and *INDEX) to be moved out
   of the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback
   for for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}

/* Releases the list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}

/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  free (mem);
}

/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);
  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;

  return ref;
}

/* Allocates and returns a new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}

/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
                           loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}

/* Marks reference REF as stored in LOOP.  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}
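
/* Illustration (a sketch): for a store to *P_1 in the innermost loop of

     loop 1
       loop 2
         loop 3
           *p_1 = ...;

   mark_ref_stored sets the bits of loops 3, 2 and 1 in REF->stored,
   stopping early as soon as it finds a bit that is already set.  */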

/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  */

static void
gather_mem_refs_stmt (struct loop *loop, gimple stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  bool is_stored;
  unsigned id;

  if (!gimple_vuse (stmt))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (error_mark_node, 0, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Unanalyzed memory reference %u: ", id);
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
        }
      if (gimple_vdef (stmt))
        mark_ref_stored (ref, loop);
      record_mem_ref_loc (ref, loop, stmt, mem);
      return;
    }

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    {
      ref = (mem_ref_p) *slot;
      id = ref->id;
    }
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Memory reference %u: ", id);
          print_generic_expr (dump_file, ref->mem, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
    }
  if (is_stored)
    mark_ref_stored (ref, loop);

  record_mem_ref_loc (ref, loop, stmt, mem);
  return;
}
1635
 
1636
/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
        continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        gather_mem_refs_stmt (loop, gsi_stmt (bsi));
    }

  /* Propagate the information about accessed memory references up
     the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
        continue;

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}

/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

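  /* For every reference that occurs in LOOP and is stored in it, record
     it in ALL_REFS_STORED_IN_LOOP for LOOP and each enclosing loop.  */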
  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop; sloop != current_loops->tree_root;
           sloop = loop_outer (sloop))
        if (bitmap_bit_p (ref->stored, loop->num))
          {
            bitmap refs_stored
              = VEC_index (bitmap, memory_accesses.all_refs_stored_in_loop,
                           sloop->num);
            bitmap_set_bit (refs_stored, ref->id);
          }
    }
}

/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}

/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;

  memory_accesses.refs
          = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
                                            number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
                                                number_of_loops ());
  memory_accesses.all_refs_stored_in_loop = VEC_alloc (bitmap, heap,
                                                       number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_stored_in_loop, empty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}

/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  */
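
/* For instance (illustrative): two accesses *p and *(p + 1) based on the
   same pointer expand to affine offsets that differ by at least the
   access size, so they cannot overlap and false is returned; accesses
   whose bases cannot be related are conservatively reported as
   aliasing.  */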
1752
 
1753
static bool
1754
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
1755
{
1756
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
1757
     object and their offset differ in such a way that the locations cannot
1758
     overlap, then they cannot alias.  */
1759
  double_int size1, size2;
1760
  aff_tree off1, off2;
1761
 
1762
  /* Perform basic offset and type-based disambiguation.  */
1763
  if (!refs_may_alias_p (mem1, mem2))
1764
    return false;
1765
 
1766
  /* The expansion of addresses may be a bit expensive, thus we only do
1767
     the check at -O2 and higher optimization levels.  */
1768
  if (optimize < 2)
1769
    return true;
1770
 
1771
  get_inner_reference_aff (mem1, &off1, &size1);
1772
  get_inner_reference_aff (mem2, &off2, &size2);
1773
  aff_combination_expand (&off1, ttae_cache);
1774
  aff_combination_expand (&off2, ttae_cache);
1775
  aff_combination_scale (&off1, double_int_minus_one);
1776
  aff_combination_add (&off2, &off1);
1777
 
1778
  if (aff_comb_cannot_overlap_p (&off2, size1, size2))
1779
    return false;
1780
 
1781
  return true;
1782
}
1783
 
1784
/* Rewrites location LOC to use TMP_VAR.  */

static void
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
{
  mark_virtual_ops_for_renaming (loc->stmt);
  *loc->ref = tmp_var;
  update_stmt (loc->stmt);
}

/* Adds all locations of REF in LOOP and its subloops to LOCS.  */

static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
                      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
        {
          FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
            VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
        }
    }

  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}

/* Rewrites all references to REF in LOOP to use variable TMP_VAR.  */

static void
rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
  unsigned i;
  mem_ref_loc_p loc;
  VEC (mem_ref_loc_p, heap) *locs = NULL;

  get_all_locs_in_loop (loop, ref, &locs);
  FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
    rewrite_mem_ref_loc (loc, tmp_var);
  VEC_free (mem_ref_loc_p, heap, locs);
}

/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}

/* Stores the name for the temporary variable that replaces REF into
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MEM_REF:
    case TARGET_MEM_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case ADDR_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
        name = "F";
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
        name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    case INTEGER_CST:
      /* Nothing.  */
      break;

    default:
      gcc_unreachable ();
    }
}

/* Determines the name for the temporary variable that replaces REF.
   The name is accumulated in the lsm_tmp_name variable.
   N is appended to the name of the temporary.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
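
/* For illustration (the names are only examples): for a reference s.f,
   with s a variable and f a named field, and N == 2, the functions
   above produce the name "s_f_lsm2"; for a reference *p and an N that
   is not a single digit, the result is "p__lsm".  */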

/* Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put into the preheader of the loop, and the
   assignments to the reference from the temporary variable are emitted
   on the exit edges.  */
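
/* For example (a sketch, with illustrative names), the net effect on

     while (cond)
       *p = f ();

   is roughly

     p__lsm = *p;
     while (cond)
       p__lsm = f ();
     *p = p__lsm;

   with the final store emitted on every exit edge of the loop.  */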

static void
execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
{
  tree tmp_var;
  unsigned i;
  gimple load, store;
  struct fmt_data fmt_data;
  edge ex;
  struct lim_aux_data *lim_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref->mem, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
                              get_lsm_tmp_name (ref->mem, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref->mem, force_move_till, &fmt_data);

  rewrite_mem_refs (loop, ref, tmp_var);

  /* Emit the load & stores.  */
  load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
  lim_data = init_lim_data (load);
  lim_data->max_loop = loop;
  lim_data->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  gsi_insert_on_edge (loop_latch_edge (loop), load);

  FOR_EACH_VEC_ELT (edge, exits, i, ex)
    {
      store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
      gsi_insert_on_edge (ex, store);
    }
}

/* Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
                         VEC (edge, heap) *exits)
{
  mem_ref_p ref;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      execute_sm (loop, exits, ref);
    }
}

/* Returns true if REF is always accessed in LOOP.  If STORED_P is true,
   make sure REF is always stored to in LOOP.  */

static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
{
  VEC (mem_ref_loc_p, heap) *locs = NULL;
  unsigned i;
  mem_ref_loc_p loc;
  bool ret = false;
  struct loop *must_exec;
  tree base;

  base = get_base_address (ref->mem);
  if (INDIRECT_REF_P (base)
      || TREE_CODE (base) == MEM_REF)
    base = TREE_OPERAND (base, 0);

  get_all_locs_in_loop (loop, ref, &locs);
  FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
    {
      if (!get_lim_data (loc->stmt))
        continue;

      /* If we require an always executed store, make sure the statement
         stores to the reference.  */
      if (stored_p)
        {
          tree lhs;
          if (!gimple_get_lhs (loc->stmt))
            continue;
          lhs = get_base_address (gimple_get_lhs (loc->stmt));
          if (!lhs)
            continue;
          if (INDIRECT_REF_P (lhs)
              || TREE_CODE (lhs) == MEM_REF)
            lhs = TREE_OPERAND (lhs, 0);
          if (lhs != base)
            continue;
        }

      must_exec = get_lim_data (loc->stmt)->always_executed_in;
      if (!must_exec)
        continue;

      if (must_exec == loop
          || flow_loop_nested_p (must_exec, loop))
        {
          ret = true;
          break;
        }
    }
  VEC_free (mem_ref_loc_p, heap, locs);

  return ret;
}

/* Returns true if REF1 and REF2 are independent.  */

static bool
refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
{
  if (ref1 == ref2
      || bitmap_bit_p (ref1->indep_ref, ref2->id))
    return true;
  if (bitmap_bit_p (ref1->dep_ref, ref2->id))
    return false;
  if (!MEM_ANALYZABLE (ref1)
      || !MEM_ANALYZABLE (ref2))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependency of refs %u and %u: ",
             ref1->id, ref2->id);

  if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
                            &memory_accesses.ttae_cache))
    {
      bitmap_set_bit (ref1->dep_ref, ref2->id);
      bitmap_set_bit (ref2->dep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "dependent.\n");
      return false;
    }
  else
    {
      bitmap_set_bit (ref1->indep_ref, ref2->id);
      bitmap_set_bit (ref2->indep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "independent.\n");
      return true;
    }
}

/* Records whether REF is independent in LOOP (according to INDEP).  */

static void
record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
{
  if (indep)
    bitmap_set_bit (ref->indep_loop, loop->num);
  else
    bitmap_set_bit (ref->dep_loop, loop->num);
}

/* Returns true if REF is independent of all other memory references in
   LOOP.  */

static bool
ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
{
  bitmap refs_to_check;
  unsigned i;
  bitmap_iterator bi;
  bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
  mem_ref_p aref;

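  /* If REF is itself stored in LOOP, any other reference in the loop may
     conflict with it; otherwise only the stored ones can.  */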
  if (stored)
    refs_to_check = VEC_index (bitmap,
                               memory_accesses.all_refs_in_loop, loop->num);
  else
    refs_to_check = VEC_index (bitmap,
                               memory_accesses.all_refs_stored_in_loop,
                               loop->num);

  EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
    {
      aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (!MEM_ANALYZABLE (aref)
          || !refs_independent_p (ref, aref))
        {
          ret = false;
          record_indep_loop (loop, aref, false);
          break;
        }
    }

  return ret;
}

/* Returns true if REF is independent of all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  */

static bool
ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
{
  bool ret;

  if (bitmap_bit_p (ref->indep_loop, loop->num))
    return true;
  if (bitmap_bit_p (ref->dep_loop, loop->num))
    return false;

  ret = ref_indep_loop_p_1 (loop, ref);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
             ref->id, loop->num, ret ? "independent" : "dependent");

  record_indep_loop (loop, ref, ret);

  return ret;
}

/* Returns true if we can perform store motion of REF from LOOP.  */

static bool
can_sm_ref_p (struct loop *loop, mem_ref_p ref)
{
  tree base;

  /* Can't hoist unanalyzable refs.  */
  if (!MEM_ANALYZABLE (ref))
    return false;

  /* Unless the reference is stored in the loop, there is nothing to do.  */
  if (!bitmap_bit_p (ref->stored, loop->num))
    return false;

  /* It should be movable.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
      || TREE_THIS_VOLATILE (ref->mem)
      || !for_each_index (&ref->mem, may_move_till, loop))
    return false;

  /* If it can throw, fail; we do not properly update EH info.  */
  if (tree_could_throw_p (ref->mem))
    return false;

  /* If it can trap, it must always be executed in LOOP.
     Readonly memory locations may trap when storing to them, but
     tree_could_trap_p is a predicate for rvalues, so check that
     explicitly.  */
  base = get_base_address (ref->mem);
  if ((tree_could_trap_p (ref->mem)
       || (DECL_P (base) && TREE_READONLY (base)))
      && !ref_always_accessed_p (loop, ref, true))
    return false;

  /* And it must be independent of all other memory references
     in LOOP.  */
  if (!ref_indep_loop_p (loop, ref))
    return false;

  return true;
}

/* Marks in REFS_TO_SM the references in LOOP for which store motion
   should be performed.  SM_EXECUTED is the set of references for which
   store motion was performed in one of the outer loops.  */

static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop->num);
  unsigned i;
  bitmap_iterator bi;
  mem_ref_p ref;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (can_sm_ref_p (loop, ref))
        bitmap_set_bit (refs_to_sm, i);
    }
}

/* Checks whether LOOP (with exits stored in EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
                      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  FOR_EACH_VEC_ELT (edge, exits, i, ex)
    if (ex->flags & (EDGE_ABNORMAL | EDGE_EH))
      return false;

  return true;
}

/* Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for which
   store motion was executed in one of the outer loops.  */

static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *subloop;
  bitmap sm_in_loop = BITMAP_ALLOC (NULL);

  if (loop_suitable_for_sm (loop, exits))
    {
      find_refs_for_sm (loop, sm_executed, sm_in_loop);
      hoist_memory_references (loop, sm_in_loop, exits);
    }
  VEC_free (edge, heap, exits);

  bitmap_ior_into (sm_executed, sm_in_loop);
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    store_motion_loop (subloop, sm_executed);
  bitmap_and_compl_into (sm_executed, sm_in_loop);
  BITMAP_FREE (sm_in_loop);
}

/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
store_motion (void)
{
  struct loop *loop;
  bitmap sm_executed = BITMAP_ALLOC (NULL);

  for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
    store_motion_loop (loop, sm_executed);

  BITMAP_FREE (sm_executed);
  gsi_commit_edge_inserts ();
}

/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for which
   execution of its header implies execution of bb.  CONTAINS_CALL is the
   bitmap of blocks that contain a nonpure call.  */
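
/* E.g. in

     while (1)
       {
         if (cond)
           something;
       }

   execution of the header implies execution of the COND test, but not of
   the block containing SOMETHING, so the guarded block does not get LOOP
   recorded as its ALWAYS_EXECUTED_IN.  */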

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (ALWAYS_EXECUTED_IN (loop->header) == NULL)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
        {
          edge_iterator ei;
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          if (TEST_BIT (contains_call, bb->index))
            break;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              break;
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }

      while (1)
        {
          SET_ALWAYS_EXECUTED_IN (last, loop);
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}

/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  gimple_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          if (nonpure_call_p (gsi_stmt (bsi)))
            break;
        }

      if (!gsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);

  lim_aux_data_map = pointer_map_create ();

  if (flag_tm)
    compute_transaction_bits ();
}

/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;
  unsigned i;
  bitmap b;

  FOR_EACH_BB (bb)
    SET_ALWAYS_EXECUTED_IN (bb, NULL);

  pointer_map_destroy (lim_aux_data_map);

  VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
  htab_delete (memory_accesses.refs);

  FOR_EACH_VEC_ELT (bitmap, memory_accesses.refs_in_loop, i, b)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.refs_in_loop);

  FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_in_loop, i, b)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);

  FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_stored_in_loop, i, b)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_stored_in_loop);

  if (memory_accesses.ttae_cache)
    pointer_map_destroy (memory_accesses.ttae_cache);
}

/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of register pressure.  */

unsigned int
tree_ssa_lim (void)
{
  unsigned int todo;

  tree_ssa_lim_initialize ();

  /* Gathers information about memory accesses in the loops.  */
  analyze_memory_references ();

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* Execute store motion.  Force the necessary invariants to be moved
     out of the loops as well.  */
  store_motion ();

  /* Move the expressions that are expensive enough.  */
  todo = move_computations ();

  tree_ssa_lim_finalize ();

  return todo;
}
