openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-ssa-loop-im.c (rev 327)

/* Loop invariant motion.
2
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
3
   Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it
8
under the terms of the GNU General Public License as published by the
9
Free Software Foundation; either version 3, or (at your option) any
10
later version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT
13
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
#include "tree.h"
26
#include "rtl.h"
27
#include "tm_p.h"
28
#include "hard-reg-set.h"
29
#include "basic-block.h"
30
#include "output.h"
31
#include "diagnostic.h"
32
#include "tree-flow.h"
33
#include "tree-dump.h"
34
#include "timevar.h"
35
#include "cfgloop.h"
36
#include "domwalk.h"
37
#include "params.h"
38
#include "tree-pass.h"
39
#include "flags.h"
40
#include "real.h"
41
#include "hashtab.h"
42
#include "tree-affine.h"
43
#include "pointer-set.h"
44
#include "tree-ssa-propagate.h"
45
 
46
/* TODO:  Support for predicated code motion.  I.e.
47
 
48
   while (1)
49
     {
50
       if (cond)
51
         {
52
           a = inv;
53
           something;
54
         }
55
     }
56
 
57
   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to
59
 
60
   if (cond)
61
     a = inv;
62
   while (1)
63
     {
64
       if (cond)
65
         something;
66
     }  */
67
 
68
/* A type for the list of statements that have to be moved in order to be able
69
   to hoist an invariant computation.  */
70
 
71
struct depend
72
{
73
  gimple stmt;
74
  struct depend *next;
75
};
76
 
77
/* The auxiliary data kept for each statement.  */
78
 
79
struct lim_aux_data
80
{
81
  struct loop *max_loop;        /* The outermost loop in which the statement
                                   is invariant.  */
83
 
84
  struct loop *tgt_loop;        /* The loop out of which we want to move the
                                   invariant.  */
86
 
87
  struct loop *always_executed_in;
88
                                /* The outermost loop for which we are sure
                                   the statement is executed if the loop
                                   is entered.  */
91
 
92
  unsigned cost;                /* Cost of the computation performed by the
93
                                   statement.  */
94
 
95
  struct depend *depends;       /* List of statements that must also be hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside the
                                   MAX_LOOP loop.  */
100
};
101
 
102
/* Maps statements to their lim_aux_data.  */
103
 
104
static struct pointer_map_t *lim_aux_data_map;
105
 
106
/* Description of a memory reference location.  */
107
 
108
typedef struct mem_ref_loc
109
{
110
  tree *ref;                    /* The reference itself.  */
111
  gimple stmt;                  /* The statement in which it occurs.  */
112
} *mem_ref_loc_p;
113
 
114
DEF_VEC_P(mem_ref_loc_p);
115
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);
116
 
117
/* The list of memory reference locations in a loop.  */
118
 
119
typedef struct mem_ref_locs
120
{
121
  VEC (mem_ref_loc_p, heap) *locs;
122
} *mem_ref_locs_p;
123
 
124
DEF_VEC_P(mem_ref_locs_p);
125
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);
126
 
127
/* Description of a memory reference.  */
128
 
129
typedef struct mem_ref
130
{
131
  tree mem;                     /* The memory itself.  */
132
  unsigned id;                  /* ID assigned to the memory reference
133
                                   (its index in memory_accesses.refs_list)  */
134
  hashval_t hash;               /* Its hash value.  */
135
  bitmap stored;                /* The set of loops in which this memory
                                   location is stored to.  */
137
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
138
                                /* The locations of the accesses.  Vector
139
                                   indexed by the loop number.  */
140
  bitmap vops;                  /* Vops corresponding to this memory
141
                                   location.  */
142
 
143
  /* The following sets are computed on demand.  We keep both the set and
144
     its complement, so that we know whether the information was
145
     already computed or not.  */
146
  bitmap indep_loop;            /* The set of loops in which the memory
                                   reference is independent, meaning:
                                   If it is stored in the loop, this store
                                     is independent of all other loads and
                                     stores.
                                   If it is only loaded, then it is independent
                                     of all stores in the loop.  */
153
  bitmap dep_loop;              /* The complement of INDEP_LOOP.  */
154
 
155
  bitmap indep_ref;             /* The set of memory references of which
                                   this reference is independent.  */
  bitmap dep_ref;               /* The complement of INDEP_REF.  */
158
} *mem_ref_p;
159
 
160
DEF_VEC_P(mem_ref_p);
161
DEF_VEC_ALLOC_P(mem_ref_p, heap);
162
 
163
DEF_VEC_P(bitmap);
164
DEF_VEC_ALLOC_P(bitmap, heap);
165
 
166
DEF_VEC_P(htab_t);
167
DEF_VEC_ALLOC_P(htab_t, heap);
168
 
169
/* Description of memory accesses in loops.  */
170
 
171
static struct
172
{
173
  /* The hash table of memory references accessed in loops.  */
174
  htab_t refs;
175
 
176
  /* The list of memory references.  */
177
  VEC (mem_ref_p, heap) *refs_list;
178
 
179
  /* The set of memory references accessed in each loop.  */
180
  VEC (bitmap, heap) *refs_in_loop;
181
 
182
  /* The set of memory references accessed in each loop, including
183
     subloops.  */
184
  VEC (bitmap, heap) *all_refs_in_loop;
185
 
186
  /* The set of virtual operands clobbered in a given loop.  */
187
  VEC (bitmap, heap) *clobbered_vops;
188
 
189
  /* Map from the pair (loop, virtual operand) to the set of refs that
190
     touch the virtual operand in the loop.  */
191
  VEC (htab_t, heap) *vop_ref_map;
192
 
193
  /* Cache for expanding memory addresses.  */
194
  struct pointer_map_t *ttae_cache;
195
} memory_accesses;
196
 
197
static bool ref_indep_loop_p (struct loop *, mem_ref_p);
198
 
199
/* Minimum cost of an expensive expression.  */
200
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))
201
 
202
/* The outermost loop for which execution of the header guarantees that the
203
   block will be executed.  */
204
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
205
 
206
static struct lim_aux_data *
207
init_lim_data (gimple stmt)
208
{
209
  void **p = pointer_map_insert (lim_aux_data_map, stmt);
210
 
211
  *p = XCNEW (struct lim_aux_data);
212
  return (struct lim_aux_data *) *p;
213
}
214
 
215
static struct lim_aux_data *
216
get_lim_data (gimple stmt)
217
{
218
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
219
  if (!p)
220
    return NULL;
221
 
222
  return (struct lim_aux_data *) *p;
223
}
224
 
225
/* Releases the memory occupied by DATA.  */
226
 
227
static void
228
free_lim_aux_data (struct lim_aux_data *data)
229
{
230
  struct depend *dep, *next;
231
 
232
  for (dep = data->depends; dep; dep = next)
233
    {
234
      next = dep->next;
235
      free (dep);
236
    }
237
  free (data);
238
}
239
 
240
static void
241
clear_lim_data (gimple stmt)
242
{
243
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
244
  if (!p)
245
    return;
246
 
247
  free_lim_aux_data ((struct lim_aux_data *) *p);
248
  *p = NULL;
249
}
250
 
251
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
252
   kinds of situations handled; in each of these cases, the memory reference
253
   and DATA are passed to the callback:
254
 
255
   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
256
   pass the pointer to the index to the callback.
257
 
258
   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
259
   pointer to addr to the callback.
260
 
261
   If the callback returns false, the whole search stops and false is returned.
262
   Otherwise the function returns true after traversing through the whole
263
   reference *ADDR_P.  */
264
 
265
bool
266
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
267
{
268
  tree *nxt, *idx;
269
 
270
  for (; ; addr_p = nxt)
271
    {
272
      switch (TREE_CODE (*addr_p))
273
        {
274
        case SSA_NAME:
275
          return cbck (*addr_p, addr_p, data);
276
 
277
        case MISALIGNED_INDIRECT_REF:
278
        case ALIGN_INDIRECT_REF:
279
        case INDIRECT_REF:
280
          nxt = &TREE_OPERAND (*addr_p, 0);
281
          return cbck (*addr_p, nxt, data);
282
 
283
        case BIT_FIELD_REF:
284
        case VIEW_CONVERT_EXPR:
285
        case REALPART_EXPR:
286
        case IMAGPART_EXPR:
287
          nxt = &TREE_OPERAND (*addr_p, 0);
288
          break;
289
 
290
        case COMPONENT_REF:
291
          /* If the component has varying offset, it behaves like index
292
             as well.  */
293
          idx = &TREE_OPERAND (*addr_p, 2);
294
          if (*idx
295
              && !cbck (*addr_p, idx, data))
296
            return false;
297
 
298
          nxt = &TREE_OPERAND (*addr_p, 0);
299
          break;
300
 
301
        case ARRAY_REF:
302
        case ARRAY_RANGE_REF:
303
          nxt = &TREE_OPERAND (*addr_p, 0);
304
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
305
            return false;
306
          break;
307
 
308
        case VAR_DECL:
309
        case PARM_DECL:
310
        case STRING_CST:
311
        case RESULT_DECL:
312
        case VECTOR_CST:
313
        case COMPLEX_CST:
314
        case INTEGER_CST:
315
        case REAL_CST:
316
        case FIXED_CST:
317
        case CONSTRUCTOR:
318
          return true;
319
 
320
        case ADDR_EXPR:
321
          gcc_assert (is_gimple_min_invariant (*addr_p));
322
          return true;
323
 
324
        case TARGET_MEM_REF:
325
          idx = &TMR_BASE (*addr_p);
326
          if (*idx
327
              && !cbck (*addr_p, idx, data))
328
            return false;
329
          idx = &TMR_INDEX (*addr_p);
330
          if (*idx
331
              && !cbck (*addr_p, idx, data))
332
            return false;
333
          return true;
334
 
335
        default:
336
          gcc_unreachable ();
337
        }
338
    }
339
}
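 
/* Illustrative sketch (not from the original file): a minimal callback
   matching the CBCK parameter of for_each_index above.  The helper name
   count_index_cb and its usage are hypothetical; it merely counts the
   indices of a reference.  */

static bool
count_index_cb (tree base ATTRIBUTE_UNUSED, tree *idx ATTRIBUTE_UNUSED,
                void *data)
{
  unsigned *n = (unsigned *) data;

  (*n)++;               /* Record one more index.  */
  return true;          /* Returning false would stop the traversal.  */
}

/* Hypothetical usage:

     unsigned n = 0;
     for_each_index (&ref, count_index_cb, &n);  */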
340
 
341
/* If it is possible to hoist the statement STMT unconditionally,
342
   returns MOVE_POSSIBLE.
343
   If it is possible to hoist the statement STMT, but we must avoid executing
   it if it would not be executed in the original program (e.g.
345
   because it may trap), return MOVE_PRESERVE_EXECUTION.
346
   Otherwise return MOVE_IMPOSSIBLE.  */
347
 
348
enum move_pos
349
movement_possibility (gimple stmt)
350
{
351
  tree lhs;
352
  enum move_pos ret = MOVE_POSSIBLE;
353
 
354
  if (flag_unswitch_loops
355
      && gimple_code (stmt) == GIMPLE_COND)
356
    {
357
      /* If we perform unswitching, force the operands of the invariant
358
         condition to be moved out of the loop.  */
359
      return MOVE_POSSIBLE;
360
    }
361
 
362
  if (gimple_get_lhs (stmt) == NULL_TREE)
363
    return MOVE_IMPOSSIBLE;
364
 
365
  if (gimple_vdef (stmt))
366
    return MOVE_IMPOSSIBLE;
367
 
368
  if (stmt_ends_bb_p (stmt)
369
      || gimple_has_volatile_ops (stmt)
370
      || gimple_has_side_effects (stmt)
371
      || stmt_could_throw_p (stmt))
372
    return MOVE_IMPOSSIBLE;
373
 
374
  if (is_gimple_call (stmt))
375
    {
376
      /* While a pure or const call is guaranteed to have no side effects, we
377
         cannot move it arbitrarily.  Consider code like
378
 
379
         char *s = something ();
380
 
381
         while (1)
382
           {
383
             if (s)
384
               t = strlen (s);
385
             else
386
               t = 0;
387
           }
388
 
389
         Here the strlen call cannot be moved out of the loop, even though
390
         s is invariant.  In addition to possibly creating a call with
391
         invalid arguments, moving out a function call that is not executed
392
         may cause performance regressions in case the call is costly and
393
         not executed at all.  */
394
      ret = MOVE_PRESERVE_EXECUTION;
395
      lhs = gimple_call_lhs (stmt);
396
    }
397
  else if (is_gimple_assign (stmt))
398
    lhs = gimple_assign_lhs (stmt);
399
  else
400
    return MOVE_IMPOSSIBLE;
401
 
402
  if (TREE_CODE (lhs) == SSA_NAME
403
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
404
    return MOVE_IMPOSSIBLE;
405
 
406
  if (TREE_CODE (lhs) != SSA_NAME
407
      || gimple_could_trap_p (stmt))
408
    return MOVE_PRESERVE_EXECUTION;
409
 
410
  return ret;
411
}
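 
/* Illustrative example (hypothetical, for exposition): in

     for (i = 0; i < n; i++)
       if (m != 0)
         a[i] = k / m;

   the computation k / m is loop invariant, but the division may trap when
   m == 0, so movement_possibility classifies the statement computing it as
   MOVE_PRESERVE_EXECUTION: it may only be hoisted if it is guaranteed to be
   executed whenever the loop is entered.  */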
412
 
413
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
414
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */
417
 
418
static struct loop *
419
outermost_invariant_loop (tree def, struct loop *loop)
420
{
421
  gimple def_stmt;
422
  basic_block def_bb;
423
  struct loop *max_loop;
424
  struct lim_aux_data *lim_data;
425
 
426
  if (!def)
427
    return superloop_at_depth (loop, 1);
428
 
429
  if (TREE_CODE (def) != SSA_NAME)
430
    {
431
      gcc_assert (is_gimple_min_invariant (def));
432
      return superloop_at_depth (loop, 1);
433
    }
434
 
435
  def_stmt = SSA_NAME_DEF_STMT (def);
436
  def_bb = gimple_bb (def_stmt);
437
  if (!def_bb)
438
    return superloop_at_depth (loop, 1);
439
 
440
  max_loop = find_common_loop (loop, def_bb->loop_father);
441
 
442
  lim_data = get_lim_data (def_stmt);
443
  if (lim_data != NULL && lim_data->max_loop != NULL)
444
    max_loop = find_common_loop (max_loop,
445
                                 loop_outer (lim_data->max_loop));
446
  if (max_loop == loop)
447
    return NULL;
448
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
449
 
450
  return max_loop;
451
}
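 
/* Illustrative example (hypothetical, for exposition): if DEF is a constant
   or is defined outside all loops, the function above returns the outermost
   loop enclosing LOOP (the superloop at depth 1), so the expression using
   DEF could be hoisted all the way out.  If DEF is defined by a
   non-invariant statement inside LOOP, NULL is returned, since the value of
   DEF is not invariant even in LOOP itself.  */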
452
 
453
/* DATA is a structure containing information associated with a statement
454
   inside LOOP.  DEF is one of the operands of this statement.
455
 
456
   Find the outermost loop enclosing LOOP in which the value of DEF is invariant
   and record this in the DATA->max_loop field.  If DEF itself is defined inside
   this loop as well (i.e. we need to hoist it out of the loop if we want
   to hoist the statement represented by DATA), record the statement in which
   DEF is defined in the DATA->depends list.  Additionally, if ADD_COST is true,
   add the cost of the computation of DEF to DATA->cost.
462
 
463
   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */
464
 
465
static bool
466
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
467
                bool add_cost)
468
{
469
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
470
  basic_block def_bb = gimple_bb (def_stmt);
471
  struct loop *max_loop;
472
  struct depend *dep;
473
  struct lim_aux_data *def_data;
474
 
475
  if (!def_bb)
476
    return true;
477
 
478
  max_loop = outermost_invariant_loop (def, loop);
479
  if (!max_loop)
480
    return false;
481
 
482
  if (flow_loop_nested_p (data->max_loop, max_loop))
483
    data->max_loop = max_loop;
484
 
485
  def_data = get_lim_data (def_stmt);
486
  if (!def_data)
487
    return true;
488
 
489
  if (add_cost
490
      /* Only add the cost if the statement defining DEF is inside LOOP,
491
         i.e. if it is likely that by moving the invariants dependent
492
         on it, we will be able to avoid creating a new register for
493
         it (since it will be only used in these dependent invariants).  */
494
      && def_bb->loop_father == loop)
495
    data->cost += def_data->cost;
496
 
497
  dep = XNEW (struct depend);
498
  dep->stmt = def_stmt;
499
  dep->next = data->depends;
500
  data->depends = dep;
501
 
502
  return true;
503
}
504
 
505
/* Returns an estimate of the cost of statement STMT.  TODO -- the values here
506
   are just ad-hoc constants.  The estimates should be based on target-specific
507
   values.  */
508
 
509
static unsigned
510
stmt_cost (gimple stmt)
511
{
512
  tree fndecl;
513
  unsigned cost = 1;
514
 
515
  /* Always try to create possibilities for unswitching.  */
516
  if (gimple_code (stmt) == GIMPLE_COND)
517
    return LIM_EXPENSIVE;
518
 
519
  /* Hoisting memory references out should almost surely be a win.  */
520
  if (gimple_references_memory_p (stmt))
521
    cost += 20;
522
 
523
  if (is_gimple_call (stmt))
524
    {
525
      /* We should be hoisting calls if possible.  */
526
 
527
      /* Unless the call is a builtin_constant_p; this always folds to a
528
         constant, so moving it is useless.  */
529
      fndecl = gimple_call_fndecl (stmt);
530
      if (fndecl
531
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
532
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
533
        return 0;
534
 
535
      return cost + 20;
536
    }
537
 
538
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
539
    return cost;
540
 
541
  switch (gimple_assign_rhs_code (stmt))
542
    {
543
    case MULT_EXPR:
544
    case TRUNC_DIV_EXPR:
545
    case CEIL_DIV_EXPR:
546
    case FLOOR_DIV_EXPR:
547
    case ROUND_DIV_EXPR:
548
    case EXACT_DIV_EXPR:
549
    case CEIL_MOD_EXPR:
550
    case FLOOR_MOD_EXPR:
551
    case ROUND_MOD_EXPR:
552
    case TRUNC_MOD_EXPR:
553
    case RDIV_EXPR:
554
      /* Division and multiplication are usually expensive.  */
555
      cost += 20;
556
      break;
557
 
558
    case LSHIFT_EXPR:
559
    case RSHIFT_EXPR:
560
      cost += 20;
561
      break;
562
 
563
    default:
564
      break;
565
    }
566
 
567
  return cost;
568
}
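 
/* Illustrative example (hypothetical, for exposition): for an assignment
   such as a_1 = b_2 / c_3, stmt_cost above returns the base cost of 1 plus
   20 for the division, i.e. 21; whether that counts as expensive is
   controlled by the PARAM_LIM_EXPENSIVE parameter used by LIM_EXPENSIVE
   above.  */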
569
 
570
/* Finds the outermost loop between OUTER and LOOP in which the memory reference
571
   REF is independent.  If REF is not independent in LOOP, NULL is returned
572
   instead.  */
573
 
574
static struct loop *
575
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
576
{
577
  struct loop *aloop;
578
 
579
  if (bitmap_bit_p (ref->stored, loop->num))
580
    return NULL;
581
 
582
  for (aloop = outer;
583
       aloop != loop;
584
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
585
    if (!bitmap_bit_p (ref->stored, aloop->num)
586
        && ref_indep_loop_p (aloop, ref))
587
      return aloop;
588
 
589
  if (ref_indep_loop_p (loop, ref))
590
    return loop;
591
  else
592
    return NULL;
593
}
594
 
595
/* If there is a simple load or store to a memory reference in STMT, returns
596
   the location of the memory reference, and sets IS_STORE according to whether
597
   it is a store or load.  Otherwise, returns NULL.  */
598
 
599
static tree *
600
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
601
{
602
  tree *lhs;
603
  enum tree_code code;
604
 
605
  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
606
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
607
    return NULL;
608
 
609
  code = gimple_assign_rhs_code (stmt);
610
 
611
  lhs = gimple_assign_lhs_ptr (stmt);
612
 
613
  if (TREE_CODE (*lhs) == SSA_NAME)
614
    {
615
      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
616
          || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
617
        return NULL;
618
 
619
      *is_store = false;
620
      return gimple_assign_rhs1_ptr (stmt);
621
    }
622
  else if (code == SSA_NAME
623
           || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
624
               && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
625
    {
626
      *is_store = true;
627
      return lhs;
628
    }
629
  else
630
    return NULL;
631
}
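 
/* Illustrative example (hypothetical, for exposition): for a load such as
   x_1 = p_2->f the function above returns the address of the right-hand
   side and sets *IS_STORE to false; for a store such as p_2->f = x_1 (or
   p_2->f = 0) it returns the address of the left-hand side and sets
   *IS_STORE to true.  A statement like x_1 = y_2 + z_3 contains no simple
   memory reference, so NULL is returned.  */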
632
 
633
/* Returns the memory reference contained in STMT.  */
634
 
635
static mem_ref_p
636
mem_ref_in_stmt (gimple stmt)
637
{
638
  bool store;
639
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
640
  hashval_t hash;
641
  mem_ref_p ref;
642
 
643
  if (!mem)
644
    return NULL;
645
  gcc_assert (!store);
646
 
647
  hash = iterative_hash_expr (*mem, 0);
648
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);
649
 
650
  gcc_assert (ref != NULL);
651
  return ref;
652
}
653
 
654
/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose a loop such that
   we preserve whether STMT is executed.  It also fills in other related
   information in LIM_DATA (STMT).
660
 
661
   The function returns false if STMT cannot be hoisted outside of the loop it
662
   is defined in, and true otherwise.  */
663
 
664
static bool
665
determine_max_movement (gimple stmt, bool must_preserve_exec)
666
{
667
  basic_block bb = gimple_bb (stmt);
668
  struct loop *loop = bb->loop_father;
669
  struct loop *level;
670
  struct lim_aux_data *lim_data = get_lim_data (stmt);
671
  tree val;
672
  ssa_op_iter iter;
673
 
674
  if (must_preserve_exec)
675
    level = ALWAYS_EXECUTED_IN (bb);
676
  else
677
    level = superloop_at_depth (loop, 1);
678
  lim_data->max_loop = level;
679
 
680
  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
681
    if (!add_dependency (val, lim_data, loop, true))
682
      return false;
683
 
684
  if (gimple_vuse (stmt))
685
    {
686
      mem_ref_p ref = mem_ref_in_stmt (stmt);
687
 
688
      if (ref)
689
        {
690
          lim_data->max_loop
691
                  = outermost_indep_loop (lim_data->max_loop, loop, ref);
692
          if (!lim_data->max_loop)
693
            return false;
694
        }
695
      else
696
        {
697
          if ((val = gimple_vuse (stmt)) != NULL_TREE)
698
            {
699
              if (!add_dependency (val, lim_data, loop, false))
700
                return false;
701
            }
702
        }
703
    }
704
 
705
  lim_data->cost += stmt_cost (stmt);
706
 
707
  return true;
708
}
709
 
710
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
711
   and that one of the operands of this statement is computed by STMT.
712
   Ensure that STMT (together with all the statements that define its
713
   operands) is hoisted at least out of the loop LEVEL.  */
714
 
715
static void
716
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
717
{
718
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
719
  struct depend *dep;
720
  struct lim_aux_data *lim_data;
721
 
722
  stmt_loop = find_common_loop (orig_loop, stmt_loop);
723
  lim_data = get_lim_data (stmt);
724
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
725
    stmt_loop = find_common_loop (stmt_loop,
726
                                  loop_outer (lim_data->tgt_loop));
727
  if (flow_loop_nested_p (stmt_loop, level))
728
    return;
729
 
730
  gcc_assert (level == lim_data->max_loop
731
              || flow_loop_nested_p (lim_data->max_loop, level));
732
 
733
  lim_data->tgt_loop = level;
734
  for (dep = lim_data->depends; dep; dep = dep->next)
735
    set_level (dep->stmt, orig_loop, level);
736
}
737
 
738
/* Determines an outermost loop from which we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
740
   information to set it more sanely.  */
741
 
742
static void
743
set_profitable_level (gimple stmt)
744
{
745
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
746
}
747
 
748
/* Returns true if STMT is a call that has side effects.  */
749
 
750
static bool
751
nonpure_call_p (gimple stmt)
752
{
753
  if (gimple_code (stmt) != GIMPLE_CALL)
754
    return false;
755
 
756
  return gimple_has_side_effects (stmt);
757
}
758
 
759
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */
760
 
761
static gimple
762
rewrite_reciprocal (gimple_stmt_iterator *bsi)
763
{
764
  gimple stmt, stmt1, stmt2;
765
  tree var, name, lhs, type;
766
  tree real_one;
767
  gimple_stmt_iterator gsi;
768
 
769
  stmt = gsi_stmt (*bsi);
770
  lhs = gimple_assign_lhs (stmt);
771
  type = TREE_TYPE (lhs);
772
 
773
  var = create_tmp_var (type, "reciptmp");
774
  add_referenced_var (var);
775
  DECL_GIMPLE_REG_P (var) = 1;
776
 
777
  /* For vectors, create a VECTOR_CST full of 1's.  */
778
  if (TREE_CODE (type) == VECTOR_TYPE)
779
    {
780
      int i, len;
781
      tree list = NULL_TREE;
782
      real_one = build_real (TREE_TYPE (type), dconst1);
783
      len = TYPE_VECTOR_SUBPARTS (type);
784
      for (i = 0; i < len; i++)
785
        list = tree_cons (NULL, real_one, list);
786
      real_one = build_vector (type, list);
787
    }
788
  else
789
    real_one = build_real (type, dconst1);
790
 
791
  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
792
                var, real_one, gimple_assign_rhs2 (stmt));
793
  name = make_ssa_name (var, stmt1);
794
  gimple_assign_set_lhs (stmt1, name);
795
 
796
  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
797
                                        gimple_assign_rhs1 (stmt));
798
 
799
  /* Replace division stmt with reciprocal and multiply stmts.
800
     The multiply stmt is not invariant, so update iterator
801
     and avoid rescanning.  */
802
  gsi = *bsi;
803
  gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
804
  gsi_replace (&gsi, stmt2, true);
805
 
806
  /* Continue processing with invariant reciprocal statement.  */
807
  return stmt1;
808
}
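 
/* Illustrative example (hypothetical, for exposition): for a loop body
   containing x[i] = y[i] / d with d loop invariant, the division above is
   replaced by

     reciptmp_1 = 1.0 / d;
     x[i] = y[i] * reciptmp_1;

   and the reciprocal statement is returned; it is invariant and can later
   be hoisted out of the loop, while the multiplication stays in place.  */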
809
 
810
/* Check if the pattern at *BSI is a bittest of the form
811
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */
812
 
813
static gimple
814
rewrite_bittest (gimple_stmt_iterator *bsi)
815
{
816
  gimple stmt, use_stmt, stmt1, stmt2;
817
  tree lhs, var, name, t, a, b;
818
  use_operand_p use;
819
 
820
  stmt = gsi_stmt (*bsi);
821
  lhs = gimple_assign_lhs (stmt);
822
 
823
  /* Verify that the single use of lhs is a comparison against zero.  */
824
  if (TREE_CODE (lhs) != SSA_NAME
825
      || !single_imm_use (lhs, &use, &use_stmt)
826
      || gimple_code (use_stmt) != GIMPLE_COND)
827
    return stmt;
828
  if (gimple_cond_lhs (use_stmt) != lhs
829
      || (gimple_cond_code (use_stmt) != NE_EXPR
830
          && gimple_cond_code (use_stmt) != EQ_EXPR)
831
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
832
    return stmt;
833
 
834
  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
835
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
836
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
837
    return stmt;
838
 
839
  /* There is a conversion in between possibly inserted by fold.  */
840
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
841
    {
842
      t = gimple_assign_rhs1 (stmt1);
843
      if (TREE_CODE (t) != SSA_NAME
844
          || !has_single_use (t))
845
        return stmt;
846
      stmt1 = SSA_NAME_DEF_STMT (t);
847
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
848
        return stmt;
849
    }
850
 
851
  /* Verify that B is loop invariant but A is not.  Verify that with
852
     all the stmt walking we are still in the same loop.  */
853
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
854
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
855
    return stmt;
856
 
857
  a = gimple_assign_rhs1 (stmt1);
858
  b = gimple_assign_rhs2 (stmt1);
859
 
860
  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
861
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
862
    {
863
      gimple_stmt_iterator rsi;
864
 
865
      /* 1 << B */
866
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
867
      add_referenced_var (var);
868
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
869
                       build_int_cst (TREE_TYPE (a), 1), b);
870
      stmt1 = gimple_build_assign (var, t);
871
      name = make_ssa_name (var, stmt1);
872
      gimple_assign_set_lhs (stmt1, name);
873
 
874
      /* A & (1 << B) */
875
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
876
      stmt2 = gimple_build_assign (var, t);
877
      name = make_ssa_name (var, stmt2);
878
      gimple_assign_set_lhs (stmt2, name);
879
 
880
      /* Replace the SSA_NAME we compare against zero.  Adjust
881
         the type of zero accordingly.  */
882
      SET_USE (use, name);
883
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));
884
 
885
      /* Don't use gsi_replace here, none of the new assignments sets
886
         the variable originally set in stmt.  Move bsi to stmt1, and
887
         then remove the original stmt, so that we get a chance to
888
         retain debug info for it.  */
889
      rsi = *bsi;
890
      gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
891
      gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
892
      gsi_remove (&rsi, true);
893
 
894
      return stmt1;
895
    }
896
 
897
  return stmt;
898
}
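 
/* Illustrative example (hypothetical, for exposition): with b_3 loop
   invariant and a_2 not, a bit test of the form

     tmp_1 = a_2 >> b_3;
     tmp_4 = tmp_1 & 1;
     if (tmp_4 != 0) ...

   is rewritten by the function above into

     shifttmp_1 = 1 << b_3;
     shifttmp_2 = a_2 & shifttmp_1;
     if (shifttmp_2 != 0) ...

   so that the mask computation 1 << b_3 becomes loop invariant and can be
   hoisted, saving a shift in every iteration.  */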
899
 
900
 
901
/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them in the LIM_DATA associated with the statements.
903
   Callback for walk_dominator_tree.  */
904
 
905
static void
906
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
907
                              basic_block bb)
908
{
909
  enum move_pos pos;
910
  gimple_stmt_iterator bsi;
911
  gimple stmt;
912
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
913
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
914
  struct lim_aux_data *lim_data;
915
 
916
  if (!loop_outer (bb->loop_father))
917
    return;
918
 
919
  if (dump_file && (dump_flags & TDF_DETAILS))
920
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
921
             bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
922
 
923
  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
924
    {
925
      stmt = gsi_stmt (bsi);
926
 
927
      pos = movement_possibility (stmt);
928
      if (pos == MOVE_IMPOSSIBLE)
929
        {
930
          if (nonpure_call_p (stmt))
931
            {
932
              maybe_never = true;
933
              outermost = NULL;
934
            }
935
          /* Make sure to note always_executed_in for stores to make
936
             store-motion work.  */
937
          else if (stmt_makes_single_store (stmt))
938
            {
939
              struct lim_aux_data *lim_data = init_lim_data (stmt);
940
              lim_data->always_executed_in = outermost;
941
            }
942
          continue;
943
        }
944
 
945
      if (is_gimple_assign (stmt)
946
          && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
947
              == GIMPLE_BINARY_RHS))
948
        {
949
          tree op0 = gimple_assign_rhs1 (stmt);
950
          tree op1 = gimple_assign_rhs2 (stmt);
951
          struct loop *ol1 = outermost_invariant_loop (op1,
952
                                        loop_containing_stmt (stmt));
953
 
954
          /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
955
             to be hoisted out of loop, saving expensive divide.  */
956
          if (pos == MOVE_POSSIBLE
957
              && gimple_assign_rhs_code (stmt) == RDIV_EXPR
958
              && flag_unsafe_math_optimizations
959
              && !flag_trapping_math
960
              && ol1 != NULL
961
              && outermost_invariant_loop (op0, ol1) == NULL)
962
            stmt = rewrite_reciprocal (&bsi);
963
 
964
          /* If the shift count is invariant, convert (A >> B) & 1 to
965
             A & (1 << B) allowing the bit mask to be hoisted out of the loop
966
             saving an expensive shift.  */
967
          if (pos == MOVE_POSSIBLE
968
              && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
969
              && integer_onep (op1)
970
              && TREE_CODE (op0) == SSA_NAME
971
              && has_single_use (op0))
972
            stmt = rewrite_bittest (&bsi);
973
        }
974
 
975
      lim_data = init_lim_data (stmt);
976
      lim_data->always_executed_in = outermost;
977
 
978
      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
979
        continue;
980
 
981
      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
982
        {
983
          lim_data->max_loop = NULL;
984
          continue;
985
        }
986
 
987
      if (dump_file && (dump_flags & TDF_DETAILS))
988
        {
989
          print_gimple_stmt (dump_file, stmt, 2, 0);
990
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
991
                   loop_depth (lim_data->max_loop),
992
                   lim_data->cost);
993
        }
994
 
995
      if (lim_data->cost >= LIM_EXPENSIVE)
996
        set_profitable_level (stmt);
997
    }
998
}
999
 
1000
/* For each statement, determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
1003
   each statement.  */
1004
 
1005
static void
1006
determine_invariantness (void)
1007
{
1008
  struct dom_walk_data walk_data;
1009
 
1010
  memset (&walk_data, 0, sizeof (struct dom_walk_data));
1011
  walk_data.dom_direction = CDI_DOMINATORS;
1012
  walk_data.before_dom_children = determine_invariantness_stmt;
1013
 
1014
  init_walk_dominator_tree (&walk_data);
1015
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
1016
  fini_walk_dominator_tree (&walk_data);
1017
}
1018
 
1019
/* Hoist the statements in basic block BB out of the loops prescribed by
1020
   data stored in LIM_DATA structures associated with each statement.  Callback
1021
   for walk_dominator_tree.  */
1022
 
1023
static void
1024
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
1025
                        basic_block bb)
1026
{
1027
  struct loop *level;
1028
  gimple_stmt_iterator bsi;
1029
  gimple stmt;
1030
  unsigned cost = 0;
1031
  struct lim_aux_data *lim_data;
1032
 
1033
  if (!loop_outer (bb->loop_father))
1034
    return;
1035
 
1036
  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
1037
    {
1038
      stmt = gsi_stmt (bsi);
1039
 
1040
      lim_data = get_lim_data (stmt);
1041
      if (lim_data == NULL)
1042
        {
1043
          gsi_next (&bsi);
1044
          continue;
1045
        }
1046
 
1047
      cost = lim_data->cost;
1048
      level = lim_data->tgt_loop;
1049
      clear_lim_data (stmt);
1050
 
1051
      if (!level)
1052
        {
1053
          gsi_next (&bsi);
1054
          continue;
1055
        }
1056
 
1057
      /* We do not really want to move conditionals out of the loop; we just
1058
         placed them here to force their operands to be moved if necessary.  */
1059
      if (gimple_code (stmt) == GIMPLE_COND)
1060
        continue;
1061
 
1062
      if (dump_file && (dump_flags & TDF_DETAILS))
1063
        {
1064
          fprintf (dump_file, "Moving statement\n");
1065
          print_gimple_stmt (dump_file, stmt, 0, 0);
1066
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
1067
                   cost, level->num);
1068
        }
1069
 
1070
      mark_virtual_ops_for_renaming (stmt);
1071
      gsi_insert_on_edge (loop_preheader_edge (level), stmt);
1072
      gsi_remove (&bsi, false);
1073
    }
1074
}
1075
 
1076
/* Hoist the statements out of the loops prescribed by data stored in
1077
   LIM_DATA structures associated with each statement.  */
1078
 
1079
static void
1080
move_computations (void)
1081
{
1082
  struct dom_walk_data walk_data;
1083
 
1084
  memset (&walk_data, 0, sizeof (struct dom_walk_data));
1085
  walk_data.dom_direction = CDI_DOMINATORS;
1086
  walk_data.before_dom_children = move_computations_stmt;
1087
 
1088
  init_walk_dominator_tree (&walk_data);
1089
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
1090
  fini_walk_dominator_tree (&walk_data);
1091
 
1092
  gsi_commit_edge_inserts ();
1093
  if (need_ssa_update_p (cfun))
1094
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1095
}
1096
 
1097
/* Checks whether the statement defining variable *INDEX can be hoisted
1098
   out of the loop passed in DATA.  Callback for for_each_index.  */
1099
 
1100
static bool
1101
may_move_till (tree ref, tree *index, void *data)
1102
{
1103
  struct loop *loop = (struct loop *) data, *max_loop;
1104
 
1105
  /* If REF is an array reference, check also that the step and the lower
1106
     bound are invariant in LOOP.  */
1107
  if (TREE_CODE (ref) == ARRAY_REF)
1108
    {
1109
      tree step = TREE_OPERAND (ref, 3);
1110
      tree lbound = TREE_OPERAND (ref, 2);
1111
 
1112
      max_loop = outermost_invariant_loop (step, loop);
1113
      if (!max_loop)
1114
        return false;
1115
 
1116
      max_loop = outermost_invariant_loop (lbound, loop);
1117
      if (!max_loop)
1118
        return false;
1119
    }
1120
 
1121
  max_loop = outermost_invariant_loop (*index, loop);
1122
  if (!max_loop)
1123
    return false;
1124
 
1125
  return true;
1126
}
1127
 
1128
/* If OP is an SSA_NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */
1130
 
1131
static void
1132
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
1133
{
1134
  gimple stmt;
1135
 
1136
  if (!op
1137
      || is_gimple_min_invariant (op))
1138
    return;
1139
 
1140
  gcc_assert (TREE_CODE (op) == SSA_NAME);
1141
 
1142
  stmt = SSA_NAME_DEF_STMT (op);
1143
  if (gimple_nop_p (stmt))
1144
    return;
1145
 
1146
  set_level (stmt, orig_loop, loop);
1147
}
1148
 
1149
/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
1150
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
1151
   for_each_index.  */
1152
 
1153
struct fmt_data
1154
{
1155
  struct loop *loop;
1156
  struct loop *orig_loop;
1157
};
1158
 
1159
static bool
1160
force_move_till (tree ref, tree *index, void *data)
1161
{
1162
  struct fmt_data *fmt_data = (struct fmt_data *) data;
1163
 
1164
  if (TREE_CODE (ref) == ARRAY_REF)
1165
    {
1166
      tree step = TREE_OPERAND (ref, 3);
1167
      tree lbound = TREE_OPERAND (ref, 2);
1168
 
1169
      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
1170
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
1171
    }
1172
 
1173
  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);
1174
 
1175
  return true;
1176
}
1177
 
1178
/* A hash function for struct mem_ref object OBJ.  */
1179
 
1180
static hashval_t
1181
memref_hash (const void *obj)
1182
{
1183
  const struct mem_ref *const mem = (const struct mem_ref *) obj;
1184
 
1185
  return mem->hash;
1186
}
1187
 
1188
/* An equality function for struct mem_ref object OBJ1 with
1189
   memory reference OBJ2.  */
1190
 
1191
static int
1192
memref_eq (const void *obj1, const void *obj2)
1193
{
1194
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;
1195
 
1196
  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
1197
}
1198
 
1199
/* Releases list of memory reference locations ACCS.  */
1200
 
1201
static void
1202
free_mem_ref_locs (mem_ref_locs_p accs)
1203
{
1204
  unsigned i;
1205
  mem_ref_loc_p loc;
1206
 
1207
  if (!accs)
1208
    return;
1209
 
1210
  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
1211
    free (loc);
1212
  VEC_free (mem_ref_loc_p, heap, accs->locs);
1213
  free (accs);
1214
}
1215
 
1216
/* A function to free the mem_ref object OBJ.  */
1217
 
1218
static void
1219
memref_free (void *obj)
1220
{
1221
  struct mem_ref *const mem = (struct mem_ref *) obj;
1222
  unsigned i;
1223
  mem_ref_locs_p accs;
1224
 
1225
  BITMAP_FREE (mem->stored);
1226
  BITMAP_FREE (mem->indep_loop);
1227
  BITMAP_FREE (mem->dep_loop);
1228
  BITMAP_FREE (mem->indep_ref);
1229
  BITMAP_FREE (mem->dep_ref);
1230
 
1231
  for (i = 0; VEC_iterate (mem_ref_locs_p, mem->accesses_in_loop, i, accs); i++)
1232
    free_mem_ref_locs (accs);
1233
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);
1234
 
1235
  BITMAP_FREE (mem->vops);
1236
  free (mem);
1237
}
1238
 
1239
/* Allocates and returns a memory reference description for MEM whose hash
1240
   value is HASH and id is ID.  */
1241
 
1242
static mem_ref_p
1243
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
1244
{
1245
  mem_ref_p ref = XNEW (struct mem_ref);
1246
  ref->mem = mem;
1247
  ref->id = id;
1248
  ref->hash = hash;
1249
  ref->stored = BITMAP_ALLOC (NULL);
1250
  ref->indep_loop = BITMAP_ALLOC (NULL);
1251
  ref->dep_loop = BITMAP_ALLOC (NULL);
1252
  ref->indep_ref = BITMAP_ALLOC (NULL);
1253
  ref->dep_ref = BITMAP_ALLOC (NULL);
1254
  ref->accesses_in_loop = NULL;
1255
  ref->vops = BITMAP_ALLOC (NULL);
1256
 
1257
  return ref;
1258
}
1259
 
1260
/* Allocates and returns the new list of locations.  */
1261
 
1262
static mem_ref_locs_p
1263
mem_ref_locs_alloc (void)
1264
{
1265
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
1266
  accs->locs = NULL;
1267
  return accs;
1268
}
1269
 
1270
/* Records memory reference location *LOC in LOOP to the memory reference
1271
   description REF.  The reference occurs in statement STMT.  */
1272
 
1273
static void
1274
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
1275
{
1276
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
1277
  mem_ref_locs_p accs;
1278
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1279
 
1280
  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
1281
      <= (unsigned) loop->num)
1282
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
1283
                           loop->num + 1);
1284
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
1285
  if (!accs)
1286
    {
1287
      accs = mem_ref_locs_alloc ();
1288
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
1289
    }
1290
 
1291
  aref->stmt = stmt;
1292
  aref->ref = loc;
1293
 
1294
  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
1295
  bitmap_set_bit (ril, ref->id);
1296
}
1297
 
1298
/* Marks reference REF as stored in LOOP.  */
1299
 
1300
static void
1301
mark_ref_stored (mem_ref_p ref, struct loop *loop)
1302
{
1303
  for (;
1304
       loop != current_loops->tree_root
1305
       && !bitmap_bit_p (ref->stored, loop->num);
1306
       loop = loop_outer (loop))
1307
    bitmap_set_bit (ref->stored, loop->num);
1308
}
1309
 
1310
/* Gathers memory references in statement STMT in LOOP, storing the
1311
   information about them in the memory_accesses structure.  Marks
1312
   the vops accessed through unrecognized statements there as
1313
   well.  */
1314
 
1315
static void
1316
gather_mem_refs_stmt (struct loop *loop, gimple stmt)
1317
{
1318
  tree *mem = NULL;
1319
  hashval_t hash;
1320
  PTR *slot;
1321
  mem_ref_p ref;
1322
  tree vname;
1323
  bool is_stored;
1324
  bitmap clvops;
1325
  unsigned id;
1326
 
1327
  if (!gimple_vuse (stmt))
1328
    return;
1329
 
1330
  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
1331
  if (!mem)
1332
    goto fail;
1333
 
1334
  hash = iterative_hash_expr (*mem, 0);
1335
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);
1336
 
1337
  if (*slot)
1338
    {
1339
      ref = (mem_ref_p) *slot;
1340
      id = ref->id;
1341
    }
1342
  else
1343
    {
1344
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
1345
      ref = mem_ref_alloc (*mem, hash, id);
1346
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
1347
      *slot = ref;
1348
 
1349
      if (dump_file && (dump_flags & TDF_DETAILS))
1350
        {
1351
          fprintf (dump_file, "Memory reference %u: ", id);
1352
          print_generic_expr (dump_file, ref->mem, TDF_SLIM);
1353
          fprintf (dump_file, "\n");
1354
        }
1355
    }
1356
  if (is_stored)
1357
    mark_ref_stored (ref, loop);
1358
 
1359
  if ((vname = gimple_vuse (stmt)) != NULL_TREE)
1360
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
1361
  record_mem_ref_loc (ref, loop, stmt, mem);
1362
  return;
1363
 
1364
fail:
1365
  clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
1366
  if ((vname = gimple_vuse (stmt)) != NULL_TREE)
1367
    bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
1368
}
1369
 
1370
/* Gathers memory references in loops.  */
1371
 
1372
static void
1373
gather_mem_refs_in_loops (void)
1374
{
1375
  gimple_stmt_iterator bsi;
1376
  basic_block bb;
1377
  struct loop *loop;
1378
  loop_iterator li;
1379
  bitmap clvo, clvi;
1380
  bitmap lrefs, alrefs, alrefso;
1381
 
1382
  FOR_EACH_BB (bb)
1383
    {
1384
      loop = bb->loop_father;
1385
      if (loop == current_loops->tree_root)
1386
        continue;
1387
 
1388
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1389
        gather_mem_refs_stmt (loop, gsi_stmt (bsi));
1390
    }
1391
 
1392
  /* Propagate the information about clobbered vops and accessed memory
1393
     references up the loop hierarchy.  */
1394
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
1395
    {
1396
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1397
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
1398
      bitmap_ior_into (alrefs, lrefs);
1399
 
1400
      if (loop_outer (loop) == current_loops->tree_root)
1401
        continue;
1402
 
1403
      clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
1404
      clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
1405
                        loop_outer (loop)->num);
1406
      bitmap_ior_into (clvo, clvi);
1407
 
1408
      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
1409
                           loop_outer (loop)->num);
1410
      bitmap_ior_into (alrefso, alrefs);
1411
    }
1412
}
1413
 
1414
/* Element of the hash table that maps vops to memory references.  */
1415
 
1416
struct vop_to_refs_elt
1417
{
1418
  /* DECL_UID of the vop.  */
1419
  unsigned uid;
1420
 
1421
  /* List of all the references.  */
1422
  bitmap refs_all;
1423
 
1424
  /* List of stored references.  */
1425
  bitmap refs_stored;
1426
};
1427
 
1428
/* A hash function for struct vop_to_refs_elt object OBJ.  */
1429
 
1430
static hashval_t
1431
vtoe_hash (const void *obj)
1432
{
1433
  const struct vop_to_refs_elt *const vtoe =
1434
    (const struct vop_to_refs_elt *) obj;
1435
 
1436
  return vtoe->uid;
1437
}
1438
 
1439
/* An equality function for struct vop_to_refs_elt object OBJ1 with
1440
   uid of a vop OBJ2.  */
1441
 
1442
static int
1443
vtoe_eq (const void *obj1, const void *obj2)
1444
{
1445
  const struct vop_to_refs_elt *const vtoe =
1446
    (const struct vop_to_refs_elt *) obj1;
1447
  const unsigned *const uid = (const unsigned *) obj2;
1448
 
1449
  return vtoe->uid == *uid;
1450
}
1451
 
1452
/* A function to free the struct vop_to_refs_elt object.  */
1453
 
1454
static void
1455
vtoe_free (void *obj)
1456
{
1457
  struct vop_to_refs_elt *const vtoe =
1458
    (struct vop_to_refs_elt *) obj;
1459
 
1460
  BITMAP_FREE (vtoe->refs_all);
1461
  BITMAP_FREE (vtoe->refs_stored);
1462
  free (vtoe);
1463
}
1464
 
1465
/* Records REF to hashtable VOP_TO_REFS for the index VOP.  STORED is true
1466
   if the reference REF is stored.  */
1467
 
1468
static void
1469
record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
1470
{
1471
  void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
1472
  struct vop_to_refs_elt *vtoe;
1473
 
1474
  if (!*slot)
1475
    {
1476
      vtoe = XNEW (struct vop_to_refs_elt);
1477
      vtoe->uid = vop;
1478
      vtoe->refs_all = BITMAP_ALLOC (NULL);
1479
      vtoe->refs_stored = BITMAP_ALLOC (NULL);
1480
      *slot = vtoe;
1481
    }
1482
  else
1483
    vtoe = (struct vop_to_refs_elt *) *slot;
1484
 
1485
  bitmap_set_bit (vtoe->refs_all, ref);
1486
  if (stored)
1487
    bitmap_set_bit (vtoe->refs_stored, ref);
1488
}
1489
 
1490
/* Returns the set of references that access VOP according to the table
1491
   VOP_TO_REFS.  */
1492
 
1493
static bitmap
1494
get_vop_accesses (htab_t vop_to_refs, unsigned vop)
1495
{
1496
  struct vop_to_refs_elt *const vtoe =
1497
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
1498
  return vtoe->refs_all;
1499
}
1500
 
1501
/* Returns the set of stores that access VOP according to the table
1502
   VOP_TO_REFS.  */
1503
 
1504
static bitmap
1505
get_vop_stores (htab_t vop_to_refs, unsigned vop)
1506
{
1507
  struct vop_to_refs_elt *const vtoe =
1508
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
1509
  return vtoe->refs_stored;
1510
}
1511
 
1512
/* Adds REF to mapping from virtual operands to references in LOOP.  */
1513
 
1514
static void
1515
add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
1516
{
1517
  htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
1518
  bool stored = bitmap_bit_p (ref->stored, loop->num);
1519
  bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
1520
                               loop->num);
1521
  bitmap_iterator bi;
1522
  unsigned vop;
1523
 
1524
  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
1525
    {
1526
      record_vop_access (map, vop, ref->id, stored);
1527
    }
1528
}
1529
 
1530
/* Create a mapping from virtual operands to references that touch them
1531
   in LOOP.  */
1532
 
1533
static void
1534
create_vop_ref_mapping_loop (struct loop *loop)
1535
{
1536
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1537
  struct loop *sloop;
1538
  bitmap_iterator bi;
1539
  unsigned i;
1540
  mem_ref_p ref;
1541
 
1542
  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
1543
    {
1544
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
1545
      for (sloop = loop; sloop != current_loops->tree_root; sloop = loop_outer (sloop))
1546
        add_vop_ref_mapping (sloop, ref);
1547
    }
1548
}
1549
 
1550
/* For each non-clobbered virtual operand and each loop, record the memory
1551
   references in this loop that touch the operand.  */
1552
 
1553
static void
1554
create_vop_ref_mapping (void)
1555
{
1556
  loop_iterator li;
1557
  struct loop *loop;
1558
 
1559
  FOR_EACH_LOOP (li, loop, 0)
1560
    {
1561
      create_vop_ref_mapping_loop (loop);
1562
    }
1563
}
1564
 
1565
/* Gathers information about memory accesses in the loops.  */
1566
 
1567
static void
1568
analyze_memory_references (void)
1569
{
1570
  unsigned i;
1571
  bitmap empty;
1572
  htab_t hempty;
1573
 
1574
  memory_accesses.refs
1575
          = htab_create (100, memref_hash, memref_eq, memref_free);
1576
  memory_accesses.refs_list = NULL;
1577
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
1578
                                            number_of_loops ());
1579
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
1580
                                                number_of_loops ());
1581
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
1582
                                              number_of_loops ());
1583
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
1584
                                           number_of_loops ());
1585
 
1586
  for (i = 0; i < number_of_loops (); i++)
1587
    {
1588
      empty = BITMAP_ALLOC (NULL);
1589
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
1590
      empty = BITMAP_ALLOC (NULL);
1591
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
1592
      empty = BITMAP_ALLOC (NULL);
1593
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
1594
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
1595
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
1596
    }
1597
 
1598
  memory_accesses.ttae_cache = NULL;
1599
 
1600
  gather_mem_refs_in_loops ();
1601
  create_vop_ref_mapping ();
1602
}
1603
 
1604
/* Returns true if a region of size SIZE1 at position 0 and a region of
1605
   size SIZE2 at position DIFF cannot overlap.  */
1606
 
1607
static bool
1608
cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
1609
{
1610
  double_int d, bound;
1611
 
1612
  /* Unless the difference is a constant, we fail.  */
1613
  if (diff->n != 0)
1614
    return false;
1615
 
1616
  d = diff->offset;
1617
  if (double_int_negative_p (d))
1618
    {
1619
      /* The second object is before the first one, we succeed if the last
1620
         element of the second object is before the start of the first one.  */
1621
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
1622
      return double_int_negative_p (bound);
1623
    }
1624
  else
1625
    {
1626
      /* We succeed if the second object starts after the first one ends.  */
1627
      return double_int_scmp (size1, d) <= 0;
1628
    }
1629
}
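 
/* Illustrative example (hypothetical, for exposition): with a constant
   difference DIFF->offset = 16, SIZE1 = 8 and SIZE2 = 4, the second region
   starts 16 units past the first one and 8 <= 16, so the regions cannot
   overlap and true is returned.  With DIFF->offset = -4 and SIZE2 = 8, the
   last element of the second region lies at -4 + 8 - 1 = 3, which is not
   before the start of the first region, so overlap is possible and false is
   returned.  */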
1630
 
1631
/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
1632
   tree_to_aff_combination_expand.  */
1633
 
1634
static bool
1635
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
1636
{
1637
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
1638
     object and their offsets differ in such a way that the locations cannot
1639
     overlap, then they cannot alias.  */
1640
  double_int size1, size2;
1641
  aff_tree off1, off2;
1642
 
1643
  /* Perform basic offset and type-based disambiguation.  */
1644
  if (!refs_may_alias_p (mem1, mem2))
1645
    return false;
1646
 
1647
  /* The expansion of addresses may be a bit expensive, thus we only do
1648
     the check at -O2 and higher optimization levels.  */
1649
  if (optimize < 2)
1650
    return true;
1651
 
1652
  get_inner_reference_aff (mem1, &off1, &size1);
1653
  get_inner_reference_aff (mem2, &off2, &size2);
1654
  aff_combination_expand (&off1, ttae_cache);
1655
  aff_combination_expand (&off2, ttae_cache);
1656
  aff_combination_scale (&off1, double_int_minus_one);
1657
  aff_combination_add (&off2, &off1);
1658
 
1659
  if (cannot_overlap_p (&off2, size1, size2))
1660
    return false;
1661
 
1662
  return true;
1663
}
1664
 
1665
/* Rewrites location LOC by TMP_VAR.  */
1666
 
1667
static void
1668
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
1669
{
1670
  mark_virtual_ops_for_renaming (loc->stmt);
1671
  *loc->ref = tmp_var;
1672
  update_stmt (loc->stmt);
1673
}
1674
 
/* Adds all locations of REF in LOOP and its subloops to LOCS.  */

static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
                      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
        {
          for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
            VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
        }
    }

  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}

/* Rewrites all references to REF in LOOP by variable TMP_VAR.  */

static void
rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
  unsigned i;
  mem_ref_loc_p loc;
  VEC (mem_ref_loc_p, heap) *locs = NULL;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    rewrite_mem_ref_loc (loc, tmp_var);
  VEC_free (mem_ref_loc_p, heap, locs);
}
 
/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
 
/* Stores the name for the temporary variable that replaces REF into
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
        name = "F";
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
        name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    case INTEGER_CST:
      /* Nothing.  */
      break;

    default:
      gcc_unreachable ();
    }
}
 
/* Determines the name for the temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is added to the name of the temporary.  */
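/* For instance (a hypothetical reference, names chosen for illustration):
   for a reference like s.data[i], gen_lsm_tmp_name accumulates "s_data_I"
   ("_" plus the field name for the COMPONENT_REF, "_I" for the ARRAY_REF),
   so with N = 0 the function below returns roughly "s_data_I_lsm0"; when N
   is 10 or greater, no digit is appended.  */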
 
char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
 
/* Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put into the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted on the exits.  */
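/* As an illustrative sketch (p_lsm stands for the temporary produced by
   get_lsm_tmp_name), a loop of the form

     while (cond)
       *p = f (*p);

   where *p is the reference being moved becomes roughly

     p_lsm = *p;
     while (cond)
       p_lsm = f (p_lsm);
     *p = p_lsm;

   with one store back to *p emitted on every exit edge of the loop.  */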
 
static void
execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
{
  tree tmp_var;
  unsigned i;
  gimple load, store;
  struct fmt_data fmt_data;
  edge ex;
  struct lim_aux_data *lim_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref->mem, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
                              get_lsm_tmp_name (ref->mem, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref->mem, force_move_till, &fmt_data);

  rewrite_mem_refs (loop, ref, tmp_var);

  /* Emit the load & stores.  */
  load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
  lim_data = init_lim_data (load);
  lim_data->max_loop = loop;
  lim_data->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  gsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
      gsi_insert_on_edge (ex, store);
    }
}
 
/* Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
                         VEC (edge, heap) *exits)
{
  mem_ref_p ref;
  unsigned  i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      execute_sm (loop, exits, ref);
    }
}
 
/* Returns true if REF is always accessed in LOOP.  If STORED_P is true,
   make sure REF is always stored to in LOOP.  */

static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
{
  VEC (mem_ref_loc_p, heap) *locs = NULL;
  unsigned i;
  mem_ref_loc_p loc;
  bool ret = false;
  struct loop *must_exec;
  tree base;

  base = get_base_address (ref->mem);
  if (INDIRECT_REF_P (base))
    base = TREE_OPERAND (base, 0);

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    {
      if (!get_lim_data (loc->stmt))
        continue;

      /* If we require an always executed store, make sure the statement
         stores to the reference.  */
      if (stored_p)
        {
          tree lhs;
          if (!gimple_get_lhs (loc->stmt))
            continue;
          lhs = get_base_address (gimple_get_lhs (loc->stmt));
          if (!lhs)
            continue;
          if (INDIRECT_REF_P (lhs))
            lhs = TREE_OPERAND (lhs, 0);
          if (lhs != base)
            continue;
        }

      must_exec = get_lim_data (loc->stmt)->always_executed_in;
      if (!must_exec)
        continue;

      if (must_exec == loop
          || flow_loop_nested_p (must_exec, loop))
        {
          ret = true;
          break;
        }
    }
  VEC_free (mem_ref_loc_p, heap, locs);

  return ret;
}
 
/* Returns true if REF1 and REF2 are independent.  */

static bool
refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
{
  if (ref1 == ref2
      || bitmap_bit_p (ref1->indep_ref, ref2->id))
    return true;
  if (bitmap_bit_p (ref1->dep_ref, ref2->id))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependency of refs %u and %u: ",
             ref1->id, ref2->id);

  if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
                            &memory_accesses.ttae_cache))
    {
      bitmap_set_bit (ref1->dep_ref, ref2->id);
      bitmap_set_bit (ref2->dep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "dependent.\n");
      return false;
    }
  else
    {
      bitmap_set_bit (ref1->indep_ref, ref2->id);
      bitmap_set_bit (ref2->indep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "independent.\n");
      return true;
    }
}
 
/* Records whether REF is independent in LOOP (according to INDEP).  */

static void
record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
{
  if (indep)
    bitmap_set_bit (ref->indep_loop, loop->num);
  else
    bitmap_set_bit (ref->dep_loop, loop->num);
}
 
/* Returns true if REF is independent of all other memory references in
   LOOP.  */

static bool
ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
{
  bitmap clobbers, refs_to_check, refs;
  unsigned i;
  bitmap_iterator bi;
  bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
  htab_t map;
  mem_ref_p aref;

  /* If the reference is clobbered, it is not independent.  */
  clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  if (bitmap_intersect_p (ref->vops, clobbers))
    return false;

  refs_to_check = BITMAP_ALLOC (NULL);

  map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi)
    {
      if (stored)
        refs = get_vop_accesses (map, i);
      else
        refs = get_vop_stores (map, i);

      bitmap_ior_into (refs_to_check, refs);
    }

  EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
    {
      aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (!refs_independent_p (ref, aref))
        {
          ret = false;
          record_indep_loop (loop, aref, false);
          break;
        }
    }

  BITMAP_FREE (refs_to_check);
  return ret;
}
 
/* Returns true if REF is independent of all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  */

static bool
ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
{
  bool ret;

  if (bitmap_bit_p (ref->indep_loop, loop->num))
    return true;
  if (bitmap_bit_p (ref->dep_loop, loop->num))
    return false;

  ret = ref_indep_loop_p_1 (loop, ref);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
             ref->id, loop->num, ret ? "independent" : "dependent");

  record_indep_loop (loop, ref, ret);

  return ret;
}
 
/* Returns true if we can perform store motion of REF from LOOP.  */

static bool
can_sm_ref_p (struct loop *loop, mem_ref_p ref)
{
  tree base;

  /* Unless the reference is stored in the loop, there is nothing to do.  */
  if (!bitmap_bit_p (ref->stored, loop->num))
    return false;

  /* It should be movable.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
      || TREE_THIS_VOLATILE (ref->mem)
      || !for_each_index (&ref->mem, may_move_till, loop))
    return false;

  /* If it can trap, it must always be executed in LOOP.
     Readonly memory locations may trap when storing to them, but
     tree_could_trap_p is a predicate for rvalues, so check that
     explicitly.  */
  base = get_base_address (ref->mem);
  if ((tree_could_trap_p (ref->mem)
       || (DECL_P (base) && TREE_READONLY (base)))
      && !ref_always_accessed_p (loop, ref, true))
    return false;

  /* And it must be independent of all other memory references
     in LOOP.  */
  if (!ref_indep_loop_p (loop, ref))
    return false;

  return true;
}
 
/* Marks in REFS_TO_SM the references in LOOP for which store motion should
   be performed.  SM_EXECUTED is the set of references for which store
   motion was performed in one of the outer loops.  */

static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop->num);
  unsigned i;
  bitmap_iterator bi;
  mem_ref_p ref;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (can_sm_ref_p (loop, ref))
        bitmap_set_bit (refs_to_sm, i);
    }
}
 
/* Checks whether LOOP (with exits stored in EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
                      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
 
/* Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for which
   store motion was executed in one of the outer loops.  */
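/* Note on the bookkeeping below: references already moved out of an
   enclosing loop (their bits are set in SM_EXECUTED) are skipped by
   find_refs_for_sm; the bits for the references handled in this loop are
   added to SM_EXECUTED before recursing into the subloops and removed
   again afterwards, so each subloop sees exactly the references handled
   in its ancestors.  */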
 
static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *subloop;
  bitmap sm_in_loop = BITMAP_ALLOC (NULL);

  if (loop_suitable_for_sm (loop, exits))
    {
      find_refs_for_sm (loop, sm_executed, sm_in_loop);
      hoist_memory_references (loop, sm_in_loop, exits);
    }
  VEC_free (edge, heap, exits);

  bitmap_ior_into (sm_executed, sm_in_loop);
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    store_motion_loop (subloop, sm_executed);
  bitmap_and_compl_into (sm_executed, sm_in_loop);
  BITMAP_FREE (sm_in_loop);
}
 
/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
store_motion (void)
{
  struct loop *loop;
  bitmap sm_executed = BITMAP_ALLOC (NULL);

  for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
    store_motion_loop (loop, sm_executed);

  BITMAP_FREE (sm_executed);
  gsi_commit_edge_inserts ();
}
 
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for which
   execution of its header implies execution of bb.  CONTAINS_CALL is the
   bitmap of blocks that contain a nonpure call.  */
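/* For instance (an illustrative loop, not a particular testcase): in

     while (1)
       {
         if (cond)
           break;
         a = something;
       }

   the header is always executed whenever the loop is entered, but the block
   containing the assignment to a is not recorded, since the walk below stops
   at the first block that has an exit edge out of the loop.  */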
 
static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
        {
          edge_iterator ei;
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          if (TEST_BIT (contains_call, bb->index))
            break;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              break;
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }

      while (1)
        {
          last->aux = loop;
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
 
/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  gimple_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          if (nonpure_call_p (gsi_stmt (bsi)))
            break;
        }

      if (!gsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);

  lim_aux_data_map = pointer_map_create ();
}
 
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;
  unsigned i;
  bitmap b;
  htab_t h;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }

  pointer_map_destroy (lim_aux_data_map);

  VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
  htab_delete (memory_accesses.refs);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.all_refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.clobbered_vops, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.clobbered_vops);

  for (i = 0; VEC_iterate (htab_t, memory_accesses.vop_ref_map, i, h); i++)
    htab_delete (h);
  VEC_free (htab_t, heap, memory_accesses.vop_ref_map);

  if (memory_accesses.ttae_cache)
    pointer_map_destroy (memory_accesses.ttae_cache);
}
 
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* Gathers information about memory accesses in the loops.  */
  analyze_memory_references ();

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* Execute store motion.  Force the necessary invariants to be moved
     out of the loops as well.  */
  store_motion ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}
