OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [tree-ssa-loop-im.c] - Blame information for rev 867

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Loop invariant motion.
2
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
3
 
4
This file is part of GCC.
5
 
6
GCC is free software; you can redistribute it and/or modify it
7
under the terms of the GNU General Public License as published by the
8
Free Software Foundation; either version 3, or (at your option) any
9
later version.
10
 
11
GCC is distributed in the hope that it will be useful, but WITHOUT
12
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14
for more details.
15
 
16
You should have received a copy of the GNU General Public License
17
along with GCC; see the file COPYING3.  If not see
18
<http://www.gnu.org/licenses/>.  */
19
 
20
#include "config.h"
21
#include "system.h"
22
#include "coretypes.h"
23
#include "tm.h"
24
#include "tree.h"
25
#include "rtl.h"
26
#include "tm_p.h"
27
#include "hard-reg-set.h"
28
#include "basic-block.h"
29
#include "output.h"
30
#include "diagnostic.h"
31
#include "tree-flow.h"
32
#include "tree-dump.h"
33
#include "timevar.h"
34
#include "cfgloop.h"
35
#include "domwalk.h"
36
#include "params.h"
37
#include "tree-pass.h"
38
#include "flags.h"
39
#include "real.h"
40
#include "hashtab.h"
41
 
42
/* TODO:  Support for predicated code motion.  I.e.
43
 
44
   while (1)
45
     {
46
       if (cond)
47
         {
48
           a = inv;
49
           something;
50
         }
51
     }
52
 
53
   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to
55
 
56
   if (cond)
57
     a = inv;
58
   while (1)
59
     {
60
       if (cond)
61
         something;
62
     }  */
63
 
64
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;                    /* A statement the hoisted one depends on.  */
  struct depend *next;          /* Next element of the singly-linked list.  */
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;        /* The outermost loop in that the statement
                                   is invariant.  */

  struct loop *tgt_loop;        /* The loop out of that we want to move the
                                   invariant.  */

  struct loop *always_executed_in;
                                /* The outermost loop for that we are sure
                                   the statement is executed if the loop
                                   is entered.  */

  bool sm_done;                 /* True iff the store motion for a memory
                                   reference in the statement has already
                                   been executed.  */

  unsigned cost;                /* Cost of the computation performed by the
                                   statement.  */

  struct depend *depends;       /* List of statements that must be also hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside of the
                                   MAX_LOOP loop.  */
};

/* Accessor for the lim_aux_data of STMT; PHI nodes carry no such data.  */
#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
                        ? NULL \
                        : (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))

/* Description of a memory reference location for store motion.  */

struct mem_ref_loc
{
  tree *ref;                    /* The reference itself.  */
  tree stmt;                    /* The statement in that it occurs.  */
  struct mem_ref_loc *next;     /* Next use in the chain.  */
};

/* Description of a memory reference for store motion.  */

struct mem_ref
{
  tree mem;                     /* The memory itself.  */
  hashval_t hash;               /* Its hash value.  */
  bool is_stored;               /* True if there is a store to the location
                                   in the loop.  */
  struct mem_ref_loc *locs;     /* The locations where it is found.  */
  bitmap vops;                  /* Vops corresponding to this memory
                                   location.  */
  struct mem_ref *next;         /* Next memory reference in the list.
                                   Memory references are stored in a hash
                                   table, but the hash function depends
                                   on values of pointers. Thus we cannot use
                                   htab_traverse, since then we would get
                                   miscompares during bootstrap (although the
                                   produced code would be correct).  */
};

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for that execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
141
 
142
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
143
   kinds situations handled; in each of these cases, the memory reference
144
   and DATA are passed to the callback:
145
 
146
   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
147
   pass the pointer to the index to the callback.
148
 
149
   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
150
   pointer to addr to the callback.
151
 
152
   If the callback returns false, the whole search stops and false is returned.
153
   Otherwise the function returns true after traversing through the whole
154
   reference *ADDR_P.  */
155
 
156
bool
157
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
158
{
159
  tree *nxt, *idx;
160
 
161
  for (; ; addr_p = nxt)
162
    {
163
      switch (TREE_CODE (*addr_p))
164
        {
165
        case SSA_NAME:
166
          return cbck (*addr_p, addr_p, data);
167
 
168
        case MISALIGNED_INDIRECT_REF:
169
        case ALIGN_INDIRECT_REF:
170
        case INDIRECT_REF:
171
          nxt = &TREE_OPERAND (*addr_p, 0);
172
          return cbck (*addr_p, nxt, data);
173
 
174
        case BIT_FIELD_REF:
175
        case VIEW_CONVERT_EXPR:
176
        case REALPART_EXPR:
177
        case IMAGPART_EXPR:
178
          nxt = &TREE_OPERAND (*addr_p, 0);
179
          break;
180
 
181
        case COMPONENT_REF:
182
          /* If the component has varying offset, it behaves like index
183
             as well.  */
184
          idx = &TREE_OPERAND (*addr_p, 2);
185
          if (*idx
186
              && !cbck (*addr_p, idx, data))
187
            return false;
188
 
189
          nxt = &TREE_OPERAND (*addr_p, 0);
190
          break;
191
 
192
        case ARRAY_REF:
193
        case ARRAY_RANGE_REF:
194
          nxt = &TREE_OPERAND (*addr_p, 0);
195
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
196
            return false;
197
          break;
198
 
199
        case VAR_DECL:
200
        case PARM_DECL:
201
        case STRING_CST:
202
        case RESULT_DECL:
203
        case VECTOR_CST:
204
        case COMPLEX_CST:
205
        case INTEGER_CST:
206
        case REAL_CST:
207
          return true;
208
 
209
        case TARGET_MEM_REF:
210
          idx = &TMR_BASE (*addr_p);
211
          if (*idx
212
              && !cbck (*addr_p, idx, data))
213
            return false;
214
          idx = &TMR_INDEX (*addr_p);
215
          if (*idx
216
              && !cbck (*addr_p, idx, data))
217
            return false;
218
          return true;
219
 
220
        default:
221
          gcc_unreachable ();
222
        }
223
    }
224
}
225
 
226
/* If it is possible to hoist the statement STMT unconditionally,
227
   returns MOVE_POSSIBLE.
228
   If it is possible to hoist the statement STMT, but we must avoid making
229
   it executed if it would not be executed in the original program (e.g.
230
   because it may trap), return MOVE_PRESERVE_EXECUTION.
231
   Otherwise return MOVE_IMPOSSIBLE.  */
232
 
233
enum move_pos
234
movement_possibility (tree stmt)
235
{
236
  tree lhs, rhs;
237
 
238
  if (flag_unswitch_loops
239
      && TREE_CODE (stmt) == COND_EXPR)
240
    {
241
      /* If we perform unswitching, force the operands of the invariant
242
         condition to be moved out of the loop.  */
243
      return MOVE_POSSIBLE;
244
    }
245
 
246
  if (TREE_CODE (stmt) != MODIFY_EXPR)
247
    return MOVE_IMPOSSIBLE;
248
 
249
  if (stmt_ends_bb_p (stmt))
250
    return MOVE_IMPOSSIBLE;
251
 
252
  if (stmt_ann (stmt)->has_volatile_ops)
253
    return MOVE_IMPOSSIBLE;
254
 
255
  lhs = TREE_OPERAND (stmt, 0);
256
  if (TREE_CODE (lhs) == SSA_NAME
257
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
258
    return MOVE_IMPOSSIBLE;
259
 
260
  rhs = TREE_OPERAND (stmt, 1);
261
 
262
  if (TREE_SIDE_EFFECTS (rhs))
263
    return MOVE_IMPOSSIBLE;
264
 
265
  if (TREE_CODE (lhs) != SSA_NAME
266
      || tree_could_trap_p (rhs))
267
    return MOVE_PRESERVE_EXECUTION;
268
 
269
  if (get_call_expr_in (stmt))
270
    {
271
      /* While pure or const call is guaranteed to have no side effects, we
272
         cannot move it arbitrarily.  Consider code like
273
 
274
         char *s = something ();
275
 
276
         while (1)
277
           {
278
             if (s)
279
               t = strlen (s);
280
             else
281
               t = 0;
282
           }
283
 
284
         Here the strlen call cannot be moved out of the loop, even though
285
         s is invariant.  In addition to possibly creating a call with
286
         invalid arguments, moving out a function call that is not executed
287
         may cause performance regressions in case the call is costly and
288
         not executed at all.  */
289
      return MOVE_PRESERVE_EXECUTION;
290
    }
291
  return MOVE_POSSIBLE;
292
}
293
 
294
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
295
   loop to that we could move the expression using DEF if it did not have
296
   other operands, i.e. the outermost loop enclosing LOOP in that the value
297
   of DEF is invariant.  */
298
 
299
static struct loop *
300
outermost_invariant_loop (tree def, struct loop *loop)
301
{
302
  tree def_stmt;
303
  basic_block def_bb;
304
  struct loop *max_loop;
305
 
306
  if (TREE_CODE (def) != SSA_NAME)
307
    return superloop_at_depth (loop, 1);
308
 
309
  def_stmt = SSA_NAME_DEF_STMT (def);
310
  def_bb = bb_for_stmt (def_stmt);
311
  if (!def_bb)
312
    return superloop_at_depth (loop, 1);
313
 
314
  max_loop = find_common_loop (loop, def_bb->loop_father);
315
 
316
  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
317
    max_loop = find_common_loop (max_loop,
318
                                 LIM_DATA (def_stmt)->max_loop->outer);
319
  if (max_loop == loop)
320
    return NULL;
321
  max_loop = superloop_at_depth (loop, max_loop->depth + 1);
322
 
323
  return max_loop;
324
}
325
 
326
/* Returns the outermost superloop of LOOP in that the expression EXPR is
327
   invariant.  */
328
 
329
static struct loop *
330
outermost_invariant_loop_expr (tree expr, struct loop *loop)
331
{
332
  enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
333
  unsigned i, nops;
334
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
335
 
336
  if (TREE_CODE (expr) == SSA_NAME
337
      || TREE_CODE (expr) == INTEGER_CST
338
      || is_gimple_min_invariant (expr))
339
    return outermost_invariant_loop (expr, loop);
340
 
341
  if (class != tcc_unary
342
      && class != tcc_binary
343
      && class != tcc_expression
344
      && class != tcc_comparison)
345
    return NULL;
346
 
347
  nops = TREE_CODE_LENGTH (TREE_CODE (expr));
348
  for (i = 0; i < nops; i++)
349
    {
350
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
351
      if (!aloop)
352
        return NULL;
353
 
354
      if (flow_loop_nested_p (max_loop, aloop))
355
        max_loop = aloop;
356
    }
357
 
358
  return max_loop;
359
}
360
 
361
/* DATA is a structure containing information associated with a statement
362
   inside LOOP.  DEF is one of the operands of this statement.
363
 
364
   Find the outermost loop enclosing LOOP in that value of DEF is invariant
365
   and record this in DATA->max_loop field.  If DEF itself is defined inside
366
   this loop as well (i.e. we need to hoist it out of the loop if we want
367
   to hoist the statement represented by DATA), record the statement in that
368
   DEF is defined to the DATA->depends list.  Additionally if ADD_COST is true,
369
   add the cost of the computation of DEF to the DATA->cost.
370
 
371
   If DEF is not invariant in LOOP, return false.  Otherwise return TRUE.  */
372
 
373
static bool
374
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
375
                bool add_cost)
376
{
377
  tree def_stmt = SSA_NAME_DEF_STMT (def);
378
  basic_block def_bb = bb_for_stmt (def_stmt);
379
  struct loop *max_loop;
380
  struct depend *dep;
381
 
382
  if (!def_bb)
383
    return true;
384
 
385
  max_loop = outermost_invariant_loop (def, loop);
386
  if (!max_loop)
387
    return false;
388
 
389
  if (flow_loop_nested_p (data->max_loop, max_loop))
390
    data->max_loop = max_loop;
391
 
392
  if (!LIM_DATA (def_stmt))
393
    return true;
394
 
395
  if (add_cost
396
      /* Only add the cost if the statement defining DEF is inside LOOP,
397
         i.e. if it is likely that by moving the invariants dependent
398
         on it, we will be able to avoid creating a new register for
399
         it (since it will be only used in these dependent invariants).  */
400
      && def_bb->loop_father == loop)
401
    data->cost += LIM_DATA (def_stmt)->cost;
402
 
403
  dep = XNEW (struct depend);
404
  dep->stmt = def_stmt;
405
  dep->next = data->depends;
406
  data->depends = dep;
407
 
408
  return true;
409
}
410
 
411
/* Returns an estimate for a cost of statement STMT.  TODO -- the values here
412
   are just ad-hoc constants.  The estimates should be based on target-specific
413
   values.  */
414
 
415
static unsigned
416
stmt_cost (tree stmt)
417
{
418
  tree rhs;
419
  unsigned cost = 1;
420
 
421
  /* Always try to create possibilities for unswitching.  */
422
  if (TREE_CODE (stmt) == COND_EXPR)
423
    return LIM_EXPENSIVE;
424
 
425
  rhs = TREE_OPERAND (stmt, 1);
426
 
427
  /* Hoisting memory references out should almost surely be a win.  */
428
  if (stmt_references_memory_p (stmt))
429
    cost += 20;
430
 
431
  switch (TREE_CODE (rhs))
432
    {
433
    case CALL_EXPR:
434
      /* We should be hoisting calls if possible.  */
435
 
436
      /* Unless the call is a builtin_constant_p; this always folds to a
437
         constant, so moving it is useless.  */
438
      rhs = get_callee_fndecl (rhs);
439
      if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
440
          && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
441
        return 0;
442
 
443
      cost += 20;
444
      break;
445
 
446
    case MULT_EXPR:
447
    case TRUNC_DIV_EXPR:
448
    case CEIL_DIV_EXPR:
449
    case FLOOR_DIV_EXPR:
450
    case ROUND_DIV_EXPR:
451
    case EXACT_DIV_EXPR:
452
    case CEIL_MOD_EXPR:
453
    case FLOOR_MOD_EXPR:
454
    case ROUND_MOD_EXPR:
455
    case TRUNC_MOD_EXPR:
456
    case RDIV_EXPR:
457
      /* Division and multiplication are usually expensive.  */
458
      cost += 20;
459
      break;
460
 
461
    default:
462
      break;
463
    }
464
 
465
  return cost;
466
}
467
 
468
/* Determine the outermost loop to that it is possible to hoist a statement
469
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
470
   the outermost loop in that the value computed by STMT is invariant.
471
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
472
   we preserve the fact whether STMT is executed.  It also fills other related
473
   information to LIM_DATA (STMT).
474
 
475
   The function returns false if STMT cannot be hoisted outside of the loop it
476
   is defined in, and true otherwise.  */
477
 
478
static bool
479
determine_max_movement (tree stmt, bool must_preserve_exec)
480
{
481
  basic_block bb = bb_for_stmt (stmt);
482
  struct loop *loop = bb->loop_father;
483
  struct loop *level;
484
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
485
  tree val;
486
  ssa_op_iter iter;
487
 
488
  if (must_preserve_exec)
489
    level = ALWAYS_EXECUTED_IN (bb);
490
  else
491
    level = superloop_at_depth (loop, 1);
492
  lim_data->max_loop = level;
493
 
494
  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
495
    if (!add_dependency (val, lim_data, loop, true))
496
      return false;
497
 
498
  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
499
    if (!add_dependency (val, lim_data, loop, false))
500
      return false;
501
 
502
  lim_data->cost += stmt_cost (stmt);
503
 
504
  return true;
505
}
506
 
507
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
508
   and that one of the operands of this statement is computed by STMT.
509
   Ensure that STMT (together with all the statements that define its
510
   operands) is hoisted at least out of the loop LEVEL.  */
511
 
512
static void
513
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
514
{
515
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
516
  struct depend *dep;
517
 
518
  stmt_loop = find_common_loop (orig_loop, stmt_loop);
519
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
520
    stmt_loop = find_common_loop (stmt_loop,
521
                                  LIM_DATA (stmt)->tgt_loop->outer);
522
  if (flow_loop_nested_p (stmt_loop, level))
523
    return;
524
 
525
  gcc_assert (LIM_DATA (stmt));
526
  gcc_assert (level == LIM_DATA (stmt)->max_loop
527
              || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));
528
 
529
  LIM_DATA (stmt)->tgt_loop = level;
530
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
531
    set_level (dep->stmt, orig_loop, level);
532
}
533
 
534
/* Determines an outermost loop from that we want to hoist the statement STMT.
535
   For now we chose the outermost possible loop.  TODO -- use profiling
536
   information to set it more sanely.  */
537
 
538
static void
539
set_profitable_level (tree stmt)
540
{
541
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
542
}
543
 
544
/* Returns true if STMT is not a pure call.  */
545
 
546
static bool
547
nonpure_call_p (tree stmt)
548
{
549
  tree call = get_call_expr_in (stmt);
550
 
551
  if (!call)
552
    return false;
553
 
554
  return TREE_SIDE_EFFECTS (call) != 0;
555
}
556
 
557
/* Releases the memory occupied by DATA.  */
558
 
559
static void
560
free_lim_aux_data (struct lim_aux_data *data)
561
{
562
  struct depend *dep, *next;
563
 
564
  for (dep = data->depends; dep; dep = next)
565
    {
566
      next = dep->next;
567
      free (dep);
568
    }
569
  free (data);
570
}
571
 
572
/* Determine the outermost loops in that statements in basic block BB are
573
   invariant, and record them to the LIM_DATA associated with the statements.
574
   Callback for walk_dominator_tree.  */
575
 
576
static void
577
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
578
                              basic_block bb)
579
{
580
  enum move_pos pos;
581
  block_stmt_iterator bsi;
582
  tree stmt, rhs;
583
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
584
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
585
 
586
  if (!bb->loop_father->outer)
587
    return;
588
 
589
  if (dump_file && (dump_flags & TDF_DETAILS))
590
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
591
             bb->index, bb->loop_father->num, bb->loop_father->depth);
592
 
593
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
594
    {
595
      stmt = bsi_stmt (bsi);
596
 
597
      pos = movement_possibility (stmt);
598
      if (pos == MOVE_IMPOSSIBLE)
599
        {
600
          if (nonpure_call_p (stmt))
601
            {
602
              maybe_never = true;
603
              outermost = NULL;
604
            }
605
          continue;
606
        }
607
 
608
      /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
609
         to be hoisted out of loop, saving expensive divide.  */
610
      if (pos == MOVE_POSSIBLE
611
          && (rhs = TREE_OPERAND (stmt, 1)) != NULL
612
          && TREE_CODE (rhs) == RDIV_EXPR
613
          && flag_unsafe_math_optimizations
614
          && !flag_trapping_math
615
          && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
616
                                            loop_containing_stmt (stmt)) != NULL
617
          && outermost_invariant_loop_expr (rhs,
618
                                            loop_containing_stmt (stmt)) == NULL)
619
        {
620
          tree lhs, stmt1, stmt2, var, name;
621
 
622
          lhs = TREE_OPERAND (stmt, 0);
623
 
624
          /* stmt must be MODIFY_EXPR.  */
625
          var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
626
          add_referenced_var (var);
627
 
628
          stmt1 = build2 (MODIFY_EXPR, void_type_node, var,
629
                          build2 (RDIV_EXPR, TREE_TYPE (rhs),
630
                                  build_real (TREE_TYPE (rhs), dconst1),
631
                                  TREE_OPERAND (rhs, 1)));
632
          name = make_ssa_name (var, stmt1);
633
          TREE_OPERAND (stmt1, 0) = name;
634
          stmt2 = build2 (MODIFY_EXPR, void_type_node, lhs,
635
                          build2 (MULT_EXPR, TREE_TYPE (rhs),
636
                                  name, TREE_OPERAND (rhs, 0)));
637
 
638
          /* Replace division stmt with reciprocal and multiply stmts.
639
             The multiply stmt is not invariant, so update iterator
640
             and avoid rescanning.  */
641
          bsi_replace (&bsi, stmt1, true);
642
          bsi_insert_after (&bsi, stmt2, BSI_NEW_STMT);
643
          SSA_NAME_DEF_STMT (lhs) = stmt2;
644
 
645
          /* Continue processing with invariant reciprocal statement.  */
646
          stmt = stmt1;
647
        }
648
 
649
      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
650
      LIM_DATA (stmt)->always_executed_in = outermost;
651
 
652
      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
653
        continue;
654
 
655
      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
656
        {
657
          LIM_DATA (stmt)->max_loop = NULL;
658
          continue;
659
        }
660
 
661
      if (dump_file && (dump_flags & TDF_DETAILS))
662
        {
663
          print_generic_stmt_indented (dump_file, stmt, 0, 2);
664
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
665
                   LIM_DATA (stmt)->max_loop->depth,
666
                   LIM_DATA (stmt)->cost);
667
        }
668
 
669
      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
670
        set_profitable_level (stmt);
671
    }
672
}
673
 
674
/* For each statement determines the outermost loop in that it is invariant,
675
   statements on whose motion it depends and the cost of the computation.
676
   This information is stored to the LIM_DATA structure associated with
677
   each statement.  */
678
 
679
static void
680
determine_invariantness (void)
681
{
682
  struct dom_walk_data walk_data;
683
 
684
  memset (&walk_data, 0, sizeof (struct dom_walk_data));
685
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
686
 
687
  init_walk_dominator_tree (&walk_data);
688
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
689
  fini_walk_dominator_tree (&walk_data);
690
}
691
 
692
/* Commits edge insertions and updates loop structures.  */
693
 
694
void
695
loop_commit_inserts (void)
696
{
697
  unsigned old_last_basic_block, i;
698
  basic_block bb;
699
 
700
  old_last_basic_block = last_basic_block;
701
  bsi_commit_edge_inserts ();
702
  for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
703
    {
704
      bb = BASIC_BLOCK (i);
705
      add_bb_to_loop (bb,
706
                      find_common_loop (single_pred (bb)->loop_father,
707
                                        single_succ (bb)->loop_father));
708
    }
709
}
710
 
711
/* Hoist the statements in basic block BB out of the loops prescribed by
712
   data stored in LIM_DATA structures associated with each statement.  Callback
713
   for walk_dominator_tree.  */
714
 
715
static void
716
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
717
                        basic_block bb)
718
{
719
  struct loop *level;
720
  block_stmt_iterator bsi;
721
  tree stmt;
722
  unsigned cost = 0;
723
 
724
  if (!bb->loop_father->outer)
725
    return;
726
 
727
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
728
    {
729
      stmt = bsi_stmt (bsi);
730
 
731
      if (!LIM_DATA (stmt))
732
        {
733
          bsi_next (&bsi);
734
          continue;
735
        }
736
 
737
      cost = LIM_DATA (stmt)->cost;
738
      level = LIM_DATA (stmt)->tgt_loop;
739
      free_lim_aux_data (LIM_DATA (stmt));
740
      stmt_ann (stmt)->common.aux = NULL;
741
 
742
      if (!level)
743
        {
744
          bsi_next (&bsi);
745
          continue;
746
        }
747
 
748
      /* We do not really want to move conditionals out of the loop; we just
749
         placed it here to force its operands to be moved if necessary.  */
750
      if (TREE_CODE (stmt) == COND_EXPR)
751
        continue;
752
 
753
      if (dump_file && (dump_flags & TDF_DETAILS))
754
        {
755
          fprintf (dump_file, "Moving statement\n");
756
          print_generic_stmt (dump_file, stmt, 0);
757
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
758
                   cost, level->num);
759
        }
760
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
761
      bsi_remove (&bsi, false);
762
    }
763
}
764
 
765
/* Hoist the statements out of the loops prescribed by data stored in
766
   LIM_DATA structures associated with each statement.*/
767
 
768
static void
769
move_computations (void)
770
{
771
  struct dom_walk_data walk_data;
772
 
773
  memset (&walk_data, 0, sizeof (struct dom_walk_data));
774
  walk_data.before_dom_children_before_stmts = move_computations_stmt;
775
 
776
  init_walk_dominator_tree (&walk_data);
777
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
778
  fini_walk_dominator_tree (&walk_data);
779
 
780
  loop_commit_inserts ();
781
  if (need_ssa_update_p ())
782
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
783
}
784
 
785
/* Checks whether the statement defining variable *INDEX can be hoisted
786
   out of the loop passed in DATA.  Callback for for_each_index.  */
787
 
788
static bool
789
may_move_till (tree ref, tree *index, void *data)
790
{
791
  struct loop *loop = data, *max_loop;
792
 
793
  /* If REF is an array reference, check also that the step and the lower
794
     bound is invariant in LOOP.  */
795
  if (TREE_CODE (ref) == ARRAY_REF)
796
    {
797
      tree step = array_ref_element_size (ref);
798
      tree lbound = array_ref_low_bound (ref);
799
 
800
      max_loop = outermost_invariant_loop_expr (step, loop);
801
      if (!max_loop)
802
        return false;
803
 
804
      max_loop = outermost_invariant_loop_expr (lbound, loop);
805
      if (!max_loop)
806
        return false;
807
    }
808
 
809
  max_loop = outermost_invariant_loop (*index, loop);
810
  if (!max_loop)
811
    return false;
812
 
813
  return true;
814
}
815
 
816
/* Forces statements defining (invariant) SSA names in expression EXPR to be
817
   moved out of the LOOP.  ORIG_LOOP is the loop in that EXPR is used.  */
818
 
819
static void
820
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
821
{
822
  enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
823
  unsigned i, nops;
824
 
825
  if (TREE_CODE (expr) == SSA_NAME)
826
    {
827
      tree stmt = SSA_NAME_DEF_STMT (expr);
828
      if (IS_EMPTY_STMT (stmt))
829
        return;
830
 
831
      set_level (stmt, orig_loop, loop);
832
      return;
833
    }
834
 
835
  if (class != tcc_unary
836
      && class != tcc_binary
837
      && class != tcc_expression
838
      && class != tcc_comparison)
839
    return;
840
 
841
  nops = TREE_CODE_LENGTH (TREE_CODE (expr));
842
  for (i = 0; i < nops; i++)
843
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
844
}
845
 
846
/* Forces statement defining invariants in REF (and *INDEX) to be moved out of
847
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
848
   for_each_index.  */
849
 
850
struct fmt_data
851
{
852
  struct loop *loop;
853
  struct loop *orig_loop;
854
};
855
 
856
static bool
857
force_move_till (tree ref, tree *index, void *data)
858
{
859
  tree stmt;
860
  struct fmt_data *fmt_data = data;
861
 
862
  if (TREE_CODE (ref) == ARRAY_REF)
863
    {
864
      tree step = array_ref_element_size (ref);
865
      tree lbound = array_ref_low_bound (ref);
866
 
867
      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
868
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
869
    }
870
 
871
  if (TREE_CODE (*index) != SSA_NAME)
872
    return true;
873
 
874
  stmt = SSA_NAME_DEF_STMT (*index);
875
  if (IS_EMPTY_STMT (stmt))
876
    return true;
877
 
878
  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);
879
 
880
  return true;
881
}
882
 
883
/* Records memory reference location *REF to the list MEM_REFS.  The reference
884
   occurs in statement STMT.  */
885
 
886
static void
887
record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
888
{
889
  struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);
890
 
891
  aref->stmt = stmt;
892
  aref->ref = ref;
893
 
894
  aref->next = *mem_refs;
895
  *mem_refs = aref;
896
}
897
 
898
/* Releases list of memory reference locations MEM_REFS.  */
899
 
900
static void
901
free_mem_ref_locs (struct mem_ref_loc *mem_refs)
902
{
903
  struct mem_ref_loc *act;
904
 
905
  while (mem_refs)
906
    {
907
      act = mem_refs;
908
      mem_refs = mem_refs->next;
909
      free (act);
910
    }
911
}
912
 
913
/* Rewrites memory references in list MEM_REFS by variable TMP_VAR.  */
914
 
915
static void
916
rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
917
{
918
  tree var;
919
  ssa_op_iter iter;
920
 
921
  for (; mem_refs; mem_refs = mem_refs->next)
922
    {
923
      FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
924
        mark_sym_for_renaming (SSA_NAME_VAR (var));
925
 
926
      *mem_refs->ref = tmp_var;
927
      update_stmt (mem_refs->stmt);
928
    }
929
}
930
 
931
/* Buffer holding the name currently being generated for an lsm temporary
   variable, and the length of the part already filled in.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Appends S to lsm_tmp_name, unless the result would exceed
   MAX_LSM_NAME_LENGTH, in which case the buffer is left untouched.  */

static void
lsm_tmp_name_add (const char *s)
{
  int new_length = lsm_tmp_name_length + strlen (s);

  if (new_length > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = new_length;
}
949
 
950
/* Stores the name for temporary variable that replaces REF to
951
   lsm_tmp_name.  */
952
 
953
static void
954
gen_lsm_tmp_name (tree ref)
955
{
956
  const char *name;
957
 
958
  switch (TREE_CODE (ref))
959
    {
960
    case MISALIGNED_INDIRECT_REF:
961
    case ALIGN_INDIRECT_REF:
962
    case INDIRECT_REF:
963
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
964
      lsm_tmp_name_add ("_");
965
      break;
966
 
967
    case BIT_FIELD_REF:
968
    case VIEW_CONVERT_EXPR:
969
    case ARRAY_RANGE_REF:
970
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
971
      break;
972
 
973
    case REALPART_EXPR:
974
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
975
      lsm_tmp_name_add ("_RE");
976
      break;
977
 
978
    case IMAGPART_EXPR:
979
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
980
      lsm_tmp_name_add ("_IM");
981
      break;
982
 
983
    case COMPONENT_REF:
984
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
985
      lsm_tmp_name_add ("_");
986
      name = get_name (TREE_OPERAND (ref, 1));
987
      if (!name)
988
        name = "F";
989
      lsm_tmp_name_add ("_");
990
      lsm_tmp_name_add (name);
991
 
992
    case ARRAY_REF:
993
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
994
      lsm_tmp_name_add ("_I");
995
      break;
996
 
997
    case SSA_NAME:
998
      ref = SSA_NAME_VAR (ref);
999
      /* Fallthru.  */
1000
 
1001
    case VAR_DECL:
1002
    case PARM_DECL:
1003
      name = get_name (ref);
1004
      if (!name)
1005
        name = "D";
1006
      lsm_tmp_name_add (name);
1007
      break;
1008
 
1009
    case STRING_CST:
1010
      lsm_tmp_name_add ("S");
1011
      break;
1012
 
1013
    case RESULT_DECL:
1014
      lsm_tmp_name_add ("R");
1015
      break;
1016
 
1017
    default:
1018
      gcc_unreachable ();
1019
    }
1020
}
1021
 
1022
/* Determines name for temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   Returns a pointer to the static buffer, which is overwritten
   by the next call.  */

static char *
get_lsm_tmp_name (tree ref)
{
  /* Reset the accumulator, build the name from the structure of REF,
     and append the "_lsm" suffix that marks store-motion temporaries.  */
  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  return lsm_tmp_name;
}
1033
 
1034
/* Records request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
   The initialization of the temporary variable is put to the preheader
   of the loop, and assignments to the reference from the temporary variable
   are emitted to exits.  */

static void
schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
             struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  /* Create the temporary register that stands for REF inside the loop.  */
  tmp_var = make_rename_temp (TREE_TYPE (ref),
                              get_lsm_tmp_name (ref));

  /* Force the invariant parts of the address computation (indices,
     bounds, steps) to be moved out of the loop, so that REF itself is
     computable outside it.  */
  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  /* Replace all occurrences of REF inside the loop by TMP_VAR, and mark
     their statements so that store motion is not attempted again.  */
  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build2 (MODIFY_EXPR, void_type_node, tmp_var, ref);
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  /* On every exit from the loop, store TMP_VAR back into REF; each exit
     gets its own unshared copy of the reference tree.  */
  for (i = 0; i < n_exits; i++)
    {
      store = build2 (MODIFY_EXPR, void_type_node,
                      unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (exits[i], store);
    }
}
1088
 
1089
/* Check whether memory reference REF can be hoisted out of the LOOP.  If this
   is true, prepare the statements that load the value of the memory reference
   to a temporary variable in the loop preheader, store it back on the loop
   exits, and replace all the references inside LOOP by this temporary variable.
   LOOP has N_EXITS stored in EXITS.  CLOBBERED_VOPS is the bitmap of virtual
   operands that are clobbered by a call or accessed through multiple references
   in loop.  */

static void
determine_lsm_ref (struct loop *loop, edge *exits, unsigned n_exits,
                   bitmap clobbered_vops, struct mem_ref *ref)
{
  struct mem_ref_loc *aref;
  struct loop *must_exec;

  /* In case the memory is not stored to, there is nothing for SM to do.  */
  if (!ref->is_stored)
    return;

  /* If the reference is aliased with any different ref, or killed by call
     in function, then fail.  */
  if (bitmap_intersect_p (ref->vops, clobbered_vops))
    return;

  if (tree_could_trap_p (ref->mem))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that some
         of the statements in that it occurs is always executed when the loop
         is entered.  This way we know that by moving the load from the
         reference out of the loop we will not cause the error that would not
         occur otherwise.

         TODO -- in fact we would like to check for anticipability of the
         reference, i.e. that on each path from loop entry to loop exit at
         least one of the statements containing the memory reference is
         executed.  */

      /* Look for an occurrence whose enclosing statement is always
         executed in LOOP (or in a loop nested inside LOOP).  */
      for (aref = ref->locs; aref; aref = aref->next)
        {
          if (!LIM_DATA (aref->stmt))
            continue;

          must_exec = LIM_DATA (aref->stmt)->always_executed_in;
          if (!must_exec)
            continue;

          if (must_exec == loop
              || flow_loop_nested_p (must_exec, loop))
            break;
        }

      /* No always-executed occurrence was found; hoisting the load
         could introduce a trap, so give up.  */
      if (!aref)
        return;
    }

  /* All checks passed -- perform the store motion.  */
  schedule_sm (loop, exits, n_exits, ref->mem, ref->locs);
}
1146
 
1147
/* Hoists memory references MEM_REFS out of LOOP.  CLOBBERED_VOPS is the list
1148
   of vops clobbered by call in loop or accessed by multiple memory references.
1149
   EXITS is the list of N_EXITS exit edges of the LOOP.  */
1150
 
1151
static void
1152
hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
1153
                         bitmap clobbered_vops, edge *exits, unsigned n_exits)
1154
{
1155
  struct mem_ref *ref;
1156
 
1157
  for (ref = mem_refs; ref; ref = ref->next)
1158
    determine_lsm_ref (loop, exits, n_exits, clobbered_vops, ref);
1159
}
1160
 
1161
/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
1162
   for a store motion optimization (i.e. whether we can insert statement
1163
   on its exits).  */
1164
 
1165
static bool
1166
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
1167
                      unsigned n_exits)
1168
{
1169
  unsigned i;
1170
 
1171
  for (i = 0; i < n_exits; i++)
1172
    if (exits[i]->flags & EDGE_ABNORMAL)
1173
      return false;
1174
 
1175
  return true;
1176
}
1177
 
1178
/* A hash function for struct mem_ref object OBJ.  */
1179
 
1180
static hashval_t
1181
memref_hash (const void *obj)
1182
{
1183
  const struct mem_ref *mem = obj;
1184
 
1185
  return mem->hash;
1186
}
1187
 
1188
/* An equality function for struct mem_ref object OBJ1 with
1189
   memory reference OBJ2.  */
1190
 
1191
static int
1192
memref_eq (const void *obj1, const void *obj2)
1193
{
1194
  const struct mem_ref *mem1 = obj1;
1195
 
1196
  return operand_equal_p (mem1->mem, (tree) obj2, 0);
1197
}
1198
 
1199
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in MEM_REFS hash table.  Note vops accessed through
   unrecognized statements in CLOBBERED_VOPS.  The newly created references
   are also stored to MEM_REF_LIST.  */

static void
gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
                      bitmap clobbered_vops, tree stmt,
                      struct mem_ref **mem_ref_list)
{
  tree *lhs, *rhs, *mem = NULL;
  hashval_t hash;
  PTR *slot;
  struct mem_ref *ref = NULL;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;

  /* Statements without virtual operands do not touch memory.  */
  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != MODIFY_EXPR)
    goto fail;

  lhs = &TREE_OPERAND (stmt, 0);
  rhs = &TREE_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      /* SSA_NAME = MEM: a load of the reference.  */
      if (!is_gimple_addressable (*rhs))
        goto fail;

      mem = rhs;
      is_stored = false;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
           || is_gimple_min_invariant (*rhs))
    {
      /* MEM = (SSA_NAME | invariant): a store to the reference.  */
      mem = lhs;
      is_stored = true;
    }
  else
    goto fail;

  /* If we cannot create an SSA name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (*mem))
      || TREE_THIS_VOLATILE (*mem))
    goto fail;

  /* If we cannot move the reference out of the loop, fail.  */
  if (!for_each_index (mem, may_move_till, loop))
    goto fail;

  /* Find the record for this reference, or create a new one if this is
     the first occurrence.  */
  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);

  if (*slot)
    ref = *slot;
  else
    {
      ref = XNEW (struct mem_ref);
      ref->mem = *mem;
      ref->hash = hash;
      ref->locs = NULL;
      ref->is_stored = false;
      ref->vops = BITMAP_ALLOC (NULL);
      ref->next = *mem_ref_list;
      *mem_ref_list = ref;
      *slot = ref;
    }
  ref->is_stored |= is_stored;

  /* Accumulate the virtual operands touched by this occurrence, and
     record its location.  */
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
                             SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (&ref->locs, stmt, mem);
  return;

fail:
  /* The statement was not recognized; mark all the virtual operands it
     touches as clobbered, disqualifying the references using them.  */
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
                             SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
    bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
1283
 
1284
/* Gathers memory references in LOOP.  Notes vops accessed through unrecognized
1285
   statements in CLOBBERED_VOPS.  The list of the references found by
1286
   the function is returned.  */
1287
 
1288
static struct mem_ref *
1289
gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
1290
{
1291
  basic_block *body = get_loop_body (loop);
1292
  block_stmt_iterator bsi;
1293
  unsigned i;
1294
  struct mem_ref *mem_ref_list = NULL;
1295
  htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);
1296
 
1297
  for (i = 0; i < loop->num_nodes; i++)
1298
    {
1299
      for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
1300
        gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
1301
                              &mem_ref_list);
1302
    }
1303
 
1304
  free (body);
1305
 
1306
  htab_delete (mem_refs);
1307
  return mem_ref_list;
1308
}
1309
 
1310
/* Finds the vops accessed by more than one of the memory references described
1311
   in MEM_REFS and marks them in CLOBBERED_VOPS.  */
1312
 
1313
static void
1314
find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
1315
{
1316
  bitmap_head tmp, all_vops;
1317
  struct mem_ref *ref;
1318
 
1319
  bitmap_initialize (&tmp, &bitmap_default_obstack);
1320
  bitmap_initialize (&all_vops, &bitmap_default_obstack);
1321
 
1322
  for (ref = mem_refs; ref; ref = ref->next)
1323
    {
1324
      /* The vops that are already in all_vops are accessed by more than
1325
         one memory reference.  */
1326
      bitmap_and (&tmp, &all_vops, ref->vops);
1327
      bitmap_ior_into (clobbered_vops, &tmp);
1328
      bitmap_clear (&tmp);
1329
 
1330
      bitmap_ior_into (&all_vops, ref->vops);
1331
    }
1332
 
1333
  bitmap_clear (&all_vops);
1334
}
1335
 
1336
/* Releases the memory occupied by REF.  */
1337
 
1338
static void
1339
free_mem_ref (struct mem_ref *ref)
1340
{
1341
  free_mem_ref_locs (ref->locs);
1342
  BITMAP_FREE (ref->vops);
1343
  free (ref);
1344
}
1345
 
1346
/* Releases the memory occupied by REFS.  */
1347
 
1348
static void
1349
free_mem_refs (struct mem_ref *refs)
1350
{
1351
  struct mem_ref *ref, *next;
1352
 
1353
  for (ref = refs; ref; ref = next)
1354
    {
1355
      next = ref->next;
1356
      free_mem_ref (ref);
1357
    }
1358
}
1359
 
1360
/* Try to perform store motion for all memory references modified inside
1361
   LOOP.  */
1362
 
1363
static void
1364
determine_lsm_loop (struct loop *loop)
1365
{
1366
  unsigned n_exits;
1367
  edge *exits = get_loop_exit_edges (loop, &n_exits);
1368
  bitmap clobbered_vops;
1369
  struct mem_ref *mem_refs;
1370
 
1371
  if (!loop_suitable_for_sm (loop, exits, n_exits))
1372
    {
1373
      free (exits);
1374
      return;
1375
    }
1376
 
1377
  /* Find the memory references in LOOP.  */
1378
  clobbered_vops = BITMAP_ALLOC (NULL);
1379
  mem_refs = gather_mem_refs (loop, clobbered_vops);
1380
 
1381
  /* Find the vops that are used for more than one reference.  */
1382
  find_more_ref_vops (mem_refs, clobbered_vops);
1383
 
1384
  /* Hoist all suitable memory references.  */
1385
  hoist_memory_references (loop, mem_refs, clobbered_vops, exits, n_exits);
1386
 
1387
  free_mem_refs (mem_refs);
1388
  free (exits);
1389
  BITMAP_FREE (clobbered_vops);
1390
}
1391
 
1392
/* Try to perform store motion for all memory references modified inside
   any of LOOPS.  */

static void
determine_lsm (struct loops *loops)
{
  struct loop *loop;

  /* Nothing to do if the function contains no loops.  */
  if (!loops->tree_root->inner)
    return;

  /* Pass the loops from the outermost and perform the store motion as
     suitable.  */

  loop = loops->tree_root->inner;
  while (1)
    {
      determine_lsm_loop (loop);

      /* Depth-first walk of the loop tree: descend into the first inner
         loop if there is one ...  */
      if (loop->inner)
        {
          loop = loop->inner;
          continue;
        }
      /* ... otherwise climb back up until a sibling is found.  Reaching
         the tree root means all loops have been processed; commit the
         statements queued on edges and finish.  */
      while (!loop->next)
        {
          loop = loop->outer;
          if (loop == loops->tree_root)
            {
              loop_commit_inserts ();
              return;
            }
        }
      loop = loop->next;
    }
}
1428
 
1429
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records (in bb->aux) the outermost loop for
   that execution of its header implies execution of bb.  CONTAINS_CALL is
   the bitmap of blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  /* loop->header->aux already set means this loop was handled by an
     earlier call; only the subloops remain to be processed.  */
  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      /* Walk the body in dominance order; LAST tracks the deepest block
         seen so far that dominates the latch (and hence is executed on
         every iteration).  */
      for (i = 0; i < loop->num_nodes; i++)
        {
          edge_iterator ei;
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          /* A nonpure call may not return; stop here.  */
          if (TEST_BIT (contains_call, bb->index))
            break;

          /* An edge leaving the loop means the blocks after BB need not
             be executed.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              break;
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          /* Once we left the inner loop we entered, stop.  */
          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }

      /* LAST and all its dominators up to the header are always
         executed in LOOP; record that fact in their aux fields.  */
      while (1)
        {
          last->aux = loop;
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  /* Recurse into the subloops.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
1496
 
1497
/* Compute the global information needed by the loop invariant motion pass.
1498
   LOOPS is the loop tree.  */
1499
 
1500
static void
1501
tree_ssa_lim_initialize (struct loops *loops)
1502
{
1503
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
1504
  block_stmt_iterator bsi;
1505
  struct loop *loop;
1506
  basic_block bb;
1507
 
1508
  sbitmap_zero (contains_call);
1509
  FOR_EACH_BB (bb)
1510
    {
1511
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1512
        {
1513
          if (nonpure_call_p (bsi_stmt (bsi)))
1514
            break;
1515
        }
1516
 
1517
      if (!bsi_end_p (bsi))
1518
        SET_BIT (contains_call, bb->index);
1519
    }
1520
 
1521
  for (loop = loops->tree_root->inner; loop; loop = loop->next)
1522
    fill_always_executed_in (loop, contains_call);
1523
 
1524
  sbitmap_free (contains_call);
1525
}
1526
 
1527
/* Cleans up after the invariant motion pass.  */
1528
 
1529
static void
1530
tree_ssa_lim_finalize (void)
1531
{
1532
  basic_block bb;
1533
 
1534
  FOR_EACH_BB (bb)
1535
    {
1536
      bb->aux = NULL;
1537
    }
1538
}
1539
 
1540
/* Moves invariants from LOOPS.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be win regardless of the register pressure.
   Entry point of the pass; runs the analysis and transformation phases
   in order.  */

void
tree_ssa_lim (struct loops *loops)
{
  /* Compute the always-executed-in information for all blocks.  */
  tree_ssa_lim_initialize (loops);

  /* For each statement determine the outermost loop in that it is
     invariant and cost for computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm (loops);

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  /* Clear the per-block aux data set during initialization.  */
  tree_ssa_lim_finalize ();
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.