OpenCores Subversion repository: openrisc_me
URL: https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk
File: openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-ssa-loop-manip.c (rev 280)

/* High-level loop manipulation functions.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "cfglayout.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "tree-inline.h"
#include "langhooks.h"

/* Creates an induction variable with value BASE + STEP * iteration in LOOP.
   It is expected that neither BASE nor STEP are shared with other expressions
   (unless the sharing rules allow this).  Use VAR as a base var_decl for it
   (if NULL, a new temporary will be created).  The increment will occur at
   INCR_POS (after it if AFTER is true, before it otherwise).  INCR_POS and
   AFTER can be computed using standard_iv_increment_position.  The ssa versions
   of the variable before and after increment will be stored in VAR_BEFORE and
   VAR_AFTER (unless they are NULL).  */

void
create_iv (tree base, tree step, tree var, struct loop *loop,
           gimple_stmt_iterator *incr_pos, bool after,
           tree *var_before, tree *var_after)
{
  gimple stmt;
  tree initial, step1;
  gimple_seq stmts;
  tree vb, va;
  enum tree_code incr_op = PLUS_EXPR;
  edge pe = loop_preheader_edge (loop);

  if (!var)
    {
      var = create_tmp_var (TREE_TYPE (base), "ivtmp");
      add_referenced_var (var);
    }

  vb = make_ssa_name (var, NULL);
  if (var_before)
    *var_before = vb;
  va = make_ssa_name (var, NULL);
  if (var_after)
    *var_after = va;

  /* For easier readability of the created code, produce MINUS_EXPRs
     when suitable.  */
  if (TREE_CODE (step) == INTEGER_CST)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (step)))
        {
          step1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
          if (tree_int_cst_lt (step1, step))
            {
              incr_op = MINUS_EXPR;
              step = step1;
            }
        }
      else
        {
          bool ovf;

          if (!tree_expr_nonnegative_warnv_p (step, &ovf)
              && may_negate_without_overflow_p (step))
            {
              incr_op = MINUS_EXPR;
              step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
            }
        }
    }
  if (POINTER_TYPE_P (TREE_TYPE (base)))
    {
      if (TREE_CODE (base) == ADDR_EXPR)
        mark_addressable (TREE_OPERAND (base, 0));
      step = fold_convert (sizetype, step);
      if (incr_op == MINUS_EXPR)
        step = fold_build1 (NEGATE_EXPR, sizetype, step);
      incr_op = POINTER_PLUS_EXPR;
    }
  /* Gimplify the step if necessary.  We put the computations in front of the
     loop (i.e. the step should be loop invariant).  */
  step = force_gimple_operand (step, &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  stmt = gimple_build_assign_with_ops (incr_op, va, vb, step);
  if (after)
    gsi_insert_after (incr_pos, stmt, GSI_NEW_STMT);
  else
    gsi_insert_before (incr_pos, stmt, GSI_NEW_STMT);

  initial = force_gimple_operand (base, &stmts, true, var);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  stmt = create_phi_node (vb, loop->header);
  SSA_NAME_DEF_STMT (vb) = stmt;
  add_phi_arg (stmt, initial, loop_preheader_edge (loop), UNKNOWN_LOCATION);
  add_phi_arg (stmt, va, loop_latch_edge (loop), UNKNOWN_LOCATION);
}

/* Add exit phis for the USE on EXIT.  */

static void
add_exit_phis_edge (basic_block exit, tree use)
{
  gimple phi, def_stmt = SSA_NAME_DEF_STMT (use);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *def_loop;
  edge e;
  edge_iterator ei;

  /* Check that some of the edges entering the EXIT block exits a loop in
     that USE is defined.  */
  FOR_EACH_EDGE (e, ei, exit->preds)
    {
      def_loop = find_common_loop (def_bb->loop_father, e->src->loop_father);
      if (!flow_bb_inside_loop_p (def_loop, e->dest))
        break;
    }

  if (!e)
    return;

  phi = create_phi_node (use, exit);
  create_new_def_for (gimple_phi_result (phi), phi,
                      gimple_phi_result_ptr (phi));
  FOR_EACH_EDGE (e, ei, exit->preds)
    add_phi_arg (phi, use, e, UNKNOWN_LOCATION);
}

/* Add exit phis for VAR that is used in LIVEIN.
   Exits of the loops are stored in EXITS.  */

static void
add_exit_phis_var (tree var, bitmap livein, bitmap exits)
{
  bitmap def;
  unsigned index;
  basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
  bitmap_iterator bi;

  if (is_gimple_reg (var))
    bitmap_clear_bit (livein, def_bb->index);
  else
    bitmap_set_bit (livein, def_bb->index);

  def = BITMAP_ALLOC (NULL);
  bitmap_set_bit (def, def_bb->index);
  compute_global_livein (livein, def);
  BITMAP_FREE (def);

  EXECUTE_IF_AND_IN_BITMAP (exits, livein, 0, index, bi)
    {
      add_exit_phis_edge (BASIC_BLOCK (index), var);
    }
}

/* Add exit phis for the names marked in NAMES_TO_RENAME.
   Exits of the loops are stored in EXITS.  Sets of blocks where the ssa
   names are used are stored in USE_BLOCKS.  */

static void
add_exit_phis (bitmap names_to_rename, bitmap *use_blocks, bitmap loop_exits)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (names_to_rename, 0, i, bi)
    {
      add_exit_phis_var (ssa_name (i), use_blocks[i], loop_exits);
    }
}

/* Returns a bitmap of all loop exit edge targets.  */

static bitmap
get_loops_exits (void)
{
  bitmap exits = BITMAP_ALLOC (NULL);
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src != ENTRY_BLOCK_PTR
            && !flow_bb_inside_loop_p (e->src->loop_father, bb))
          {
            bitmap_set_bit (exits, bb->index);
            break;
          }
    }

  return exits;
}

/* For USE in BB, if it is used outside of the loop it is defined in,
   mark it for rewrite.  Record basic block BB where it is used
   to USE_BLOCKS.  Record the ssa name index to NEED_PHIS bitmap.  */

static void
find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
                         bitmap need_phis)
{
  unsigned ver;
  basic_block def_bb;
  struct loop *def_loop;

  if (TREE_CODE (use) != SSA_NAME)
    return;

  /* We don't need to keep virtual operands in loop-closed form.  */
  if (!is_gimple_reg (use))
    return;

  ver = SSA_NAME_VERSION (use);
  def_bb = gimple_bb (SSA_NAME_DEF_STMT (use));
  if (!def_bb)
    return;
  def_loop = def_bb->loop_father;

  /* If the definition is not inside a loop, it is not interesting.  */
  if (!loop_outer (def_loop))
    return;

  /* If the use is not outside of the loop it is defined in, it is not
     interesting.  */
  if (flow_bb_inside_loop_p (def_loop, bb))
    return;

  if (!use_blocks[ver])
    use_blocks[ver] = BITMAP_ALLOC (NULL);
  bitmap_set_bit (use_blocks[ver], bb->index);

  bitmap_set_bit (need_phis, ver);
}

/* For uses in STMT, mark names that are used outside of the loop they are
   defined to rewrite.  Record the set of blocks in that the ssa
   names are defined to USE_BLOCKS and the ssa names themselves to
   NEED_PHIS.  */

static void
find_uses_to_rename_stmt (gimple stmt, bitmap *use_blocks, bitmap need_phis)
{
  ssa_op_iter iter;
  tree var;
  basic_block bb = gimple_bb (stmt);

  if (is_gimple_debug (stmt))
    return;

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
    find_uses_to_rename_use (bb, var, use_blocks, need_phis);
}

/* Marks names that are used in BB and outside of the loop they are
   defined in for rewrite.  Records the set of blocks in that the ssa
   names are defined to USE_BLOCKS.  Record the SSA names that will
   need exit PHIs in NEED_PHIS.  */

static void
find_uses_to_rename_bb (basic_block bb, bitmap *use_blocks, bitmap need_phis)
{
  gimple_stmt_iterator bsi;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
      find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (gsi_stmt (bsi), e),
                               use_blocks, need_phis);

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    find_uses_to_rename_stmt (gsi_stmt (bsi), use_blocks, need_phis);
}

/* Marks names that are used outside of the loop they are defined in
   for rewrite.  Records the set of blocks in that the ssa
   names are defined to USE_BLOCKS.  If CHANGED_BBS is not NULL,
   scan only blocks in this set.  */

static void
find_uses_to_rename (bitmap changed_bbs, bitmap *use_blocks, bitmap need_phis)
{
  basic_block bb;
  unsigned index;
  bitmap_iterator bi;

  if (changed_bbs && !bitmap_empty_p (changed_bbs))
    {
      EXECUTE_IF_SET_IN_BITMAP (changed_bbs, 0, index, bi)
        {
          find_uses_to_rename_bb (BASIC_BLOCK (index), use_blocks, need_phis);
        }
    }
  else
    {
      FOR_EACH_BB (bb)
        {
          find_uses_to_rename_bb (bb, use_blocks, need_phis);
        }
    }
}

/* Rewrites the program into a loop closed ssa form -- i.e. inserts extra
   phi nodes to ensure that no variable is used outside the loop it is
   defined in.

   This strengthening of the basic ssa form has several advantages:

   1) Updating it during unrolling/peeling/versioning is trivial, since
      we do not need to care about the uses outside of the loop.
   2) The behavior of all uses of an induction variable is the same.
      Without this, you need to distinguish the case when the variable
      is used outside of the loop it is defined in, for example

      for (i = 0; i < 100; i++)
        {
          for (j = 0; j < 100; j++)
            {
              k = i + j;
              use1 (k);
            }
          use2 (k);
        }

      Looking from the outer loop with the normal SSA form, the first use of k
      is not well-behaved, while the second one is an induction variable with
      base 99 and step 1.

      If CHANGED_BBS is not NULL, we look for uses outside loops only in
      the basic blocks in this set.

      UPDATE_FLAG is used in the call to update_ssa.  See
      TODO_update_ssa* for documentation.  */

void
rewrite_into_loop_closed_ssa (bitmap changed_bbs, unsigned update_flag)
{
  bitmap loop_exits;
  bitmap *use_blocks;
  unsigned i, old_num_ssa_names;
  bitmap names_to_rename;

  loops_state_set (LOOP_CLOSED_SSA);
  if (number_of_loops () <= 1)
    return;

  loop_exits = get_loops_exits ();
  names_to_rename = BITMAP_ALLOC (NULL);

  /* If the pass has caused the SSA form to be out-of-date, update it
     now.  */
  update_ssa (update_flag);

  old_num_ssa_names = num_ssa_names;
  use_blocks = XCNEWVEC (bitmap, old_num_ssa_names);

  /* Find the uses outside loops.  */
  find_uses_to_rename (changed_bbs, use_blocks, names_to_rename);

  /* Add the PHI nodes on exits of the loops for the names we need to
     rewrite.  */
  add_exit_phis (names_to_rename, use_blocks, loop_exits);

  for (i = 0; i < old_num_ssa_names; i++)
    BITMAP_FREE (use_blocks[i]);
  free (use_blocks);
  BITMAP_FREE (loop_exits);
  BITMAP_FREE (names_to_rename);

  /* Fix up all the names found to be used outside their original
     loops.  */
  update_ssa (TODO_update_ssa);
}

/* Check invariants of the loop closed ssa form for the USE in BB.  */

static void
check_loop_closed_ssa_use (basic_block bb, tree use)
{
  gimple def;
  basic_block def_bb;

  if (TREE_CODE (use) != SSA_NAME || !is_gimple_reg (use))
    return;

  def = SSA_NAME_DEF_STMT (use);
  def_bb = gimple_bb (def);
  gcc_assert (!def_bb
              || flow_bb_inside_loop_p (def_bb->loop_father, bb));
}

/* Checks invariants of loop closed ssa form in statement STMT in BB.  */

static void
check_loop_closed_ssa_stmt (basic_block bb, gimple stmt)
{
  ssa_op_iter iter;
  tree var;

  if (is_gimple_debug (stmt))
    return;

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
    check_loop_closed_ssa_use (bb, var);
}

/* Checks that invariants of the loop closed ssa form are preserved.  */

void
verify_loop_closed_ssa (void)
{
  basic_block bb;
  gimple_stmt_iterator bsi;
  gimple phi;
  edge e;
  edge_iterator ei;

  if (number_of_loops () <= 1)
    return;

  verify_ssa (false);

  FOR_EACH_BB (bb)
    {
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          phi = gsi_stmt (bsi);
          FOR_EACH_EDGE (e, ei, bb->preds)
            check_loop_closed_ssa_use (e->src,
                                       PHI_ARG_DEF_FROM_EDGE (phi, e));
        }

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        check_loop_closed_ssa_stmt (bb, gsi_stmt (bsi));
    }
}

/* Split loop exit edge EXIT.  The things are a bit complicated by a need to
   preserve the loop closed ssa form.  The newly created block is returned.  */

basic_block
split_loop_exit_edge (edge exit)
{
  basic_block dest = exit->dest;
  basic_block bb = split_edge (exit);
  gimple phi, new_phi;
  tree new_name, name;
  use_operand_p op_p;
  gimple_stmt_iterator psi;
  source_location locus;

  for (psi = gsi_start_phis (dest); !gsi_end_p (psi); gsi_next (&psi))
    {
      phi = gsi_stmt (psi);
      op_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (bb));
      locus = gimple_phi_arg_location_from_edge (phi, single_succ_edge (bb));

      name = USE_FROM_PTR (op_p);

      /* If the argument of the PHI node is a constant, we do not need
         to keep it inside loop.  */
      if (TREE_CODE (name) != SSA_NAME)
        continue;

      /* Otherwise create an auxiliary phi node that will copy the value
         of the SSA name out of the loop.  */
      new_name = duplicate_ssa_name (name, NULL);
      new_phi = create_phi_node (new_name, bb);
      SSA_NAME_DEF_STMT (new_name) = new_phi;
      add_phi_arg (new_phi, name, exit, locus);
      SET_USE (op_p, new_name);
    }

  return bb;
}

/* Returns the basic block in that statements should be emitted for induction
   variables incremented at the end of the LOOP.  */

basic_block
ip_end_pos (struct loop *loop)
{
  return loop->latch;
}

/* Returns the basic block in that statements should be emitted for induction
   variables incremented just before exit condition of a LOOP.  */

basic_block
ip_normal_pos (struct loop *loop)
{
  gimple last;
  basic_block bb;
  edge exit;

  if (!single_pred_p (loop->latch))
    return NULL;

  bb = single_pred (loop->latch);
  last = last_stmt (bb);
  if (!last
      || gimple_code (last) != GIMPLE_COND)
    return NULL;

  exit = EDGE_SUCC (bb, 0);
  if (exit->dest == loop->latch)
    exit = EDGE_SUCC (bb, 1);

  if (flow_bb_inside_loop_p (loop, exit->dest))
    return NULL;

  return bb;
}

/* Stores the standard position for induction variable increment in LOOP
   (just before the exit condition if it is available and latch block is empty,
   end of the latch block otherwise) to BSI.  INSERT_AFTER is set to true if
   the increment should be inserted after *BSI.  */

void
standard_iv_increment_position (struct loop *loop, gimple_stmt_iterator *bsi,
                                bool *insert_after)
{
  basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
  gimple last = last_stmt (latch);

  if (!bb
      || (last && gimple_code (last) != GIMPLE_LABEL))
    {
      *bsi = gsi_last_bb (latch);
      *insert_after = true;
    }
  else
    {
      *bsi = gsi_last_bb (bb);
      *insert_after = false;
    }
}

/* Copies phi node arguments for duplicated blocks.  The index of the first
   duplicated block is FIRST_NEW_BLOCK.  */

static void
copy_phi_node_args (unsigned first_new_block)
{
  unsigned i;

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    BASIC_BLOCK (i)->flags |= BB_DUPLICATED;

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    add_phi_args_after_copy_bb (BASIC_BLOCK (i));

  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
    BASIC_BLOCK (i)->flags &= ~BB_DUPLICATED;
}


/* The same as cfgloopmanip.c:duplicate_loop_to_header_edge, but also
   updates the PHI nodes at start of the copied region.  In order to
   achieve this, only loops whose exits all lead to the same location
   are handled.

   Notice that we do not completely update the SSA web after
   duplication.  The caller is responsible for calling update_ssa
   after the loop has been duplicated.  */

bool
gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
                                    unsigned int ndupl, sbitmap wont_exit,
                                    edge orig, VEC (edge, heap) **to_remove,
                                    int flags)
{
  unsigned first_new_block;

  if (!loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
    return false;
  if (!loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
    return false;

#ifdef ENABLE_CHECKING
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    verify_loop_closed_ssa ();
#endif

  first_new_block = last_basic_block;
  if (!duplicate_loop_to_header_edge (loop, e, ndupl, wont_exit,
                                      orig, to_remove, flags))
    return false;

  /* Readd the removed phi args for e.  */
  flush_pending_stmts (e);

  /* Copy the phi node arguments.  */
  copy_phi_node_args (first_new_block);

  scev_reset ();

  return true;
}

/* Returns true if we can unroll LOOP FACTOR times.  Number
   of iterations of the loop is returned in NITER.  */

bool
can_unroll_loop_p (struct loop *loop, unsigned factor,
                   struct tree_niter_desc *niter)
{
  edge exit;

  /* Check whether unrolling is possible.  We only want to unroll loops
     for that we are able to determine number of iterations.  We also
     want to split the extra iterations of the loop from its end,
     therefore we require that the loop has precisely one
     exit.  */

  exit = single_dom_exit (loop);
  if (!exit)
    return false;

  if (!number_of_iterations_exit (loop, exit, niter, false)
      || niter->cmp == ERROR_MARK
      /* Scalar evolutions analysis might have copy propagated
         the abnormal ssa names into these expressions, hence
         emitting the computations based on them during loop
         unrolling might create overlapping life ranges for
         them, and failures in out-of-ssa.  */
      || contains_abnormal_ssa_name_p (niter->may_be_zero)
      || contains_abnormal_ssa_name_p (niter->control.base)
      || contains_abnormal_ssa_name_p (niter->control.step)
      || contains_abnormal_ssa_name_p (niter->bound))
    return false;

  /* And of course, we must be able to duplicate the loop.  */
  if (!can_duplicate_loop_p (loop))
    return false;

  /* The final loop should be small enough.  */
  if (tree_num_loop_insns (loop, &eni_size_weights) * factor
      > (unsigned) PARAM_VALUE (PARAM_MAX_UNROLLED_INSNS))
    return false;

  return true;
}

/* Determines the conditions that control execution of LOOP unrolled FACTOR
   times.  DESC is number of iterations of LOOP.  ENTER_COND is set to
   condition that must be true if the main loop can be entered.
   EXIT_BASE, EXIT_STEP, EXIT_CMP and EXIT_BOUND are set to values describing
   how the exit from the unrolled loop should be controlled.  */

static void
determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
                           unsigned factor, tree *enter_cond,
                           tree *exit_base, tree *exit_step,
                           enum tree_code *exit_cmp, tree *exit_bound)
{
  gimple_seq stmts;
  tree base = desc->control.base;
  tree step = desc->control.step;
  tree bound = desc->bound;
  tree type = TREE_TYPE (step);
  tree bigstep, delta;
  tree min = lower_bound_in_type (type, type);
  tree max = upper_bound_in_type (type, type);
  enum tree_code cmp = desc->cmp;
  tree cond = boolean_true_node, assum;

  /* For pointers, do the arithmetics in the type of step (sizetype).  */
  base = fold_convert (type, base);
  bound = fold_convert (type, bound);

  *enter_cond = boolean_false_node;
  *exit_base = NULL_TREE;
  *exit_step = NULL_TREE;
  *exit_cmp = ERROR_MARK;
  *exit_bound = NULL_TREE;
  gcc_assert (cmp != ERROR_MARK);

  /* We only need to be correct when we answer question
     "Do at least FACTOR more iterations remain?" in the unrolled loop.
     Thus, transforming BASE + STEP * i <> BOUND to
     BASE + STEP * i < BOUND is ok.  */
  if (cmp == NE_EXPR)
    {
      if (tree_int_cst_sign_bit (step))
        cmp = GT_EXPR;
      else
        cmp = LT_EXPR;
    }
  else if (cmp == LT_EXPR)
    {
      gcc_assert (!tree_int_cst_sign_bit (step));
    }
  else if (cmp == GT_EXPR)
    {
      gcc_assert (tree_int_cst_sign_bit (step));
    }
  else
    gcc_unreachable ();

  /* The main body of the loop may be entered iff:

     1) desc->may_be_zero is false.
     2) it is possible to check that there are at least FACTOR iterations
        of the loop, i.e., BOUND - step * FACTOR does not overflow.
     3) # of iterations is at least FACTOR  */

  if (!integer_zerop (desc->may_be_zero))
    cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                        invert_truthvalue (desc->may_be_zero),
                        cond);

  bigstep = fold_build2 (MULT_EXPR, type, step,
                         build_int_cst_type (type, factor));
  delta = fold_build2 (MINUS_EXPR, type, bigstep, step);
  if (cmp == LT_EXPR)
    assum = fold_build2 (GE_EXPR, boolean_type_node,
                         bound,
                         fold_build2 (PLUS_EXPR, type, min, delta));
  else
    assum = fold_build2 (LE_EXPR, boolean_type_node,
                         bound,
                         fold_build2 (PLUS_EXPR, type, max, delta));
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  bound = fold_build2 (MINUS_EXPR, type, bound, delta);
  assum = fold_build2 (cmp, boolean_type_node, base, bound);
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  cond = force_gimple_operand (unshare_expr (cond), &stmts, false, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  /* cond now may be a gimple comparison, which would be OK, but also any
     other gimple rhs (say a && b).  In this case we need to force it to
     operand.  */
  if (!is_gimple_condexpr (cond))
    {
      cond = force_gimple_operand (cond, &stmts, true, NULL_TREE);
      if (stmts)
        gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }
  *enter_cond = cond;

  base = force_gimple_operand (unshare_expr (base), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  bound = force_gimple_operand (unshare_expr (bound), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);

  *exit_base = base;
  *exit_step = bigstep;
  *exit_cmp = cmp;
  *exit_bound = bound;
}

/* Scales the frequencies of all basic blocks in LOOP that are strictly
   dominated by BB by NUM/DEN.  */

static void
scale_dominated_blocks_in_loop (struct loop *loop, basic_block bb,
                                int num, int den)
{
  basic_block son;

  if (den == 0)
    return;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      if (!flow_bb_inside_loop_p (loop, son))
        continue;
      scale_bbs_frequencies_int (&son, 1, num, den);
      scale_dominated_blocks_in_loop (loop, son, num, den);
    }
}

/* Unroll LOOP FACTOR times.  DESC describes number of iterations of LOOP.
   EXIT is the exit of the loop to that DESC corresponds.

   If N is number of iterations of the loop and MAY_BE_ZERO is the condition
   under that loop exits in the first iteration even if N != 0,

   while (1)
     {
       x = phi (init, next);

       pre;
       if (st)
         break;
       post;
     }

   becomes (with possibly the exit conditions formulated a bit differently,
   avoiding the need to create a new iv):

   if (MAY_BE_ZERO || N < FACTOR)
     goto rest;

   do
     {
       x = phi (init, next);

       pre;
       post;
       pre;
       post;
       ...
       pre;
       post;
       N -= FACTOR;

     } while (N >= FACTOR);

   rest:
     init' = phi (init, x);

   while (1)
     {
       x = phi (init', next);

       pre;
       if (st)
         break;
       post;
     }

   Before the loop is unrolled, TRANSFORM is called for it (only for the
   unrolled loop, but not for its versioned copy).  DATA is passed to
   TRANSFORM.  */

/* Probability in % that the unrolled loop is entered.  Just a guess.  */
#define PROB_UNROLLED_LOOP_ENTERED 90

void
tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
                                edge exit, struct tree_niter_desc *desc,
                                transform_callback transform,
                                void *data)
{
  gimple exit_if;
  tree ctr_before, ctr_after;
  tree enter_main_cond, exit_base, exit_step, exit_bound;
  enum tree_code exit_cmp;
  gimple phi_old_loop, phi_new_loop, phi_rest;
  gimple_stmt_iterator psi_old_loop, psi_new_loop;
  tree init, next, new_init, var;
  struct loop *new_loop;
  basic_block rest, exit_bb;
  edge old_entry, new_entry, old_latch, precond_edge, new_exit;
  edge new_nonexit, e;
  gimple_stmt_iterator bsi;
  use_operand_p op;
  bool ok;
  unsigned est_niter, prob_entry, scale_unrolled, scale_rest, freq_e, freq_h;
  unsigned new_est_niter, i, prob;
  unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
  sbitmap wont_exit;
  VEC (edge, heap) *to_remove = NULL;

  est_niter = expected_loop_iterations (loop);
  determine_exit_conditions (loop, desc, factor,
                             &enter_main_cond, &exit_base, &exit_step,
                             &exit_cmp, &exit_bound);

  /* Let us assume that the unrolled loop is quite likely to be entered.  */
  if (integer_nonzerop (enter_main_cond))
    prob_entry = REG_BR_PROB_BASE;
  else
    prob_entry = PROB_UNROLLED_LOOP_ENTERED * REG_BR_PROB_BASE / 100;

  /* The values for scales should keep profile consistent, and somewhat close
     to correct.

     TODO: The current value of SCALE_REST makes it appear that the loop that
     is created by splitting the remaining iterations of the unrolled loop is
     executed the same number of times as the original loop, and with the same
     frequencies, which is obviously wrong.  This does not appear to cause
     problems, so we do not bother with fixing it for now.  To make the profile
     correct, we would need to change the probability of the exit edge of the
     loop, and recompute the distribution of frequencies in its body because
     of this change (scale the frequencies of blocks before and after the exit
     by appropriate factors).  */
  scale_unrolled = prob_entry;
  scale_rest = REG_BR_PROB_BASE;

  new_loop = loop_version (loop, enter_main_cond, NULL,
                           prob_entry, scale_unrolled, scale_rest, true);
  gcc_assert (new_loop != NULL);
  update_ssa (TODO_update_ssa);

  /* Determine the probability of the exit edge of the unrolled loop.  */
  new_est_niter = est_niter / factor;

  /* Without profile feedback, loops for that we do not know a better estimate
     are assumed to roll 10 times.  When we unroll such loop, it appears to
     roll too little, and it may even seem to be cold.  To avoid this, we
     ensure that the created loop appears to roll at least 5 times (but at
     most as many times as before unrolling).  */
  if (new_est_niter < 5)
    {
      if (est_niter < 5)
        new_est_niter = est_niter;
      else
        new_est_niter = 5;
    }

  /* Prepare the cfg and update the phi nodes.  Move the loop exit to the
     loop latch (and make its condition dummy, for the moment).  */
  rest = loop_preheader_edge (new_loop)->src;
  precond_edge = single_pred_edge (rest);
  split_edge (loop_latch_edge (loop));
  exit_bb = single_pred (loop->latch);

  /* Since the exit edge will be removed, the frequency of all the blocks
     in the loop that are dominated by it must be scaled by
     1 / (1 - exit->probability).  */
  scale_dominated_blocks_in_loop (loop, exit->src,
                                  REG_BR_PROB_BASE,
                                  REG_BR_PROB_BASE - exit->probability);

  bsi = gsi_last_bb (exit_bb);
  exit_if = gimple_build_cond (EQ_EXPR, integer_zero_node,
                               integer_zero_node,
                               NULL_TREE, NULL_TREE);

  gsi_insert_after (&bsi, exit_if, GSI_NEW_STMT);
  new_exit = make_edge (exit_bb, rest, EDGE_FALSE_VALUE | irr);
  rescan_loop_exit (new_exit, true, false);

  /* Set the probability of new exit to the same of the old one.  Fix
     the frequency of the latch block, by scaling it back by
     1 - exit->probability.  */
  new_exit->count = exit->count;
  new_exit->probability = exit->probability;
  new_nonexit = single_pred_edge (loop->latch);
  new_nonexit->probability = REG_BR_PROB_BASE - exit->probability;
  new_nonexit->flags = EDGE_TRUE_VALUE;
  new_nonexit->count -= exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                             REG_BR_PROB_BASE);

  old_entry = loop_preheader_edge (loop);
  new_entry = loop_preheader_edge (new_loop);
  old_latch = loop_latch_edge (loop);
  for (psi_old_loop = gsi_start_phis (loop->header),
       psi_new_loop = gsi_start_phis (new_loop->header);
       !gsi_end_p (psi_old_loop);
       gsi_next (&psi_old_loop), gsi_next (&psi_new_loop))
    {
      phi_old_loop = gsi_stmt (psi_old_loop);
      phi_new_loop = gsi_stmt (psi_new_loop);

      init = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_entry);
      op = PHI_ARG_DEF_PTR_FROM_EDGE (phi_new_loop, new_entry);
      gcc_assert (operand_equal_for_phi_arg_p (init, USE_FROM_PTR (op)));
      next = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_latch);

      /* Prefer using original variable as a base for the new ssa name.
         This is necessary for virtual ops, and useful in order to avoid
         losing debug info for real ops.  */
      if (TREE_CODE (next) == SSA_NAME
          && useless_type_conversion_p (TREE_TYPE (next),
                                        TREE_TYPE (init)))
        var = SSA_NAME_VAR (next);
      else if (TREE_CODE (init) == SSA_NAME
               && useless_type_conversion_p (TREE_TYPE (init),
                                             TREE_TYPE (next)))
        var = SSA_NAME_VAR (init);
      else if (useless_type_conversion_p (TREE_TYPE (next), TREE_TYPE (init)))
        {
          var = create_tmp_var (TREE_TYPE (next), "unrinittmp");
          add_referenced_var (var);
        }
      else
        {
          var = create_tmp_var (TREE_TYPE (init), "unrinittmp");
          add_referenced_var (var);
        }

      new_init = make_ssa_name (var, NULL);
      phi_rest = create_phi_node (new_init, rest);
      SSA_NAME_DEF_STMT (new_init) = phi_rest;

      add_phi_arg (phi_rest, init, precond_edge, UNKNOWN_LOCATION);
      add_phi_arg (phi_rest, next, new_exit, UNKNOWN_LOCATION);
      SET_USE (op, new_init);
    }

  remove_path (exit);

  /* Transform the loop.  */
  if (transform)
    (*transform) (loop, data);

  /* Unroll the loop and remove the exits in all iterations except for the
     last one.  */
  wont_exit = sbitmap_alloc (factor);
  sbitmap_ones (wont_exit);
  RESET_BIT (wont_exit, factor - 1);

  ok = gimple_duplicate_loop_to_header_edge
          (loop, loop_latch_edge (loop), factor - 1,
           wont_exit, new_exit, &to_remove, DLTHE_FLAG_UPDATE_FREQ);
  free (wont_exit);
  gcc_assert (ok);

  for (i = 0; VEC_iterate (edge, to_remove, i, e); i++)
    {
      ok = remove_path (e);
      gcc_assert (ok);
    }
  VEC_free (edge, heap, to_remove);
  update_ssa (TODO_update_ssa);

  /* Ensure that the frequencies in the loop match the new estimated
     number of iterations, and change the probability of the new
     exit edge.  */
  freq_h = loop->header->frequency;
  freq_e = EDGE_FREQUENCY (loop_preheader_edge (loop));
  if (freq_h != 0)
    scale_loop_frequencies (loop, freq_e * (new_est_niter + 1), freq_h);

  exit_bb = single_pred (loop->latch);
  new_exit = find_edge (exit_bb, rest);
  new_exit->count = loop_preheader_edge (loop)->count;
  new_exit->probability = REG_BR_PROB_BASE / (new_est_niter + 1);

  rest->count += new_exit->count;
  rest->frequency += EDGE_FREQUENCY (new_exit);

  new_nonexit = single_pred_edge (loop->latch);
  prob = new_nonexit->probability;
  new_nonexit->probability = REG_BR_PROB_BASE - new_exit->probability;
  new_nonexit->count = exit_bb->count - new_exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  if (prob > 0)
    scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                               prob);

  /* Finally create the new counter for number of iterations and add the new
     exit instruction.  */
  bsi = gsi_last_bb (exit_bb);
  exit_if = gsi_stmt (bsi);
  create_iv (exit_base, exit_step, NULL_TREE, loop,
             &bsi, false, &ctr_before, &ctr_after);
  gimple_cond_set_code (exit_if, exit_cmp);
  gimple_cond_set_lhs (exit_if, ctr_after);
  gimple_cond_set_rhs (exit_if, exit_bound);
  update_stmt (exit_if);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
  verify_dominators (CDI_DOMINATORS);
  verify_loop_structure ();
  verify_loop_closed_ssa ();
#endif
}

/* Wrapper over tree_transform_and_unroll_loop for case we do not
   want to transform the loop before unrolling.  The meaning
   of the arguments is the same as for tree_transform_and_unroll_loop.  */

void
tree_unroll_loop (struct loop *loop, unsigned factor,
                  edge exit, struct tree_niter_desc *desc)
{
  tree_transform_and_unroll_loop (loop, factor, exit, desc,
                                  NULL, NULL);
}

/* Rewrite the phi node at position PSI in function of the main
   induction variable MAIN_IV and insert the generated code at GSI.  */

static void
rewrite_phi_with_iv (loop_p loop,
                     gimple_stmt_iterator *psi,
                     gimple_stmt_iterator *gsi,
                     tree main_iv)
{
  affine_iv iv;
  gimple stmt, phi = gsi_stmt (*psi);
  tree atype, mtype, val, res = PHI_RESULT (phi);

  if (!is_gimple_reg (res) || res == main_iv)
    {
      gsi_next (psi);
      return;
    }

  if (!simple_iv (loop, loop, res, &iv, true))
    {
      gsi_next (psi);
      return;
    }

  remove_phi_node (psi, false);

  atype = TREE_TYPE (res);
  mtype = POINTER_TYPE_P (atype) ? sizetype : atype;
  val = fold_build2 (MULT_EXPR, mtype, unshare_expr (iv.step),
                     fold_convert (mtype, main_iv));
  val = fold_build2 (POINTER_TYPE_P (atype)
                     ? POINTER_PLUS_EXPR : PLUS_EXPR,
                     atype, unshare_expr (iv.base), val);
  val = force_gimple_operand_gsi (gsi, val, false, NULL_TREE, true,
                                  GSI_SAME_STMT);
  stmt = gimple_build_assign (res, val);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  SSA_NAME_DEF_STMT (res) = stmt;
}

/* Rewrite all the phi nodes of LOOP in function of the main induction
   variable MAIN_IV.  */

static void
rewrite_all_phi_nodes_with_iv (loop_p loop, tree main_iv)
{
  unsigned i;
  basic_block *bbs = get_loop_body_in_dom_order (loop);
  gimple_stmt_iterator psi;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];
      gimple_stmt_iterator gsi = gsi_after_labels (bb);

      if (bb->loop_father != loop)
        continue;

      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); )
        rewrite_phi_with_iv (loop, &psi, &gsi, main_iv);
    }

  free (bbs);
}

/* Bases all the induction variables in LOOP on a single induction
   variable (unsigned with base 0 and step 1), whose final value is
   compared with *NIT.  When the IV type precision has to be larger
   than *NIT type precision, *NIT is converted to the larger type, the
   conversion code is inserted before the loop, and *NIT is updated to
   the new definition.  When BUMP_IN_LATCH is true, the induction
   variable is incremented in the loop latch, otherwise it is
   incremented in the loop header.  Return the induction variable that
   was created.  */

tree
canonicalize_loop_ivs (struct loop *loop, tree *nit, bool bump_in_latch)
{
  unsigned precision = TYPE_PRECISION (TREE_TYPE (*nit));
  unsigned original_precision = precision;
  tree type, var_before;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  edge exit = single_dom_exit (loop);
  gimple_seq stmts;

  for (psi = gsi_start_phis (loop->header);
       !gsi_end_p (psi); gsi_next (&psi))
    {
      gimple phi = gsi_stmt (psi);
      tree res = PHI_RESULT (phi);

      if (is_gimple_reg (res) && TYPE_PRECISION (TREE_TYPE (res)) > precision)
        precision = TYPE_PRECISION (TREE_TYPE (res));
    }

  type = lang_hooks.types.type_for_size (precision, 1);

  if (original_precision != precision)
    {
      *nit = fold_convert (type, *nit);
      *nit = force_gimple_operand (*nit, &stmts, true, NULL_TREE);
      if (stmts)
        gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }

  gsi = gsi_last_bb (bump_in_latch ? loop->latch : loop->header);
  create_iv (build_int_cst_type (type, 0), build_int_cst (type, 1), NULL_TREE,
             loop, &gsi, bump_in_latch, &var_before, NULL);

  rewrite_all_phi_nodes_with_iv (loop, var_before);

  stmt = last_stmt (exit->src);
  /* Make the loop exit if the control condition is not satisfied.  */
  if (exit->flags & EDGE_TRUE_VALUE)
    {
      edge te, fe;

      extract_true_false_edges_from_block (exit->src, &te, &fe);
      te->flags = EDGE_FALSE_VALUE;
      fe->flags = EDGE_TRUE_VALUE;
    }
  gimple_cond_set_code (stmt, LT_EXPR);
  gimple_cond_set_lhs (stmt, var_before);
  gimple_cond_set_rhs (stmt, *nit);
  update_stmt (stmt);

  return var_before;
}
