/* Rtl-level induction variable analysis.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is a simple analysis of induction variables of the loop.  The major use
   is for determining the number of iterations of a loop for loop unrolling,
   doloop optimization and branch prediction.  The iv information is computed
   on demand.

   Induction variables are analyzed by walking the use-def chains.  When
   a basic induction variable (biv) is found, it is cached in the bivs
   hash table.  When a register is proved to be a biv, its description
   is stored to DF_REF_DATA of the def reference.

   The analysis always works with one loop -- you must call
   iv_analysis_loop_init (loop) for it.  All the other functions then work with
   this loop.  When you need to work with another loop, just call
   iv_analysis_loop_init for it.  When you no longer need iv analysis, call
   iv_analysis_done () to clean up the memory.

   The available functions are:

   iv_analyze (insn, reg, iv): Stores the description of the induction variable
     corresponding to the use of register REG in INSN to IV.  Returns true if
     REG is an induction variable in INSN, false otherwise.
     If the use of REG is not found in INSN, the following insns are scanned
     (so that we may call this function on an insn returned by get_condition).
   iv_analyze_result (insn, def, iv):  Stores to IV the description of the iv
     corresponding to DEF, which is a register defined in INSN.
   iv_analyze_expr (insn, rhs, mode, iv):  Stores to IV the description of the
     iv corresponding to expression RHS evaluated at INSN.  All registers used
     by RHS must also be used in INSN.  */
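
/* A minimal usage sketch (illustration only, not part of the original
   sources; error checking and the surrounding pass setup are omitted, and
   the loop exit test is assumed to end the latch block):

     struct rtx_iv iv;
     rtx insn, cond;

     iv_analysis_loop_init (loop);
     insn = BB_END (loop->latch);
     cond = get_condition (insn, NULL, false, true);
     if (cond && iv_analyze (insn, XEXP (cond, 0), &iv))
       {
         ... iv.base, iv.step, iv.mode etc. now describe the left operand
         of the exit test; its value in the I-th iteration can be built
         with get_iv_value (&iv, i) ...
       }
     iv_analysis_done ();

   The number-of-iterations analysis later in this file uses these routines
   in essentially this way.  */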

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "intl.h"
#include "output.h"
#include "toplev.h"
#include "df.h"
#include "hashtab.h"

/* Possible return values of iv_get_reaching_def.  */

enum iv_grd_result
{
  /* More than one reaching def, or a reaching def that does not
     dominate the use.  */
  GRD_INVALID,

  /* The use is a trivial invariant of the loop, i.e. it is not changed
     inside the loop.  */
  GRD_INVARIANT,

  /* The use is reached by the initial value and a value from the
     previous iteration.  */
  GRD_MAYBE_BIV,

  /* The use has a single dominating def.  */
  GRD_SINGLE_DOM
};

/* Information about a biv.  */

struct biv_entry
{
  unsigned regno;       /* The register of the biv.  */
  struct rtx_iv iv;     /* Value of the biv.  */
};

static bool clean_slate = true;

static unsigned int iv_ref_table_size = 0;

/* Table of rtx_ivs indexed by the df_ref uid field.  */
static struct rtx_iv ** iv_ref_table;

/* Induction variable stored at the reference.  */
#define DF_REF_IV(REF) iv_ref_table[DF_REF_ID(REF)]
#define DF_REF_IV_SET(REF, IV) iv_ref_table[DF_REF_ID(REF)] = (IV)

/* The current loop.  */

static struct loop *current_loop;

/* Bivs of the current loop.  */

static htab_t bivs;

static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);

/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx
      && !iv->first_special)
    fprintf (file, "invariant ");

  print_rtl (file, iv->base);
  if (iv->step != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->step);
      fprintf (file, " * iteration");
    }
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
             rtx_name[iv->extend],
             GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }
  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }
  if (iv->first_special)
    fprintf (file, " (first special)");
}

/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
                enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
                              subreg_lowpart_offset (outer_mode, inner_mode));
}
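
/* For instance (illustration only), on a little-endian target
   lowpart_subreg (SImode, x, DImode) for a pseudo register X yields
   (subreg:SI (reg:DI x) 0); on a big-endian target subreg_lowpart_offset
   would supply a byte offset of 4 instead of 0.  */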
170
 
171
static void
172
check_iv_ref_table_size (void)
173
{
174
  if (iv_ref_table_size < DF_DEFS_TABLE_SIZE())
175
    {
176
      unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
177
      iv_ref_table = XRESIZEVEC (struct rtx_iv *, iv_ref_table, new_size);
178
      memset (&iv_ref_table[iv_ref_table_size], 0,
179
              (new_size - iv_ref_table_size) * sizeof (struct rtx_iv *));
180
      iv_ref_table_size = new_size;
181
    }
182
}
183
 
184
 
185
/* Checks whether REG is a well-behaved register.  */
186
 
187
static bool
188
simple_reg_p (rtx reg)
189
{
190
  unsigned r;
191
 
192
  if (GET_CODE (reg) == SUBREG)
193
    {
194
      if (!subreg_lowpart_p (reg))
195
        return false;
196
      reg = SUBREG_REG (reg);
197
    }
198
 
199
  if (!REG_P (reg))
200
    return false;
201
 
202
  r = REGNO (reg);
203
  if (HARD_REGISTER_NUM_P (r))
204
    return false;
205
 
206
  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
207
    return false;
208
 
209
  return true;
210
}
211
 
212
/* Clears the information about ivs stored in df.  */
213
 
214
static void
215
clear_iv_info (void)
216
{
217
  unsigned i, n_defs = DF_DEFS_TABLE_SIZE ();
218
  struct rtx_iv *iv;
219
 
220
  check_iv_ref_table_size ();
221
  for (i = 0; i < n_defs; i++)
222
    {
223
      iv = iv_ref_table[i];
224
      if (iv)
225
        {
226
          free (iv);
227
          iv_ref_table[i] = NULL;
228
        }
229
    }
230
 
231
  htab_empty (bivs);
232
}
233
 
234
/* Returns hash value for biv B.  */
235
 
236
static hashval_t
237
biv_hash (const void *b)
238
{
239
  return ((const struct biv_entry *) b)->regno;
240
}
241
 
242
/* Compares biv B and register R.  */
243
 
244
static int
245
biv_eq (const void *b, const void *r)
246
{
247
  return ((const struct biv_entry *) b)->regno == REGNO ((const_rtx) r);
248
}
249
 
250
/* Prepare the data for an induction variable analysis of a LOOP.  */
251
 
252
void
253
iv_analysis_loop_init (struct loop *loop)
254
{
255
  basic_block *body = get_loop_body_in_dom_order (loop), bb;
256
  bitmap blocks = BITMAP_ALLOC (NULL);
257
  unsigned i;
258
 
259
  current_loop = loop;
260
 
261
  /* Clear the information from the analysis of the previous loop.  */
262
  if (clean_slate)
263
    {
264
      df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
265
      bivs = htab_create (10, biv_hash, biv_eq, free);
266
      clean_slate = false;
267
    }
268
  else
269
    clear_iv_info ();
270
 
271
  for (i = 0; i < loop->num_nodes; i++)
272
    {
273
      bb = body[i];
274
      bitmap_set_bit (blocks, bb->index);
275
    }
276
  /* Get rid of the ud chains before processing the rescans.  Then add
277
     the problem back.  */
278
  df_remove_problem (df_chain);
279
  df_process_deferred_rescans ();
280
  df_chain_add_problem (DF_UD_CHAIN);
281
  df_note_add_problem ();
282
  df_set_blocks (blocks);
283
  df_analyze ();
284
  if (dump_file)
285
    df_dump_region (dump_file);
286
 
287
  check_iv_ref_table_size ();
288
  BITMAP_FREE (blocks);
289
  free (body);
290
}
291
 
292
/* Finds the definition of REG that dominates the loop latch and stores
   it to DEF.  Returns false if there is not a single definition
   dominating the latch.  If REG has no definition in the loop, DEF
   is set to NULL and true is returned.  */
296
 
297
static bool
298
latch_dominating_def (rtx reg, df_ref *def)
299
{
300
  df_ref single_rd = NULL, adef;
301
  unsigned regno = REGNO (reg);
302
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
303
 
304
  for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
305
    {
306
      if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
307
          || !bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
308
        continue;
309
 
310
      /* More than one reaching definition.  */
311
      if (single_rd)
312
        return false;
313
 
314
      if (!just_once_each_iteration_p (current_loop, DF_REF_BB (adef)))
315
        return false;
316
 
317
      single_rd = adef;
318
    }
319
 
320
  *def = single_rd;
321
  return true;
322
}
323
 
324
/* Gets definition of REG reaching its use in INSN and stores it to DEF.  */
325
 
326
static enum iv_grd_result
327
iv_get_reaching_def (rtx insn, rtx reg, df_ref *def)
328
{
329
  df_ref use, adef;
330
  basic_block def_bb, use_bb;
331
  rtx def_insn;
332
  bool dom_p;
333
 
334
  *def = NULL;
335
  if (!simple_reg_p (reg))
336
    return GRD_INVALID;
337
  if (GET_CODE (reg) == SUBREG)
338
    reg = SUBREG_REG (reg);
339
  gcc_assert (REG_P (reg));
340
 
341
  use = df_find_use (insn, reg);
342
  gcc_assert (use != NULL);
343
 
344
  if (!DF_REF_CHAIN (use))
345
    return GRD_INVARIANT;
346
 
347
  /* More than one reaching def.  */
348
  if (DF_REF_CHAIN (use)->next)
349
    return GRD_INVALID;
350
 
351
  adef = DF_REF_CHAIN (use)->ref;
352
 
353
  /* We do not handle setting only part of the register.  */
354
  if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
355
    return GRD_INVALID;
356
 
357
  def_insn = DF_REF_INSN (adef);
358
  def_bb = DF_REF_BB (adef);
359
  use_bb = BLOCK_FOR_INSN (insn);
360
 
361
  if (use_bb == def_bb)
362
    dom_p = (DF_INSN_LUID (def_insn) < DF_INSN_LUID (insn));
363
  else
364
    dom_p = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);
365
 
366
  if (dom_p)
367
    {
368
      *def = adef;
369
      return GRD_SINGLE_DOM;
370
    }
371
 
372
  /* The definition does not dominate the use.  This is still OK if
373
     this may be a use of a biv, i.e. if the def_bb dominates loop
374
     latch.  */
375
  if (just_once_each_iteration_p (current_loop, def_bb))
376
    return GRD_MAYBE_BIV;
377
 
378
  return GRD_INVALID;
379
}
380
 
381
/* Sets IV to invariant CST in MODE.  Always returns true (just for
382
   consistency with other iv manipulation functions that may fail).  */
383
 
384
static bool
385
iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
386
{
387
  if (mode == VOIDmode)
388
    mode = GET_MODE (cst);
389
 
390
  iv->mode = mode;
391
  iv->base = cst;
392
  iv->step = const0_rtx;
393
  iv->first_special = false;
394
  iv->extend = UNKNOWN;
395
  iv->extend_mode = iv->mode;
396
  iv->delta = const0_rtx;
397
  iv->mult = const1_rtx;
398
 
399
  return true;
400
}
401
 
402
/* Evaluates application of subreg to MODE on IV.  */
403
 
404
static bool
405
iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
406
{
407
  /* If iv is invariant, just calculate the new value.  */
408
  if (iv->step == const0_rtx
409
      && !iv->first_special)
410
    {
411
      rtx val = get_iv_value (iv, const0_rtx);
412
      val = lowpart_subreg (mode, val, iv->extend_mode);
413
 
414
      iv->base = val;
415
      iv->extend = UNKNOWN;
416
      iv->mode = iv->extend_mode = mode;
417
      iv->delta = const0_rtx;
418
      iv->mult = const1_rtx;
419
      return true;
420
    }
421
 
422
  if (iv->extend_mode == mode)
423
    return true;
424
 
425
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
426
    return false;
427
 
428
  iv->extend = UNKNOWN;
429
  iv->mode = mode;
430
 
431
  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
432
                                  simplify_gen_binary (MULT, iv->extend_mode,
433
                                                       iv->base, iv->mult));
434
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
435
  iv->mult = const1_rtx;
436
  iv->delta = const0_rtx;
437
  iv->first_special = false;
438
 
439
  return true;
440
}
441
 
442
/* Evaluates application of EXTEND to MODE on IV.  */
443
 
444
static bool
445
iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
446
{
447
  /* If iv is invariant, just calculate the new value.  */
448
  if (iv->step == const0_rtx
449
      && !iv->first_special)
450
    {
451
      rtx val = get_iv_value (iv, const0_rtx);
452
      val = simplify_gen_unary (extend, mode, val, iv->extend_mode);
453
 
454
      iv->base = val;
455
      iv->extend = UNKNOWN;
456
      iv->mode = iv->extend_mode = mode;
457
      iv->delta = const0_rtx;
458
      iv->mult = const1_rtx;
459
      return true;
460
    }
461
 
462
  if (mode != iv->extend_mode)
463
    return false;
464
 
465
  if (iv->extend != UNKNOWN
466
      && iv->extend != extend)
467
    return false;
468
 
469
  iv->extend = extend;
470
 
471
  return true;
472
}
473
 
474
/* Evaluates negation of IV.  */
475
 
476
static bool
477
iv_neg (struct rtx_iv *iv)
478
{
479
  if (iv->extend == UNKNOWN)
480
    {
481
      iv->base = simplify_gen_unary (NEG, iv->extend_mode,
482
                                     iv->base, iv->extend_mode);
483
      iv->step = simplify_gen_unary (NEG, iv->extend_mode,
484
                                     iv->step, iv->extend_mode);
485
    }
486
  else
487
    {
488
      iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
489
                                      iv->delta, iv->extend_mode);
490
      iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
491
                                     iv->mult, iv->extend_mode);
492
    }
493
 
494
  return true;
495
}
496
 
497
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */
498
 
499
static bool
500
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
501
{
502
  enum machine_mode mode;
503
  rtx arg;
504
 
505
  /* Extend the constant to extend_mode of the other operand if necessary.  */
506
  if (iv0->extend == UNKNOWN
507
      && iv0->mode == iv0->extend_mode
508
      && iv0->step == const0_rtx
509
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
510
    {
511
      iv0->extend_mode = iv1->extend_mode;
512
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
513
                                      iv0->base, iv0->mode);
514
    }
515
  if (iv1->extend == UNKNOWN
516
      && iv1->mode == iv1->extend_mode
517
      && iv1->step == const0_rtx
518
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
519
    {
520
      iv1->extend_mode = iv0->extend_mode;
521
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
522
                                      iv1->base, iv1->mode);
523
    }
524
 
525
  mode = iv0->extend_mode;
526
  if (mode != iv1->extend_mode)
527
    return false;
528
 
529
  if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
530
    {
531
      if (iv0->mode != iv1->mode)
532
        return false;
533
 
534
      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
535
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
536
 
537
      return true;
538
    }
539
 
540
  /* Handle addition of constant.  */
541
  if (iv1->extend == UNKNOWN
542
      && iv1->mode == mode
543
      && iv1->step == const0_rtx)
544
    {
545
      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
546
      return true;
547
    }
548
 
549
  if (iv0->extend == UNKNOWN
550
      && iv0->mode == mode
551
      && iv0->step == const0_rtx)
552
    {
553
      arg = iv0->base;
554
      *iv0 = *iv1;
555
      if (op == MINUS
556
          && !iv_neg (iv0))
557
        return false;
558
 
559
      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
560
      return true;
561
    }
562
 
563
  return false;
564
}
565
 
566
/* Evaluates multiplication of IV by constant MBY.  */
567
 
568
static bool
569
iv_mult (struct rtx_iv *iv, rtx mby)
570
{
571
  enum machine_mode mode = iv->extend_mode;
572
 
573
  if (GET_MODE (mby) != VOIDmode
574
      && GET_MODE (mby) != mode)
575
    return false;
576
 
577
  if (iv->extend == UNKNOWN)
578
    {
579
      iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
580
      iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
581
    }
582
  else
583
    {
584
      iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
585
      iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
586
    }
587
 
588
  return true;
589
}
590
 
591
/* Evaluates shift of IV by constant MBY.  */
592
 
593
static bool
594
iv_shift (struct rtx_iv *iv, rtx mby)
595
{
596
  enum machine_mode mode = iv->extend_mode;
597
 
598
  if (GET_MODE (mby) != VOIDmode
599
      && GET_MODE (mby) != mode)
600
    return false;
601
 
602
  if (iv->extend == UNKNOWN)
603
    {
604
      iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
605
      iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
606
    }
607
  else
608
    {
609
      iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
610
      iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
611
    }
612
 
613
  return true;
614
}
615
 
616
/* The recursive part of get_biv_step.  Gets the description of the single
   value defined by DEF with respect to the initial value of REG inside the
   loop, in the shape described at get_biv_step.  */
619
 
620
static bool
621
get_biv_step_1 (df_ref def, rtx reg,
622
                rtx *inner_step, enum machine_mode *inner_mode,
623
                enum rtx_code *extend, enum machine_mode outer_mode,
624
                rtx *outer_step)
625
{
626
  rtx set, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
627
  rtx next, nextr, tmp;
628
  enum rtx_code code;
629
  rtx insn = DF_REF_INSN (def);
630
  df_ref next_def;
631
  enum iv_grd_result res;
632
 
633
  set = single_set (insn);
634
  if (!set)
635
    return false;
636
 
637
  rhs = find_reg_equal_equiv_note (insn);
638
  if (rhs)
639
    rhs = XEXP (rhs, 0);
640
  else
641
    rhs = SET_SRC (set);
642
 
643
  code = GET_CODE (rhs);
644
  switch (code)
645
    {
646
    case SUBREG:
647
    case REG:
648
      next = rhs;
649
      break;
650
 
651
    case PLUS:
652
    case MINUS:
653
      op0 = XEXP (rhs, 0);
654
      op1 = XEXP (rhs, 1);
655
 
656
      if (code == PLUS && CONSTANT_P (op0))
657
        {
658
          tmp = op0; op0 = op1; op1 = tmp;
659
        }
660
 
661
      if (!simple_reg_p (op0)
662
          || !CONSTANT_P (op1))
663
        return false;
664
 
665
      if (GET_MODE (rhs) != outer_mode)
666
        {
667
          /* ppc64 uses expressions like

             (set x:SI (plus:SI (subreg:SI y:DI) 1)).

             This is equivalent to

             (set x':DI (plus:DI y:DI 1))
             (set x:SI (subreg:SI (x':DI))).  */
675
          if (GET_CODE (op0) != SUBREG)
676
            return false;
677
          if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
678
            return false;
679
        }
680
 
681
      next = op0;
682
      break;
683
 
684
    case SIGN_EXTEND:
685
    case ZERO_EXTEND:
686
      if (GET_MODE (rhs) != outer_mode)
687
        return false;
688
 
689
      op0 = XEXP (rhs, 0);
690
      if (!simple_reg_p (op0))
691
        return false;
692
 
693
      next = op0;
694
      break;
695
 
696
    default:
697
      return false;
698
    }
699
 
700
  if (GET_CODE (next) == SUBREG)
701
    {
702
      if (!subreg_lowpart_p (next))
703
        return false;
704
 
705
      nextr = SUBREG_REG (next);
706
      if (GET_MODE (nextr) != outer_mode)
707
        return false;
708
    }
709
  else
710
    nextr = next;
711
 
712
  res = iv_get_reaching_def (insn, nextr, &next_def);
713
 
714
  if (res == GRD_INVALID || res == GRD_INVARIANT)
715
    return false;
716
 
717
  if (res == GRD_MAYBE_BIV)
718
    {
719
      if (!rtx_equal_p (nextr, reg))
720
        return false;
721
 
722
      *inner_step = const0_rtx;
723
      *extend = UNKNOWN;
724
      *inner_mode = outer_mode;
725
      *outer_step = const0_rtx;
726
    }
727
  else if (!get_biv_step_1 (next_def, reg,
728
                            inner_step, inner_mode, extend, outer_mode,
729
                            outer_step))
730
    return false;
731
 
732
  if (GET_CODE (next) == SUBREG)
733
    {
734
      enum machine_mode amode = GET_MODE (next);
735
 
736
      if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
737
        return false;
738
 
739
      *inner_mode = amode;
740
      *inner_step = simplify_gen_binary (PLUS, outer_mode,
741
                                         *inner_step, *outer_step);
742
      *outer_step = const0_rtx;
743
      *extend = UNKNOWN;
744
    }
745
 
746
  switch (code)
747
    {
748
    case REG:
749
    case SUBREG:
750
      break;
751
 
752
    case PLUS:
753
    case MINUS:
754
      if (*inner_mode == outer_mode
755
          /* See comment in previous switch.  */
756
          || GET_MODE (rhs) != outer_mode)
757
        *inner_step = simplify_gen_binary (code, outer_mode,
758
                                           *inner_step, op1);
759
      else
760
        *outer_step = simplify_gen_binary (code, outer_mode,
761
                                           *outer_step, op1);
762
      break;
763
 
764
    case SIGN_EXTEND:
765
    case ZERO_EXTEND:
766
      gcc_assert (GET_MODE (op0) == *inner_mode
767
                  && *extend == UNKNOWN
768
                  && *outer_step == const0_rtx);
769
 
770
      *extend = code;
771
      break;
772
 
773
    default:
774
      return false;
775
    }
776
 
777
  return true;
778
}
779
 
780
/* Gets the operation on register REG inside the loop, in the shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, false is returned.
   LAST_DEF is the definition of REG that dominates the loop latch.  */
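
/* For example (illustration only): an SImode counter kept in a DImode
   register X and updated by the insn sequence

     (set y:SI (plus:SI (subreg:SI x:DI) 1))
     (set x:DI (zero_extend:DI y:SI))

   is decomposed with INNER_MODE = SImode, INNER_STEP = 1,
   EXTEND = ZERO_EXTEND, OUTER_MODE = DImode and OUTER_STEP = 0, i.e. the
   shape 0 + ZERO_EXTEND_{DImode} (SUBREG_{SImode} (x + 1)).  */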
786
 
787
static bool
788
get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
789
              enum machine_mode *inner_mode, enum rtx_code *extend,
790
              enum machine_mode *outer_mode, rtx *outer_step)
791
{
792
  *outer_mode = GET_MODE (reg);
793
 
794
  if (!get_biv_step_1 (last_def, reg,
795
                       inner_step, inner_mode, extend, *outer_mode,
796
                       outer_step))
797
    return false;
798
 
799
  gcc_assert ((*inner_mode == *outer_mode) != (*extend != UNKNOWN));
800
  gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
801
 
802
  return true;
803
}
804
 
805
/* Records information that DEF is induction variable IV.  */
806
 
807
static void
808
record_iv (df_ref def, struct rtx_iv *iv)
809
{
810
  struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
811
 
812
  *recorded_iv = *iv;
813
  check_iv_ref_table_size ();
814
  DF_REF_IV_SET (def, recorded_iv);
815
}
816
 
817
/* If DEF was already analyzed for bivness, store the description of the biv to
818
   IV and return true.  Otherwise return false.  */
819
 
820
static bool
821
analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
822
{
823
  struct biv_entry *biv =
824
    (struct biv_entry *) htab_find_with_hash (bivs, def, REGNO (def));
825
 
826
  if (!biv)
827
    return false;
828
 
829
  *iv = biv->iv;
830
  return true;
831
}
832
 
833
static void
834
record_biv (rtx def, struct rtx_iv *iv)
835
{
836
  struct biv_entry *biv = XNEW (struct biv_entry);
837
  void **slot = htab_find_slot_with_hash (bivs, def, REGNO (def), INSERT);
838
 
839
  biv->regno = REGNO (def);
840
  biv->iv = *iv;
841
  gcc_assert (!*slot);
842
  *slot = biv;
843
}
844
 
845
/* Determines whether DEF is a biv and if so, stores its description
846
   to *IV.  */
847
 
848
static bool
849
iv_analyze_biv (rtx def, struct rtx_iv *iv)
850
{
851
  rtx inner_step, outer_step;
852
  enum machine_mode inner_mode, outer_mode;
853
  enum rtx_code extend;
854
  df_ref last_def;
855
 
856
  if (dump_file)
857
    {
858
      fprintf (dump_file, "Analyzing ");
859
      print_rtl (dump_file, def);
860
      fprintf (dump_file, " for bivness.\n");
861
    }
862
 
863
  if (!REG_P (def))
864
    {
865
      if (!CONSTANT_P (def))
866
        return false;
867
 
868
      return iv_constant (iv, def, VOIDmode);
869
    }
870
 
871
  if (!latch_dominating_def (def, &last_def))
872
    {
873
      if (dump_file)
874
        fprintf (dump_file, "  not simple.\n");
875
      return false;
876
    }
877
 
878
  if (!last_def)
879
    return iv_constant (iv, def, VOIDmode);
880
 
881
  if (analyzed_for_bivness_p (def, iv))
882
    {
883
      if (dump_file)
884
        fprintf (dump_file, "  already analysed.\n");
885
      return iv->base != NULL_RTX;
886
    }
887
 
888
  if (!get_biv_step (last_def, def, &inner_step, &inner_mode, &extend,
889
                     &outer_mode, &outer_step))
890
    {
891
      iv->base = NULL_RTX;
892
      goto end;
893
    }
894
 
895
  /* The loop transforms base to es (base + inner_step) + outer_step,
     where es means the extension of the subreg from inner_mode to
     outer_mode.  The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */
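
  /* For instance (illustration only), for the zero-extended counter sketched
     in the comment above get_biv_step this yields base = DEF (standing for
     the register's value on entry to the loop), step = 1, mode = SImode,
     extend_mode = DImode, extend = ZERO_EXTEND, delta = 0, mult = 1, and
     first_special set because the inner and outer modes differ.  */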
900
 
901
  iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
902
  iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
903
  iv->mode = inner_mode;
904
  iv->extend_mode = outer_mode;
905
  iv->extend = extend;
906
  iv->mult = const1_rtx;
907
  iv->delta = outer_step;
908
  iv->first_special = inner_mode != outer_mode;
909
 
910
 end:
911
  if (dump_file)
912
    {
913
      fprintf (dump_file, "  ");
914
      dump_iv_info (dump_file, iv);
915
      fprintf (dump_file, "\n");
916
    }
917
 
918
  record_biv (def, iv);
919
  return iv->base != NULL_RTX;
920
}
921
 
922
/* Analyzes expression RHS used at INSN and stores the result to *IV.
923
   The mode of the induction variable is MODE.  */
924
 
925
bool
926
iv_analyze_expr (rtx insn, rtx rhs, enum machine_mode mode, struct rtx_iv *iv)
927
{
928
  rtx mby = NULL_RTX, tmp;
929
  rtx op0 = NULL_RTX, op1 = NULL_RTX;
930
  struct rtx_iv iv0, iv1;
931
  enum rtx_code code = GET_CODE (rhs);
932
  enum machine_mode omode = mode;
933
 
934
  iv->mode = VOIDmode;
935
  iv->base = NULL_RTX;
936
  iv->step = NULL_RTX;
937
 
938
  gcc_assert (GET_MODE (rhs) == mode || GET_MODE (rhs) == VOIDmode);
939
 
940
  if (CONSTANT_P (rhs)
941
      || REG_P (rhs)
942
      || code == SUBREG)
943
    {
944
      if (!iv_analyze_op (insn, rhs, iv))
945
        return false;
946
 
947
      if (iv->mode == VOIDmode)
948
        {
949
          iv->mode = mode;
950
          iv->extend_mode = mode;
951
        }
952
 
953
      return true;
954
    }
955
 
956
  switch (code)
957
    {
958
    case REG:
959
      op0 = rhs;
960
      break;
961
 
962
    case SIGN_EXTEND:
963
    case ZERO_EXTEND:
964
    case NEG:
965
      op0 = XEXP (rhs, 0);
966
      omode = GET_MODE (op0);
967
      break;
968
 
969
    case PLUS:
970
    case MINUS:
971
      op0 = XEXP (rhs, 0);
972
      op1 = XEXP (rhs, 1);
973
      break;
974
 
975
    case MULT:
976
      op0 = XEXP (rhs, 0);
977
      mby = XEXP (rhs, 1);
978
      if (!CONSTANT_P (mby))
979
        {
980
          tmp = op0;
981
          op0 = mby;
982
          mby = tmp;
983
        }
984
      if (!CONSTANT_P (mby))
985
        return false;
986
      break;
987
 
988
    case ASHIFT:
989
      op0 = XEXP (rhs, 0);
990
      mby = XEXP (rhs, 1);
991
      if (!CONSTANT_P (mby))
992
        return false;
993
      break;
994
 
995
    default:
996
      return false;
997
    }
998
 
999
  if (op0
1000
      && !iv_analyze_expr (insn, op0, omode, &iv0))
1001
    return false;
1002
 
1003
  if (op1
1004
      && !iv_analyze_expr (insn, op1, omode, &iv1))
1005
    return false;
1006
 
1007
  switch (code)
1008
    {
1009
    case SIGN_EXTEND:
1010
    case ZERO_EXTEND:
1011
      if (!iv_extend (&iv0, code, mode))
1012
        return false;
1013
      break;
1014
 
1015
    case NEG:
1016
      if (!iv_neg (&iv0))
1017
        return false;
1018
      break;
1019
 
1020
    case PLUS:
1021
    case MINUS:
1022
      if (!iv_add (&iv0, &iv1, code))
1023
        return false;
1024
      break;
1025
 
1026
    case MULT:
1027
      if (!iv_mult (&iv0, mby))
1028
        return false;
1029
      break;
1030
 
1031
    case ASHIFT:
1032
      if (!iv_shift (&iv0, mby))
1033
        return false;
1034
      break;
1035
 
1036
    default:
1037
      break;
1038
    }
1039
 
1040
  *iv = iv0;
1041
  return iv->base != NULL_RTX;
1042
}
1043
 
1044
/* Analyzes iv DEF and stores the result to *IV.  */
1045
 
1046
static bool
1047
iv_analyze_def (df_ref def, struct rtx_iv *iv)
1048
{
1049
  rtx insn = DF_REF_INSN (def);
1050
  rtx reg = DF_REF_REG (def);
1051
  rtx set, rhs;
1052
 
1053
  if (dump_file)
1054
    {
1055
      fprintf (dump_file, "Analyzing def of ");
1056
      print_rtl (dump_file, reg);
1057
      fprintf (dump_file, " in insn ");
1058
      print_rtl_single (dump_file, insn);
1059
    }
1060
 
1061
  check_iv_ref_table_size ();
1062
  if (DF_REF_IV (def))
1063
    {
1064
      if (dump_file)
1065
        fprintf (dump_file, "  already analysed.\n");
1066
      *iv = *DF_REF_IV (def);
1067
      return iv->base != NULL_RTX;
1068
    }
1069
 
1070
  iv->mode = VOIDmode;
1071
  iv->base = NULL_RTX;
1072
  iv->step = NULL_RTX;
1073
 
1074
  if (!REG_P (reg))
1075
    return false;
1076
 
1077
  set = single_set (insn);
1078
  if (!set)
1079
    return false;
1080
 
1081
  if (!REG_P (SET_DEST (set)))
1082
    return false;
1083
 
1084
  gcc_assert (SET_DEST (set) == reg);
1085
  rhs = find_reg_equal_equiv_note (insn);
1086
  if (rhs)
1087
    rhs = XEXP (rhs, 0);
1088
  else
1089
    rhs = SET_SRC (set);
1090
 
1091
  iv_analyze_expr (insn, rhs, GET_MODE (reg), iv);
1092
  record_iv (def, iv);
1093
 
1094
  if (dump_file)
1095
    {
1096
      print_rtl (dump_file, reg);
1097
      fprintf (dump_file, " in insn ");
1098
      print_rtl_single (dump_file, insn);
1099
      fprintf (dump_file, "  is ");
1100
      dump_iv_info (dump_file, iv);
1101
      fprintf (dump_file, "\n");
1102
    }
1103
 
1104
  return iv->base != NULL_RTX;
1105
}
1106
 
1107
/* Analyzes operand OP of INSN and stores the result to *IV.  */
1108
 
1109
static bool
1110
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
1111
{
1112
  df_ref def = NULL;
1113
  enum iv_grd_result res;
1114
 
1115
  if (dump_file)
1116
    {
1117
      fprintf (dump_file, "Analyzing operand ");
1118
      print_rtl (dump_file, op);
1119
      fprintf (dump_file, " of insn ");
1120
      print_rtl_single (dump_file, insn);
1121
    }
1122
 
1123
  if (function_invariant_p (op))
1124
    res = GRD_INVARIANT;
1125
  else if (GET_CODE (op) == SUBREG)
1126
    {
1127
      if (!subreg_lowpart_p (op))
1128
        return false;
1129
 
1130
      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
1131
        return false;
1132
 
1133
      return iv_subreg (iv, GET_MODE (op));
1134
    }
1135
  else
1136
    {
1137
      res = iv_get_reaching_def (insn, op, &def);
1138
      if (res == GRD_INVALID)
1139
        {
1140
          if (dump_file)
1141
            fprintf (dump_file, "  not simple.\n");
1142
          return false;
1143
        }
1144
    }
1145
 
1146
  if (res == GRD_INVARIANT)
1147
    {
1148
      iv_constant (iv, op, VOIDmode);
1149
 
1150
      if (dump_file)
1151
        {
1152
          fprintf (dump_file, "  ");
1153
          dump_iv_info (dump_file, iv);
1154
          fprintf (dump_file, "\n");
1155
        }
1156
      return true;
1157
    }
1158
 
1159
  if (res == GRD_MAYBE_BIV)
1160
    return iv_analyze_biv (op, iv);
1161
 
1162
  return iv_analyze_def (def, iv);
1163
}
1164
 
1165
/* Analyzes value VAL at INSN and stores the result to *IV.  */
1166
 
1167
bool
1168
iv_analyze (rtx insn, rtx val, struct rtx_iv *iv)
1169
{
1170
  rtx reg;
1171
 
1172
  /* We must find the insn in which VAL is used, so that we get to the UD
     chains.  Since this function is sometimes called on the result of
     get_condition, this does not necessarily have to be directly INSN;
     scan also the following insns.  */
1176
  if (simple_reg_p (val))
1177
    {
1178
      if (GET_CODE (val) == SUBREG)
1179
        reg = SUBREG_REG (val);
1180
      else
1181
        reg = val;
1182
 
1183
      while (!df_find_use (insn, reg))
1184
        insn = NEXT_INSN (insn);
1185
    }
1186
 
1187
  return iv_analyze_op (insn, val, iv);
1188
}
1189
 
1190
/* Analyzes definition of DEF in INSN and stores the result to IV.  */
1191
 
1192
bool
1193
iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
1194
{
1195
  df_ref adef;
1196
 
1197
  adef = df_find_def (insn, def);
1198
  if (!adef)
1199
    return false;
1200
 
1201
  return iv_analyze_def (adef, iv);
1202
}
1203
 
1204
/* Checks whether definition of register REG in INSN is a basic induction
1205
   variable.  IV analysis must have been initialized (via a call to
1206
   iv_analysis_loop_init) for this function to produce a result.  */
1207
 
1208
bool
1209
biv_p (rtx insn, rtx reg)
1210
{
1211
  struct rtx_iv iv;
1212
  df_ref def, last_def;
1213
 
1214
  if (!simple_reg_p (reg))
1215
    return false;
1216
 
1217
  def = df_find_def (insn, reg);
1218
  gcc_assert (def != NULL);
1219
  if (!latch_dominating_def (reg, &last_def))
1220
    return false;
1221
  if (last_def != def)
1222
    return false;
1223
 
1224
  if (!iv_analyze_biv (reg, &iv))
1225
    return false;
1226
 
1227
  return iv.step != const0_rtx;
1228
}
1229
 
1230
/* Calculates value of IV at ITERATION-th iteration.  */
1231
 
1232
rtx
1233
get_iv_value (struct rtx_iv *iv, rtx iteration)
1234
{
1235
  rtx val;
1236
 
1237
  /* We would need to generate some if_then_else patterns, and so far
1238
     it is not needed anywhere.  */
1239
  gcc_assert (!iv->first_special);
1240
 
1241
  if (iv->step != const0_rtx && iteration != const0_rtx)
1242
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1243
                               simplify_gen_binary (MULT, iv->extend_mode,
1244
                                                    iv->step, iteration));
1245
  else
1246
    val = iv->base;
1247
 
1248
  if (iv->extend_mode == iv->mode)
1249
    return val;
1250
 
1251
  val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1252
 
1253
  if (iv->extend == UNKNOWN)
1254
    return val;
1255
 
1256
  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
1257
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1258
                             simplify_gen_binary (MULT, iv->extend_mode,
1259
                                                  iv->mult, val));
1260
 
1261
  return val;
1262
}
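
/* For example (illustration only), for an iv with base B, step 4 and no
   extension, get_iv_value (iv, i) builds B + 4 * i; when an extension is
   recorded it instead builds delta + mult * extend (lowpart (B + step * i)),
   matching the form printed by dump_iv_info.  */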
1263
 
1264
/* Free the data for an induction variable analysis.  */
1265
 
1266
void
1267
iv_analysis_done (void)
1268
{
1269
  if (!clean_slate)
1270
    {
1271
      clear_iv_info ();
1272
      clean_slate = true;
1273
      df_finish_pass (true);
1274
      htab_delete (bivs);
1275
      free (iv_ref_table);
1276
      iv_ref_table = NULL;
1277
      iv_ref_table_size = 0;
1278
      bivs = NULL;
1279
    }
1280
}
1281
 
1282
/* Computes the multiplicative inverse of X modulo (1 << MOD).  */
1283
 
1284
static unsigned HOST_WIDEST_INT
1285
inverse (unsigned HOST_WIDEST_INT x, int mod)
1286
{
1287
  unsigned HOST_WIDEST_INT mask =
1288
          ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1289
  unsigned HOST_WIDEST_INT rslt = 1;
1290
  int i;
1291
 
1292
  for (i = 0; i < mod - 1; i++)
1293
    {
1294
      rslt = (rslt * x) & mask;
1295
      x = (x * x) & mask;
1296
    }
1297
 
1298
  return rslt;
1299
}
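
/* For example, inverse (3, 4) uses mask 15 and computes rslt = 3, 11, 11
   over its three iterations, returning 11; indeed 3 * 11 = 33 == 1 (mod 16).
   X must be odd for an inverse modulo a power of two to exist.  */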
1300
 
1301
/* Checks whether register *REG is in set ALT.  Callback for for_each_rtx.  */
1302
 
1303
static int
1304
altered_reg_used (rtx *reg, void *alt)
1305
{
1306
  if (!REG_P (*reg))
1307
    return 0;
1308
 
1309
  return REGNO_REG_SET_P ((bitmap) alt, REGNO (*reg));
1310
}
1311
 
1312
/* Marks registers altered by EXPR in set ALT.  */
1313
 
1314
static void
1315
mark_altered (rtx expr, const_rtx by ATTRIBUTE_UNUSED, void *alt)
1316
{
1317
  if (GET_CODE (expr) == SUBREG)
1318
    expr = SUBREG_REG (expr);
1319
  if (!REG_P (expr))
1320
    return;
1321
 
1322
  SET_REGNO_REG_SET ((bitmap) alt, REGNO (expr));
1323
}
1324
 
1325
/* Checks whether RHS is simple enough to process.  */
1326
 
1327
static bool
1328
simple_rhs_p (rtx rhs)
1329
{
1330
  rtx op0, op1;
1331
 
1332
  if (function_invariant_p (rhs)
1333
      || (REG_P (rhs) && !HARD_REGISTER_P (rhs)))
1334
    return true;
1335
 
1336
  switch (GET_CODE (rhs))
1337
    {
1338
    case PLUS:
1339
    case MINUS:
1340
    case AND:
1341
      op0 = XEXP (rhs, 0);
1342
      op1 = XEXP (rhs, 1);
1343
      /* Allow reg OP const and reg OP reg.  */
1344
      if (!(REG_P (op0) && !HARD_REGISTER_P (op0))
1345
          && !function_invariant_p (op0))
1346
        return false;
1347
      if (!(REG_P (op1) && !HARD_REGISTER_P (op1))
1348
          && !function_invariant_p (op1))
1349
        return false;
1350
 
1351
      return true;
1352
 
1353
    case ASHIFT:
1354
    case ASHIFTRT:
1355
    case LSHIFTRT:
1356
    case MULT:
1357
      op0 = XEXP (rhs, 0);
1358
      op1 = XEXP (rhs, 1);
1359
      /* Allow reg OP const.  */
1360
      if (!(REG_P (op0) && !HARD_REGISTER_P (op0)))
1361
        return false;
1362
      if (!function_invariant_p (op1))
1363
        return false;
1364
 
1365
      return true;
1366
 
1367
    default:
1368
      return false;
1369
    }
1370
}
1371
 
1372
/* If REG has a single definition, replace it with its known value in EXPR.
1373
   Callback for for_each_rtx.  */
1374
 
1375
static int
1376
replace_single_def_regs (rtx *reg, void *expr1)
1377
{
1378
  unsigned regno;
1379
  df_ref adef;
1380
  rtx set, src;
1381
  rtx *expr = (rtx *)expr1;
1382
 
1383
  if (!REG_P (*reg))
1384
    return 0;
1385
 
1386
  regno = REGNO (*reg);
1387
  for (;;)
1388
    {
1389
      rtx note;
1390
      adef = DF_REG_DEF_CHAIN (regno);
1391
      if (adef == NULL || DF_REF_NEXT_REG (adef) != NULL
1392
            || DF_REF_IS_ARTIFICIAL (adef))
1393
        return -1;
1394
 
1395
      set = single_set (DF_REF_INSN (adef));
1396
      if (set == NULL || !REG_P (SET_DEST (set))
1397
          || REGNO (SET_DEST (set)) != regno)
1398
        return -1;
1399
 
1400
      note = find_reg_equal_equiv_note (DF_REF_INSN (adef));
1401
 
1402
      if (note && function_invariant_p (XEXP (note, 0)))
1403
        {
1404
          src = XEXP (note, 0);
1405
          break;
1406
        }
1407
      src = SET_SRC (set);
1408
 
1409
      if (REG_P (src))
1410
        {
1411
          regno = REGNO (src);
1412
          continue;
1413
        }
1414
      break;
1415
    }
1416
  if (!function_invariant_p (src))
1417
    return -1;
1418
 
1419
  *expr = simplify_replace_rtx (*expr, *reg, src);
1420
  return 1;
1421
}
1422
 
1423
/* A subroutine of simplify_using_initial_values, this function examines INSN
1424
   to see if it contains a suitable set that we can use to make a replacement.
1425
   If it is suitable, return true and set DEST and SRC to the lhs and rhs of
1426
   the set; return false otherwise.  */
1427
 
1428
static bool
1429
suitable_set_for_replacement (rtx insn, rtx *dest, rtx *src)
1430
{
1431
  rtx set = single_set (insn);
1432
  rtx lhs = NULL_RTX, rhs;
1433
 
1434
  if (!set)
1435
    return false;
1436
 
1437
  lhs = SET_DEST (set);
1438
  if (!REG_P (lhs))
1439
    return false;
1440
 
1441
  rhs = find_reg_equal_equiv_note (insn);
1442
  if (rhs)
1443
    rhs = XEXP (rhs, 0);
1444
  else
1445
    rhs = SET_SRC (set);
1446
 
1447
  if (!simple_rhs_p (rhs))
1448
    return false;
1449
 
1450
  *dest = lhs;
1451
  *src = rhs;
1452
  return true;
1453
}
1454
 
1455
/* Using the data returned by suitable_set_for_replacement, replace DEST
   with SRC in *EXPR, storing the simplified result back into *EXPR.  Also
   call replace_single_def_regs if the replacement changed something.  */
1458
static void
1459
replace_in_expr (rtx *expr, rtx dest, rtx src)
1460
{
1461
  rtx old = *expr;
1462
  *expr = simplify_replace_rtx (*expr, dest, src);
1463
  if (old == *expr)
1464
    return;
1465
  while (for_each_rtx (expr, replace_single_def_regs, expr) != 0)
1466
    continue;
1467
}
1468
 
1469
/* Checks whether A implies B.  */
1470
 
1471
static bool
1472
implies_p (rtx a, rtx b)
1473
{
1474
  rtx op0, op1, opb0, opb1, r;
1475
  enum machine_mode mode;
1476
 
1477
  if (GET_CODE (a) == EQ)
1478
    {
1479
      op0 = XEXP (a, 0);
1480
      op1 = XEXP (a, 1);
1481
 
1482
      if (REG_P (op0))
1483
        {
1484
          r = simplify_replace_rtx (b, op0, op1);
1485
          if (r == const_true_rtx)
1486
            return true;
1487
        }
1488
 
1489
      if (REG_P (op1))
1490
        {
1491
          r = simplify_replace_rtx (b, op1, op0);
1492
          if (r == const_true_rtx)
1493
            return true;
1494
        }
1495
    }
1496
 
1497
  if (b == const_true_rtx)
1498
    return true;
1499
 
1500
  if ((GET_RTX_CLASS (GET_CODE (a)) != RTX_COMM_COMPARE
1501
       && GET_RTX_CLASS (GET_CODE (a)) != RTX_COMPARE)
1502
      || (GET_RTX_CLASS (GET_CODE (b)) != RTX_COMM_COMPARE
1503
          && GET_RTX_CLASS (GET_CODE (b)) != RTX_COMPARE))
1504
    return false;
1505
 
1506
  op0 = XEXP (a, 0);
1507
  op1 = XEXP (a, 1);
1508
  opb0 = XEXP (b, 0);
1509
  opb1 = XEXP (b, 1);
1510
 
1511
  mode = GET_MODE (op0);
1512
  if (mode != GET_MODE (opb0))
1513
    mode = VOIDmode;
1514
  else if (mode == VOIDmode)
1515
    {
1516
      mode = GET_MODE (op1);
1517
      if (mode != GET_MODE (opb1))
1518
        mode = VOIDmode;
1519
    }
1520
 
1521
  /* A < B implies A + 1 <= B.  */
1522
  if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1523
      && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1524
    {
1525
 
1526
      if (GET_CODE (a) == GT)
1527
        {
1528
          r = op0;
1529
          op0 = op1;
1530
          op1 = r;
1531
        }
1532
 
1533
      if (GET_CODE (b) == GE)
1534
        {
1535
          r = opb0;
1536
          opb0 = opb1;
1537
          opb1 = r;
1538
        }
1539
 
1540
      if (SCALAR_INT_MODE_P (mode)
1541
          && rtx_equal_p (op1, opb1)
1542
          && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1543
        return true;
1544
      return false;
1545
    }
1546
 
1547
  /* A < B or A > B imply A != B.  TODO: Likewise
1548
     A + n < B implies A != B + n if neither wraps.  */
1549
  if (GET_CODE (b) == NE
1550
      && (GET_CODE (a) == GT || GET_CODE (a) == GTU
1551
          || GET_CODE (a) == LT || GET_CODE (a) == LTU))
1552
    {
1553
      if (rtx_equal_p (op0, opb0)
1554
          && rtx_equal_p (op1, opb1))
1555
        return true;
1556
    }
1557
 
1558
  /* For unsigned comparisons, A != 0 implies A > 0 and A >= 1.  */
1559
  if (GET_CODE (a) == NE
1560
      && op1 == const0_rtx)
1561
    {
1562
      if ((GET_CODE (b) == GTU
1563
           && opb1 == const0_rtx)
1564
          || (GET_CODE (b) == GEU
1565
              && opb1 == const1_rtx))
1566
        return rtx_equal_p (op0, opb0);
1567
    }
1568
 
1569
  /* A != N is equivalent to A - (N + 1) <u -1.  */
1570
  if (GET_CODE (a) == NE
1571
      && CONST_INT_P (op1)
1572
      && GET_CODE (b) == LTU
1573
      && opb1 == constm1_rtx
1574
      && GET_CODE (opb0) == PLUS
1575
      && CONST_INT_P (XEXP (opb0, 1))
1576
      /* Avoid overflows.  */
1577
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1578
          != ((unsigned HOST_WIDE_INT)1
1579
              << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1580
      && INTVAL (XEXP (opb0, 1)) + 1 == -INTVAL (op1))
1581
    return rtx_equal_p (op0, XEXP (opb0, 0));
1582
 
1583
  /* Likewise, A != N implies A - N > 0.  */
1584
  if (GET_CODE (a) == NE
1585
      && CONST_INT_P (op1))
1586
    {
1587
      if (GET_CODE (b) == GTU
1588
          && GET_CODE (opb0) == PLUS
1589
          && opb1 == const0_rtx
1590
          && CONST_INT_P (XEXP (opb0, 1))
1591
          /* Avoid overflows.  */
1592
          && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1593
              != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1594
          && rtx_equal_p (XEXP (opb0, 0), op0))
1595
        return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1596
      if (GET_CODE (b) == GEU
1597
          && GET_CODE (opb0) == PLUS
1598
          && opb1 == const1_rtx
1599
          && CONST_INT_P (XEXP (opb0, 1))
1600
          /* Avoid overflows.  */
1601
          && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
1602
              != ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
1603
          && rtx_equal_p (XEXP (opb0, 0), op0))
1604
        return INTVAL (op1) == -INTVAL (XEXP (opb0, 1));
1605
    }
1606
 
1607
  /* A >s X, where X is positive, implies A <u Y, if Y is negative.  */
1608
  if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
1609
      && CONST_INT_P (op1)
1610
      && ((GET_CODE (a) == GT && op1 == constm1_rtx)
1611
          || INTVAL (op1) >= 0)
1612
      && GET_CODE (b) == LTU
1613
      && CONST_INT_P (opb1)
1614
      && rtx_equal_p (op0, opb0))
1615
    return INTVAL (opb1) < 0;
1616
 
1617
  return false;
1618
}
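
/* For example (illustration only), implies_p derives that (lt:SI x 10)
   implies (le:SI (plus:SI x 1) 10) via the "A < B implies A + 1 <= B" case,
   and that (ne:SI x 0) implies (gtu:SI x 0).  */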
1619
 
1620
/* Canonicalizes COND so that

   (1) Operands are ordered according to swap_commutative_operands_p.
   (2) (LE x const) is replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.  */
1626
 
1627
rtx
1628
canon_condition (rtx cond)
1629
{
1630
  rtx tem;
1631
  rtx op0, op1;
1632
  enum rtx_code code;
1633
  enum machine_mode mode;
1634
 
1635
  code = GET_CODE (cond);
1636
  op0 = XEXP (cond, 0);
1637
  op1 = XEXP (cond, 1);
1638
 
1639
  if (swap_commutative_operands_p (op0, op1))
1640
    {
1641
      code = swap_condition (code);
1642
      tem = op0;
1643
      op0 = op1;
1644
      op1 = tem;
1645
    }
1646
 
1647
  mode = GET_MODE (op0);
1648
  if (mode == VOIDmode)
1649
    mode = GET_MODE (op1);
1650
  gcc_assert (mode != VOIDmode);
1651
 
1652
  if (CONST_INT_P (op1)
1653
      && GET_MODE_CLASS (mode) != MODE_CC
1654
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1655
    {
1656
      HOST_WIDE_INT const_val = INTVAL (op1);
1657
      unsigned HOST_WIDE_INT uconst_val = const_val;
1658
      unsigned HOST_WIDE_INT max_val
1659
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1660
 
1661
      switch (code)
1662
        {
1663
        case LE:
1664
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1665
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1666
          break;
1667
 
1668
        /* When cross-compiling, const_val might be sign-extended from
1669
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1670
        case GE:
1671
          if ((HOST_WIDE_INT) (const_val & max_val)
1672
              != (((HOST_WIDE_INT) 1
1673
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1674
            code = GT, op1 = gen_int_mode (const_val - 1, mode);
1675
          break;
1676
 
1677
        case LEU:
1678
          if (uconst_val < max_val)
1679
            code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1680
          break;
1681
 
1682
        case GEU:
1683
          if (uconst_val != 0)
1684
            code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1685
          break;
1686
 
1687
        default:
1688
          break;
1689
        }
1690
    }
1691
 
1692
  if (op0 != XEXP (cond, 0)
1693
      || op1 != XEXP (cond, 1)
1694
      || code != GET_CODE (cond)
1695
      || GET_MODE (cond) != SImode)
1696
    cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1697
 
1698
  return cond;
1699
}
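
/* For example (illustration only), with SImode operands canon_condition
   rewrites (le x (const_int 4)) as (lt x (const_int 5)) and
   (geu x (const_int 7)) as (gtu x (const_int 6)), while a comparison of two
   registers is only reordered according to swap_commutative_operands_p.  */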
1700
 
1701
/* Tries to use the fact that COND holds to simplify EXPR.  ALTERED is the
1702
   set of altered regs.  */
1703
 
1704
void
1705
simplify_using_condition (rtx cond, rtx *expr, regset altered)
1706
{
1707
  rtx rev, reve, exp = *expr;
1708
 
1709
  /* If some register gets altered later, we do not really speak about its
1710
     value at the time of comparison.  */
1711
  if (altered
1712
      && for_each_rtx (&cond, altered_reg_used, altered))
1713
    return;
1714
 
1715
  if (GET_CODE (cond) == EQ
1716
      && REG_P (XEXP (cond, 0)) && CONSTANT_P (XEXP (cond, 1)))
1717
    {
1718
      *expr = simplify_replace_rtx (*expr, XEXP (cond, 0), XEXP (cond, 1));
1719
      return;
1720
    }
1721
 
1722
  if (!COMPARISON_P (exp))
1723
    return;
1724
 
1725
  rev = reversed_condition (cond);
1726
  reve = reversed_condition (exp);
1727
 
1728
  cond = canon_condition (cond);
1729
  exp = canon_condition (exp);
1730
  if (rev)
1731
    rev = canon_condition (rev);
1732
  if (reve)
1733
    reve = canon_condition (reve);
1734
 
1735
  if (rtx_equal_p (exp, cond))
1736
    {
1737
      *expr = const_true_rtx;
1738
      return;
1739
    }
1740
 
1741
  if (rev && rtx_equal_p (exp, rev))
1742
    {
1743
      *expr = const0_rtx;
1744
      return;
1745
    }
1746
 
1747
  if (implies_p (cond, exp))
1748
    {
1749
      *expr = const_true_rtx;
1750
      return;
1751
    }
1752
 
1753
  if (reve && implies_p (cond, reve))
1754
    {
1755
      *expr = const0_rtx;
1756
      return;
1757
    }
1758
 
1759
  /* A proof by contradiction.  If *EXPR implies (not cond), *EXPR must
1760
     be false.  */
1761
  if (rev && implies_p (exp, rev))
1762
    {
1763
      *expr = const0_rtx;
1764
      return;
1765
    }
1766
 
1767
  /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
1768
  if (rev && reve && implies_p (reve, rev))
1769
    {
1770
      *expr = const_true_rtx;
1771
      return;
1772
    }
1773
 
1774
  /* We would like to have some other tests here.  TODO.  */
1775
 
1776
  return;
1777
}
1778
 
1779
/* Use the relationship between A and *B to eventually eliminate *B.
   OP is the operation we consider.  */
1781
 
1782
static void
1783
eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1784
{
1785
  switch (op)
1786
    {
1787
    case AND:
1788
      /* If A implies *B, we may replace *B by true.  */
1789
      if (implies_p (a, *b))
1790
        *b = const_true_rtx;
1791
      break;
1792
 
1793
    case IOR:
1794
      /* If *B implies A, we may replace *B by false.  */
1795
      if (implies_p (*b, a))
1796
        *b = const0_rtx;
1797
      break;
1798
 
1799
    default:
1800
      gcc_unreachable ();
1801
    }
1802
}
1803
 
1804
/* Eliminates the conditions in TAIL that are implied by HEAD.  OP is the
1805
   operation we consider.  */
1806
 
1807
static void
1808
eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1809
{
1810
  rtx elt;
1811
 
1812
  for (elt = tail; elt; elt = XEXP (elt, 1))
1813
    eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1814
  for (elt = tail; elt; elt = XEXP (elt, 1))
1815
    eliminate_implied_condition (op, XEXP (elt, 0), head);
1816
}
1817
 
1818
/* Simplifies *EXPR using initial values at the start of the LOOP.  If *EXPR
1819
   is a list, its elements are assumed to be combined using OP.  */
1820
 
1821
static void
1822
simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1823
{
1824
  bool expression_valid;
1825
  rtx head, tail, insn, cond_list, last_valid_expr;
1826
  rtx neutral, aggr;
1827
  regset altered, this_altered;
1828
  edge e;
1829
 
1830
  if (!*expr)
1831
    return;
1832
 
1833
  if (CONSTANT_P (*expr))
1834
    return;
1835
 
1836
  if (GET_CODE (*expr) == EXPR_LIST)
1837
    {
1838
      head = XEXP (*expr, 0);
1839
      tail = XEXP (*expr, 1);
1840
 
1841
      eliminate_implied_conditions (op, &head, tail);
1842
 
1843
      switch (op)
1844
        {
1845
        case AND:
1846
          neutral = const_true_rtx;
1847
          aggr = const0_rtx;
1848
          break;
1849
 
1850
        case IOR:
1851
          neutral = const0_rtx;
1852
          aggr = const_true_rtx;
1853
          break;
1854
 
1855
        default:
1856
          gcc_unreachable ();
1857
        }
1858
 
1859
      simplify_using_initial_values (loop, UNKNOWN, &head);
1860
      if (head == aggr)
1861
        {
1862
          XEXP (*expr, 0) = aggr;
1863
          XEXP (*expr, 1) = NULL_RTX;
1864
          return;
1865
        }
1866
      else if (head == neutral)
1867
        {
1868
          *expr = tail;
1869
          simplify_using_initial_values (loop, op, expr);
1870
          return;
1871
        }
1872
      simplify_using_initial_values (loop, op, &tail);
1873
 
1874
      if (tail && XEXP (tail, 0) == aggr)
1875
        {
1876
          *expr = tail;
1877
          return;
1878
        }
1879
 
1880
      XEXP (*expr, 0) = head;
1881
      XEXP (*expr, 1) = tail;
1882
      return;
1883
    }
1884
 
1885
  gcc_assert (op == UNKNOWN);
1886
 
1887
  for (;;)
1888
    if (for_each_rtx (expr, replace_single_def_regs, expr) == 0)
1889
      break;
1890
  if (CONSTANT_P (*expr))
1891
    return;
1892
 
1893
  e = loop_preheader_edge (loop);
1894
  if (e->src == ENTRY_BLOCK_PTR)
1895
    return;
1896
 
1897
  altered = ALLOC_REG_SET (&reg_obstack);
1898
  this_altered = ALLOC_REG_SET (&reg_obstack);
1899
 
1900
  expression_valid = true;
1901
  last_valid_expr = *expr;
1902
  cond_list = NULL_RTX;
1903
  while (1)
1904
    {
1905
      insn = BB_END (e->src);
1906
      if (any_condjump_p (insn))
1907
        {
1908
          rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1909
 
1910
          if (cond && (e->flags & EDGE_FALLTHRU))
1911
            cond = reversed_condition (cond);
1912
          if (cond)
1913
            {
1914
              rtx old = *expr;
1915
              simplify_using_condition (cond, expr, altered);
1916
              if (old != *expr)
1917
                {
1918
                  rtx note;
1919
                  if (CONSTANT_P (*expr))
1920
                    goto out;
1921
                  for (note = cond_list; note; note = XEXP (note, 1))
1922
                    {
1923
                      simplify_using_condition (XEXP (note, 0), expr, altered);
1924
                      if (CONSTANT_P (*expr))
1925
                        goto out;
1926
                    }
1927
                }
1928
              cond_list = alloc_EXPR_LIST (0, cond, cond_list);
1929
            }
1930
        }
1931
 
1932
      FOR_BB_INSNS_REVERSE (e->src, insn)
1933
        {
1934
          rtx src, dest;
1935
          rtx old = *expr;
1936
 
1937
          if (!INSN_P (insn))
1938
            continue;
1939
 
1940
          CLEAR_REG_SET (this_altered);
1941
          note_stores (PATTERN (insn), mark_altered, this_altered);
1942
          if (CALL_P (insn))
1943
            {
1944
              int i;
1945
 
1946
              /* Kill all call clobbered registers.  */
1947
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1948
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1949
                  SET_REGNO_REG_SET (this_altered, i);
1950
            }
1951
 
1952
          if (suitable_set_for_replacement (insn, &dest, &src))
1953
            {
1954
              rtx *pnote, *pnote_next;
1955
 
1956
              replace_in_expr (expr, dest, src);
1957
              if (CONSTANT_P (*expr))
1958
                goto out;
1959
 
1960
              for (pnote = &cond_list; *pnote; pnote = pnote_next)
1961
                {
1962
                  rtx note = *pnote;
1963
                  rtx old_cond = XEXP (note, 0);
1964
 
1965
                  pnote_next = &XEXP (note, 1);
1966
                  replace_in_expr (&XEXP (note, 0), dest, src);
1967
 
1968
                  /* We can no longer use a condition that has been simplified
1969
                     to a constant, and simplify_using_condition will abort if
1970
                     we try.  */
1971
                  if (CONSTANT_P (XEXP (note, 0)))
1972
                    {
1973
                      *pnote = *pnote_next;
1974
                      pnote_next = pnote;
1975
                      free_EXPR_LIST_node (note);
1976
                    }
1977
                  /* Retry simplifications with this condition if either the
1978
                     expression or the condition changed.  */
1979
                  else if (old_cond != XEXP (note, 0) || old != *expr)
1980
                    simplify_using_condition (XEXP (note, 0), expr, altered);
1981
                }
1982
            }
1983
          else
1984
            /* If we did not use this insn to make a replacement, any overlap
1985
               between stores in this insn and our expression will cause the
1986
               expression to become invalid.  */
1987
            if (for_each_rtx (expr, altered_reg_used, this_altered))
1988
              goto out;
1989
 
1990
          if (CONSTANT_P (*expr))
1991
            goto out;
1992
 
1993
          IOR_REG_SET (altered, this_altered);
1994
 
1995
          /* If the expression now contains regs that have been altered, we
1996
             can't return it to the caller.  However, it is still valid for
1997
             further simplification, so keep searching to see if we can
1998
             eventually turn it into a constant.  */
1999
          if (for_each_rtx (expr, altered_reg_used, altered))
2000
            expression_valid = false;
2001
          if (expression_valid)
2002
            last_valid_expr = *expr;
2003
        }
2004
 
2005
      if (!single_pred_p (e->src)
2006
          || single_pred (e->src) == ENTRY_BLOCK_PTR)
2007
        break;
2008
      e = single_pred_edge (e->src);
2009
    }
2010
 
2011
 out:
2012
  free_EXPR_LIST_list (&cond_list);
2013
  if (!CONSTANT_P (*expr))
2014
    *expr = last_valid_expr;
2015
  FREE_REG_SET (altered);
2016
  FREE_REG_SET (this_altered);
2017
}
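
/* A minimal standalone sketch of the AND/IOR folding performed above,
   assuming plain truth values instead of rtx expressions: the neutral
   element of the operation (true for AND, false for IOR) is dropped, and
   the absorbing element (false for AND, true for IOR) decides the whole
   list.  The enum and helper names below are illustrative, not GCC's.  */

#include <stdbool.h>

enum fold_op { FOLD_AND, FOLD_IOR };

static bool
fold_condition_list (enum fold_op op, const bool *vals, int n)
{
  bool neutral = (op == FOLD_AND);
  bool aggr = !neutral;
  int i;

  for (i = 0; i < n; i++)
    if (vals[i] == aggr)
      return aggr;      /* The absorbing element short-circuits the result.  */

  /* Every element equal to NEUTRAL contributed nothing.  */
  return neutral;
}

/* E.g. { true, false } folds to false under FOLD_AND and to true under
   FOLD_IOR, mirroring how head == aggr or head == neutral is handled.  */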
2018
 
2019
/* Transforms invariant IV into MODE.  Adds assumptions based on the fact
2020
   that IV occurs as the left operand of comparison COND and that its signedness
2021
   is SIGNED_P to DESC.  */
2022
 
2023
static void
2024
shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
2025
                   enum rtx_code cond, bool signed_p, struct niter_desc *desc)
2026
{
2027
  rtx mmin, mmax, cond_over, cond_under;
2028
 
2029
  get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
2030
  cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
2031
                                        iv->base, mmin);
2032
  cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
2033
                                       iv->base, mmax);
2034
 
2035
  switch (cond)
2036
    {
2037
      case LE:
2038
      case LT:
2039
      case LEU:
2040
      case LTU:
2041
        if (cond_under != const0_rtx)
2042
          desc->infinite =
2043
                  alloc_EXPR_LIST (0, cond_under, desc->infinite);
2044
        if (cond_over != const0_rtx)
2045
          desc->noloop_assumptions =
2046
                  alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
2047
        break;
2048
 
2049
      case GE:
2050
      case GT:
2051
      case GEU:
2052
      case GTU:
2053
        if (cond_over != const0_rtx)
2054
          desc->infinite =
2055
                  alloc_EXPR_LIST (0, cond_over, desc->infinite);
2056
        if (cond_under != const0_rtx)
2057
          desc->noloop_assumptions =
2058
                  alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
2059
        break;
2060
 
2061
      case NE:
2062
        if (cond_over != const0_rtx)
2063
          desc->infinite =
2064
                  alloc_EXPR_LIST (0, cond_over, desc->infinite);
2065
        if (cond_under != const0_rtx)
2066
          desc->infinite =
2067
                  alloc_EXPR_LIST (0, cond_under, desc->infinite);
2068
        break;
2069
 
2070
      default:
2071
        gcc_unreachable ();
2072
    }
2073
 
2074
  iv->mode = mode;
2075
  iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
2076
}
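
/* A minimal sketch of the bound classification above, assuming the iv is an
   invariant (zero step) being shortened into a narrower mode with bounds
   [MMIN, MMAX]: for the LE/LT/LEU/LTU cases a base below MMIN feeds the
   "infinite" list and a base above MMAX the "noloop" list, while anything in
   range needs no assumption.  The names here are illustrative only.  */

enum shorten_verdict
{
  SHORTEN_IN_RANGE,      /* no assumption needed  */
  SHORTEN_UNDER,         /* base < MMIN: recorded on desc->infinite  */
  SHORTEN_OVER           /* base > MMAX: recorded on desc->noloop_assumptions  */
};

static enum shorten_verdict
classify_shortened_base (long base, long mmin, long mmax)
{
  if (base < mmin)
    return SHORTEN_UNDER;
  if (base > mmax)
    return SHORTEN_OVER;
  return SHORTEN_IN_RANGE;
}

/* E.g. classify_shortened_base (300, -128, 127) == SHORTEN_OVER: a QImode
   range cannot hold 300, so the corresponding assumption must be recorded.  */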
2077
 
2078
/* Transforms IV0 and IV1 compared by COND so that they are both compared as
2079
   subregs of the same mode if possible (sometimes it is necessary to add
2080
   some assumptions to DESC).  */
2081
 
2082
static bool
2083
canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
2084
                         enum rtx_code cond, struct niter_desc *desc)
2085
{
2086
  enum machine_mode comp_mode;
2087
  bool signed_p;
2088
 
2089
  /* If the ivs behave specially in the first iteration, or are
2090
     added/multiplied after extending, we ignore them.  */
2091
  if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
2092
    return false;
2093
  if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
2094
    return false;
2095
 
2096
  /* If there is some extend, it must match signedness of the comparison.  */
2097
  switch (cond)
2098
    {
2099
      case LE:
2100
      case LT:
2101
        if (iv0->extend == ZERO_EXTEND
2102
            || iv1->extend == ZERO_EXTEND)
2103
          return false;
2104
        signed_p = true;
2105
        break;
2106
 
2107
      case LEU:
2108
      case LTU:
2109
        if (iv0->extend == SIGN_EXTEND
2110
            || iv1->extend == SIGN_EXTEND)
2111
          return false;
2112
        signed_p = false;
2113
        break;
2114
 
2115
      case NE:
2116
        if (iv0->extend != UNKNOWN
2117
            && iv1->extend != UNKNOWN
2118
            && iv0->extend != iv1->extend)
2119
          return false;
2120
 
2121
        signed_p = false;
2122
        if (iv0->extend != UNKNOWN)
2123
          signed_p = iv0->extend == SIGN_EXTEND;
2124
        if (iv1->extend != UNKNOWN)
2125
          signed_p = iv1->extend == SIGN_EXTEND;
2126
        break;
2127
 
2128
      default:
2129
        gcc_unreachable ();
2130
    }
2131
 
2132
  /* Values of both variables should be computed in the same mode.  These
2133
     might indeed be different, if we have a comparison like
2134
 
2135
     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
2136
 
2137
     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
2138
     in different modes.  This does not seem impossible to handle, but
2139
     it hardly ever occurs in practice.
2140
 
2141
     The only exception is the case when one of operands is invariant.
2142
     For example pentium 3 generates comparisons like
2143
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
2144
     definitely do not want this to prevent the optimization.  */
2145
  comp_mode = iv0->extend_mode;
2146
  if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
2147
    comp_mode = iv1->extend_mode;
2148
 
2149
  if (iv0->extend_mode != comp_mode)
2150
    {
2151
      if (iv0->mode != iv0->extend_mode
2152
          || iv0->step != const0_rtx)
2153
        return false;
2154
 
2155
      iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2156
                                      comp_mode, iv0->base, iv0->mode);
2157
      iv0->extend_mode = comp_mode;
2158
    }
2159
 
2160
  if (iv1->extend_mode != comp_mode)
2161
    {
2162
      if (iv1->mode != iv1->extend_mode
2163
          || iv1->step != const0_rtx)
2164
        return false;
2165
 
2166
      iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
2167
                                      comp_mode, iv1->base, iv1->mode);
2168
      iv1->extend_mode = comp_mode;
2169
    }
2170
 
2171
  /* Check that both ivs belong to a range of a single mode.  If one of the
2172
     operands is an invariant, we may need to shorten it into the common
2173
     mode.  */
2174
  if (iv0->mode == iv0->extend_mode
2175
      && iv0->step == const0_rtx
2176
      && iv0->mode != iv1->mode)
2177
    shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
2178
 
2179
  if (iv1->mode == iv1->extend_mode
2180
      && iv1->step == const0_rtx
2181
      && iv0->mode != iv1->mode)
2182
    shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
2183
 
2184
  if (iv0->mode != iv1->mode)
2185
    return false;
2186
 
2187
  desc->mode = iv0->mode;
2188
  desc->signed_p = signed_p;
2189
 
2190
  return true;
2191
}
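
/* A minimal sketch of the signedness check above: any extension applied to
   either iv must agree with the signedness of the comparison, and for NE the
   two extensions may only disagree if one of them is absent.  EXTEND_NONE,
   CMP_LT_SIGNED and the helper are illustrative stand-ins for the rtl codes
   used in this file (UNKNOWN, LE/LT, LEU/LTU, NE).  */

#include <stdbool.h>

enum ext_kind { EXTEND_NONE, EXTEND_SIGN, EXTEND_ZERO };
enum cmp_kind { CMP_LT_SIGNED, CMP_LT_UNSIGNED, CMP_NE };

static bool
extends_match_comparison (enum ext_kind e0, enum ext_kind e1,
                          enum cmp_kind cmp, bool *signed_p)
{
  switch (cmp)
    {
    case CMP_LT_SIGNED:         /* LE/LT: no zero extends allowed.  */
      if (e0 == EXTEND_ZERO || e1 == EXTEND_ZERO)
        return false;
      *signed_p = true;
      return true;

    case CMP_LT_UNSIGNED:       /* LEU/LTU: no sign extends allowed.  */
      if (e0 == EXTEND_SIGN || e1 == EXTEND_SIGN)
        return false;
      *signed_p = false;
      return true;

    case CMP_NE:                /* NE: extends may not contradict each other.  */
      if (e0 != EXTEND_NONE && e1 != EXTEND_NONE && e0 != e1)
        return false;
      *signed_p = (e0 == EXTEND_SIGN || e1 == EXTEND_SIGN);
      return true;
    }
  return false;
}

/* E.g. a signed comparison of a sign-extended iv against a zero-extended one
   is rejected, just as the switch above returns false for that pair.  */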
2192
 
2193
/* Tries to estimate the maximum number of iterations in LOOP, and store the
2194
   result in DESC.  This function is called from iv_number_of_iterations with
2195
   a number of fields in DESC already filled in.  OLD_NITER is the original
2196
   expression for the number of iterations, before we tried to simplify it.  */
2197
 
2198
static unsigned HOST_WIDEST_INT
2199
determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
2200
{
2201
  rtx niter = desc->niter_expr;
2202
  rtx mmin, mmax, cmp;
2203
  unsigned HOST_WIDEST_INT nmax, inc;
2204
 
2205
  if (GET_CODE (niter) == AND
2206
      && CONST_INT_P (XEXP (niter, 0)))
2207
    {
2208
      nmax = INTVAL (XEXP (niter, 0));
2209
      if (!(nmax & (nmax + 1)))
2210
        {
2211
          desc->niter_max = nmax;
2212
          return nmax;
2213
        }
2214
    }
2215
 
2216
  get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
2217
  nmax = INTVAL (mmax) - INTVAL (mmin);
2218
 
2219
  if (GET_CODE (niter) == UDIV)
2220
    {
2221
      if (!CONST_INT_P (XEXP (niter, 1)))
2222
        {
2223
          desc->niter_max = nmax;
2224
          return nmax;
2225
        }
2226
      inc = INTVAL (XEXP (niter, 1));
2227
      niter = XEXP (niter, 0);
2228
    }
2229
  else
2230
    inc = 1;
2231
 
2232
  /* We could use a binary search here, but for now improving the upper
2233
     bound by just one eliminates one important corner case.  */
2234
  cmp = simplify_gen_relational (desc->signed_p ? LT : LTU, VOIDmode,
2235
                                 desc->mode, old_niter, mmax);
2236
  simplify_using_initial_values (loop, UNKNOWN, &cmp);
2237
  if (cmp == const_true_rtx)
2238
    {
2239
      nmax--;
2240
 
2241
      if (dump_file)
2242
        fprintf (dump_file, ";; improved upper bound by one.\n");
2243
    }
2244
  desc->niter_max = nmax / inc;
2245
  return nmax / inc;
2246
}
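
/* A minimal sketch of the bound estimation above on plain unsigned values,
   assuming a 32-bit unsigned mode: a count of the form (expr & mask) with
   mask == 2^k - 1 is bounded by the mask itself, a count of the form
   (expr udiv divisor) divides the full mode range, and anything else falls
   back to that full range.  The helper and its calling convention are
   illustrative only (mask == 0 / divisor == 0 meaning "shape not seen").  */

#include <stdint.h>

static uint64_t
estimate_max_iter_32 (uint32_t mask, uint32_t divisor)
{
  uint64_t nmax = UINT32_MAX;   /* mmax - mmin of the unsigned mode  */
  uint64_t m = mask;

  if (mask != 0 && (m & (m + 1)) == 0)
    return m;                   /* niter has shape expr & (2^k - 1)  */

  if (divisor != 0)
    nmax /= divisor;            /* niter has shape expr udiv divisor  */

  return nmax;
}

/* E.g. estimate_max_iter_32 (0xff, 0) == 0xff, and
   estimate_max_iter_32 (0, 10) == UINT32_MAX / 10.  */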
2247
 
2248
/* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2249
   the result into DESC.  Very similar to determine_number_of_iterations
2250
   (basically its rtl version), complicated by things like subregs.  */
2251
 
2252
static void
2253
iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2254
                         struct niter_desc *desc)
2255
{
2256
  rtx op0, op1, delta, step, bound, may_xform, tmp, tmp0, tmp1;
2257
  struct rtx_iv iv0, iv1, tmp_iv;
2258
  rtx assumption, may_not_xform;
2259
  enum rtx_code cond;
2260
  enum machine_mode mode, comp_mode;
2261
  rtx mmin, mmax, mode_mmin, mode_mmax;
2262
  unsigned HOST_WIDEST_INT s, size, d, inv;
2263
  HOST_WIDEST_INT up, down, inc, step_val;
2264
  int was_sharp = false;
2265
  rtx old_niter;
2266
  bool step_is_pow2;
2267
 
2268
  /* The meaning of these assumptions is this:
2269
     if !assumptions
2270
       then the rest of the information does not have to be valid
2271
     if noloop_assumptions then the loop does not roll
2272
     if infinite then this exit is never used */
2273
 
2274
  desc->assumptions = NULL_RTX;
2275
  desc->noloop_assumptions = NULL_RTX;
2276
  desc->infinite = NULL_RTX;
2277
  desc->simple_p = true;
2278
 
2279
  desc->const_iter = false;
2280
  desc->niter_expr = NULL_RTX;
2281
  desc->niter_max = 0;
2282
 
2283
  cond = GET_CODE (condition);
2284
  gcc_assert (COMPARISON_P (condition));
2285
 
2286
  mode = GET_MODE (XEXP (condition, 0));
2287
  if (mode == VOIDmode)
2288
    mode = GET_MODE (XEXP (condition, 1));
2289
  /* The constant comparisons should be folded.  */
2290
  gcc_assert (mode != VOIDmode);
2291
 
2292
  /* We only handle integers or pointers.  */
2293
  if (GET_MODE_CLASS (mode) != MODE_INT
2294
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2295
    goto fail;
2296
 
2297
  op0 = XEXP (condition, 0);
2298
  if (!iv_analyze (insn, op0, &iv0))
2299
    goto fail;
2300
  if (iv0.extend_mode == VOIDmode)
2301
    iv0.mode = iv0.extend_mode = mode;
2302
 
2303
  op1 = XEXP (condition, 1);
2304
  if (!iv_analyze (insn, op1, &iv1))
2305
    goto fail;
2306
  if (iv1.extend_mode == VOIDmode)
2307
    iv1.mode = iv1.extend_mode = mode;
2308
 
2309
  if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2310
      || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2311
    goto fail;
2312
 
2313
  /* Check condition and normalize it.  */
2314
 
2315
  switch (cond)
2316
    {
2317
      case GE:
2318
      case GT:
2319
      case GEU:
2320
      case GTU:
2321
        tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2322
        cond = swap_condition (cond);
2323
        break;
2324
      case NE:
2325
      case LE:
2326
      case LEU:
2327
      case LT:
2328
      case LTU:
2329
        break;
2330
      default:
2331
        goto fail;
2332
    }
2333
 
2334
  /* Handle extends.  This is relatively nontrivial, so we only try in some
2335
     easy cases, when we can canonicalize the ivs (possibly by adding some
2336
     assumptions) to shape subreg (base + i * step).  This function also fills
2337
     in desc->mode and desc->signed_p.  */
2338
 
2339
  if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2340
    goto fail;
2341
 
2342
  comp_mode = iv0.extend_mode;
2343
  mode = iv0.mode;
2344
  size = GET_MODE_BITSIZE (mode);
2345
  get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2346
  mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2347
  mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2348
 
2349
  if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
2350
    goto fail;
2351
 
2352
  /* We can take care of the case of two induction variables chasing each other
2353
     if the test is NE. I have never seen a loop using it, but still it is
2354
     cool.  */
2355
  if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2356
    {
2357
      if (cond != NE)
2358
        goto fail;
2359
 
2360
      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2361
      iv1.step = const0_rtx;
2362
    }
2363
 
2364
  /* This is either an infinite loop or one that ends immediately, depending
2365
     on initial values.  Unswitching should remove this kind of condition.  */
2366
  if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2367
    goto fail;
2368
 
2369
  if (cond != NE)
2370
    {
2371
      if (iv0.step == const0_rtx)
2372
        step_val = -INTVAL (iv1.step);
2373
      else
2374
        step_val = INTVAL (iv0.step);
2375
 
2376
      /* Ignore loops of while (i-- < 10) type.  */
2377
      if (step_val < 0)
2378
        goto fail;
2379
 
2380
      step_is_pow2 = !(step_val & (step_val - 1));
2381
    }
2382
  else
2383
    {
2384
      /* We do not care about whether the step is power of two in this
2385
         case.  */
2386
      step_is_pow2 = false;
2387
      step_val = 0;
2388
    }
2389
 
2390
  /* Some more condition normalization.  We must record some assumptions
2391
     due to overflows.  */
2392
  switch (cond)
2393
    {
2394
      case LT:
2395
      case LTU:
2396
        /* We want to take care only of non-sharp relationals; this is easy,
2397
           as in the cases where the overflow would make the transformation unsafe,
2398
           the loop does not roll.  Seemingly it would make more sense to want
2399
           to take care of sharp relationals instead, as NE is more similar to
2400
           them, but the problem is that here the transformation would be more
2401
           difficult due to possibly infinite loops.  */
2402
        if (iv0.step == const0_rtx)
2403
          {
2404
            tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2405
            assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2406
                                                  mode_mmax);
2407
            if (assumption == const_true_rtx)
2408
              goto zero_iter_simplify;
2409
            iv0.base = simplify_gen_binary (PLUS, comp_mode,
2410
                                            iv0.base, const1_rtx);
2411
          }
2412
        else
2413
          {
2414
            tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2415
            assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2416
                                                  mode_mmin);
2417
            if (assumption == const_true_rtx)
2418
              goto zero_iter_simplify;
2419
            iv1.base = simplify_gen_binary (PLUS, comp_mode,
2420
                                            iv1.base, constm1_rtx);
2421
          }
2422
 
2423
        if (assumption != const0_rtx)
2424
          desc->noloop_assumptions =
2425
                  alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2426
        cond = (cond == LT) ? LE : LEU;
2427
 
2428
        /* It will be useful to be able to tell the difference once more in
2429
           LE -> NE reduction.  */
2430
        was_sharp = true;
2431
        break;
2432
      default: ;
2433
    }
2434
 
2435
  /* Take care of trivially infinite loops.  */
2436
  if (cond != NE)
2437
    {
2438
      if (iv0.step == const0_rtx)
2439
        {
2440
          tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2441
          if (rtx_equal_p (tmp, mode_mmin))
2442
            {
2443
              desc->infinite =
2444
                      alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2445
              /* Fill in the remaining fields somehow.  */
2446
              goto zero_iter_simplify;
2447
            }
2448
        }
2449
      else
2450
        {
2451
          tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2452
          if (rtx_equal_p (tmp, mode_mmax))
2453
            {
2454
              desc->infinite =
2455
                      alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2456
              /* Fill in the remaining fields somehow.  */
2457
              goto zero_iter_simplify;
2458
            }
2459
        }
2460
    }
2461
 
2462
  /* If we can, we want to take care of NE conditions instead of size
2463
     comparisons, as they are much more friendly (most importantly
2464
     this takes care of special handling of loops with step 1).  We can
2465
     do it if we first check that the upper bound is greater than or equal to
2466
     the lower bound, that their difference is a constant c modulo the step, and that
2467
     there is no overflow.  */
2468
  if (cond != NE)
2469
    {
2470
      if (iv0.step == const0_rtx)
2471
        step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2472
      else
2473
        step = iv0.step;
2474
      delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2475
      delta = lowpart_subreg (mode, delta, comp_mode);
2476
      delta = simplify_gen_binary (UMOD, mode, delta, step);
2477
      may_xform = const0_rtx;
2478
      may_not_xform = const_true_rtx;
2479
 
2480
      if (CONST_INT_P (delta))
2481
        {
2482
          if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2483
            {
2484
              /* A special case.  We have transformed a condition of the form
2485
                 for (i = 0; i < 4; i += 4)
2486
                 into
2487
                 for (i = 0; i <= 3; i += 4)
2488
                 Obviously, if the test for overflow during that transformation
2489
                 passed, we cannot overflow here.  Most importantly, any
2490
                 loop with a sharp end condition and step 1 falls into this
2491
                 category, so handling this case specially is definitely
2492
                 worth the trouble.  */
2493
              may_xform = const_true_rtx;
2494
            }
2495
          else if (iv0.step == const0_rtx)
2496
            {
2497
              bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2498
              bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2499
              bound = lowpart_subreg (mode, bound, comp_mode);
2500
              tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2501
              may_xform = simplify_gen_relational (cond, SImode, mode,
2502
                                                   bound, tmp);
2503
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
2504
                                                       SImode, mode,
2505
                                                       bound, tmp);
2506
            }
2507
          else
2508
            {
2509
              bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2510
              bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2511
              bound = lowpart_subreg (mode, bound, comp_mode);
2512
              tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2513
              may_xform = simplify_gen_relational (cond, SImode, mode,
2514
                                                   tmp, bound);
2515
              may_not_xform = simplify_gen_relational (reverse_condition (cond),
2516
                                                       SImode, mode,
2517
                                                       tmp, bound);
2518
            }
2519
        }
2520
 
2521
      if (may_xform != const0_rtx)
2522
        {
2523
          /* We always perform the transformation, provided that it is not
2524
             completely senseless.  This is OK, as we would need this assumption
2525
             to determine the number of iterations anyway.  */
2526
          if (may_xform != const_true_rtx)
2527
            {
2528
              /* If the step is a power of two and the final value we have
2529
                 computed overflows, the cycle is infinite.  Otherwise it
2530
                 is nontrivial to compute the number of iterations.  */
2531
              if (step_is_pow2)
2532
                desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2533
                                                  desc->infinite);
2534
              else
2535
                desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2536
                                                     desc->assumptions);
2537
            }
2538
 
2539
          /* We are going to lose some information about the upper bound on the
2540
             number of iterations in this step, so record the information
2541
             here.  */
2542
          inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2543
          if (CONST_INT_P (iv1.base))
2544
            up = INTVAL (iv1.base);
2545
          else
2546
            up = INTVAL (mode_mmax) - inc;
2547
          down = INTVAL (CONST_INT_P (iv0.base)
2548
                         ? iv0.base
2549
                         : mode_mmin);
2550
          desc->niter_max = (up - down) / inc + 1;
2551
 
2552
          if (iv0.step == const0_rtx)
2553
            {
2554
              iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2555
              iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2556
            }
2557
          else
2558
            {
2559
              iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2560
              iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2561
            }
2562
 
2563
          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2564
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2565
          assumption = simplify_gen_relational (reverse_condition (cond),
2566
                                                SImode, mode, tmp0, tmp1);
2567
          if (assumption == const_true_rtx)
2568
            goto zero_iter_simplify;
2569
          else if (assumption != const0_rtx)
2570
            desc->noloop_assumptions =
2571
                    alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2572
          cond = NE;
2573
        }
2574
    }
2575
 
2576
  /* Count the number of iterations.  */
2577
  if (cond == NE)
2578
    {
2579
      /* Everything we do here is just arithmetic modulo the size of the mode.  This
2580
         enables more involved computations of the number of iterations
2581
         than in other cases.  First transform the condition into shape
2582
         s * i <> c, with s positive.  */
2583
      iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2584
      iv0.base = const0_rtx;
2585
      iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2586
      iv1.step = const0_rtx;
2587
      if (INTVAL (iv0.step) < 0)
2588
        {
2589
          iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2590
          iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2591
        }
2592
      iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2593
 
2594
      /* Let gcd (s, size of mode) = d.  If d does not divide c, the loop
2595
         is infinite.  Otherwise, the number of iterations is
2596
         (inverse(s/d) * (c/d)) mod (size of mode/d).  */
2597
      s = INTVAL (iv0.step); d = 1;
2598
      while (s % 2 != 1)
2599
        {
2600
          s /= 2;
2601
          d *= 2;
2602
          size--;
2603
        }
2604
      bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
2605
 
2606
      tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2607
      tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2608
      assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2609
      desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2610
 
2611
      tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2612
      inv = inverse (s, size);
2613
      tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
2614
      desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2615
    }
2616
  else
2617
    {
2618
      if (iv1.step == const0_rtx)
2619
        /* Condition of the form a + s * i <= b.
2620
           We must know that b + s does not overflow and that a <= b + s; then we
2621
           can compute the number of iterations as (b + s - a) / s.  (It might
2622
           seem that we in fact could be more clever about testing the b + s
2623
           overflow condition using some information about b - a mod s,
2624
           but it was already taken into account during LE -> NE transform).  */
2625
        {
2626
          step = iv0.step;
2627
          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2628
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2629
 
2630
          bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2631
                                       lowpart_subreg (mode, step,
2632
                                                       comp_mode));
2633
          if (step_is_pow2)
2634
            {
2635
              rtx t0, t1;
2636
 
2637
              /* If s is a power of 2, we know that the loop is infinite if
2638
                 a % s <= b % s and b + s overflows.  */
2639
              assumption = simplify_gen_relational (reverse_condition (cond),
2640
                                                    SImode, mode,
2641
                                                    tmp1, bound);
2642
 
2643
              t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2644
              t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2645
              tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2646
              assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2647
              desc->infinite =
2648
                      alloc_EXPR_LIST (0, assumption, desc->infinite);
2649
            }
2650
          else
2651
            {
2652
              assumption = simplify_gen_relational (cond, SImode, mode,
2653
                                                    tmp1, bound);
2654
              desc->assumptions =
2655
                      alloc_EXPR_LIST (0, assumption, desc->assumptions);
2656
            }
2657
 
2658
          tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2659
          tmp = lowpart_subreg (mode, tmp, comp_mode);
2660
          assumption = simplify_gen_relational (reverse_condition (cond),
2661
                                                SImode, mode, tmp0, tmp);
2662
 
2663
          delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2664
          delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2665
        }
2666
      else
2667
        {
2668
          /* Condition of the form a <= b - s * i.
2669
             We must know that a - s does not overflow and that a - s <= b; then
2670
             we can again compute the number of iterations as (b - (a - s)) / s.  */
2671
          step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2672
          tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2673
          tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2674
 
2675
          bound = simplify_gen_binary (PLUS, mode, mode_mmin,
2676
                                       lowpart_subreg (mode, step, comp_mode));
2677
          if (step_is_pow2)
2678
            {
2679
              rtx t0, t1;
2680
 
2681
              /* If s is a power of 2, we know that the loop is infinite if
2682
                 a % s <= b % s and a - s overflows.  */
2683
              assumption = simplify_gen_relational (reverse_condition (cond),
2684
                                                    SImode, mode,
2685
                                                    bound, tmp0);
2686
 
2687
              t0 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp0), step);
2688
              t1 = simplify_gen_binary (UMOD, mode, copy_rtx (tmp1), step);
2689
              tmp = simplify_gen_relational (cond, SImode, mode, t0, t1);
2690
              assumption = simplify_gen_binary (AND, SImode, assumption, tmp);
2691
              desc->infinite =
2692
                      alloc_EXPR_LIST (0, assumption, desc->infinite);
2693
            }
2694
          else
2695
            {
2696
              assumption = simplify_gen_relational (cond, SImode, mode,
2697
                                                    bound, tmp0);
2698
              desc->assumptions =
2699
                      alloc_EXPR_LIST (0, assumption, desc->assumptions);
2700
            }
2701
 
2702
          tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2703
          tmp = lowpart_subreg (mode, tmp, comp_mode);
2704
          assumption = simplify_gen_relational (reverse_condition (cond),
2705
                                                SImode, mode,
2706
                                                tmp, tmp1);
2707
          delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2708
          delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2709
        }
2710
      if (assumption == const_true_rtx)
2711
        goto zero_iter_simplify;
2712
      else if (assumption != const0_rtx)
2713
        desc->noloop_assumptions =
2714
                alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2715
      delta = simplify_gen_binary (UDIV, mode, delta, step);
2716
      desc->niter_expr = delta;
2717
    }
2718
 
2719
  old_niter = desc->niter_expr;
2720
 
2721
  simplify_using_initial_values (loop, AND, &desc->assumptions);
2722
  if (desc->assumptions
2723
      && XEXP (desc->assumptions, 0) == const0_rtx)
2724
    goto fail;
2725
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2726
  simplify_using_initial_values (loop, IOR, &desc->infinite);
2727
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2728
 
2729
  /* Rerun the simplification.  Consider code (created by copying loop headers)
2730
 
2731
     i = 0;
2732
 
2733
     if (0 < n)
2734
       {
2735
         do
2736
           {
2737
             i++;
2738
           } while (i < n);
2739
       }
2740
 
2741
    The first pass determines that i = 0; the second pass uses this to eliminate
2742
    the noloop assumption.  */
2743
 
2744
  simplify_using_initial_values (loop, AND, &desc->assumptions);
2745
  if (desc->assumptions
2746
      && XEXP (desc->assumptions, 0) == const0_rtx)
2747
    goto fail;
2748
  simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2749
  simplify_using_initial_values (loop, IOR, &desc->infinite);
2750
  simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2751
 
2752
  if (desc->noloop_assumptions
2753
      && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2754
    goto zero_iter;
2755
 
2756
  if (CONST_INT_P (desc->niter_expr))
2757
    {
2758
      unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2759
 
2760
      desc->const_iter = true;
2761
      desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2762
    }
2763
  else
2764
    {
2765
      if (!desc->niter_max)
2766
        desc->niter_max = determine_max_iter (loop, desc, old_niter);
2767
 
2768
      /* simplify_using_initial_values does a copy propagation on the registers
2769
         in the expression for the number of iterations.  This prolongs the live
2770
         ranges of registers and increases register pressure, and usually
2771
         brings no gain (and if it happens to, the cse pass will take care
2772
         of it anyway).  So prevent this behavior, unless it enabled us to
2773
         derive that the number of iterations is a constant.  */
2774
      desc->niter_expr = old_niter;
2775
    }
2776
 
2777
  return;
2778
 
2779
zero_iter_simplify:
2780
  /* Simplify the assumptions.  */
2781
  simplify_using_initial_values (loop, AND, &desc->assumptions);
2782
  if (desc->assumptions
2783
      && XEXP (desc->assumptions, 0) == const0_rtx)
2784
    goto fail;
2785
  simplify_using_initial_values (loop, IOR, &desc->infinite);
2786
 
2787
  /* Fallthru.  */
2788
zero_iter:
2789
  desc->const_iter = true;
2790
  desc->niter = 0;
2791
  desc->niter_max = 0;
2792
  desc->noloop_assumptions = NULL_RTX;
2793
  desc->niter_expr = const0_rtx;
2794
  return;
2795
 
2796
fail:
2797
  desc->simple_p = false;
2798
  return;
2799
}
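
/* A minimal standalone sketch of the NE-case count used above: for a counter
   that starts at 0, advances by S each iteration and exits when it equals C,
   everything modulo 2^32, let d = gcd (S, 2^32), a power of two.  If d does
   not divide C the exit is never reached; otherwise the count is
   (C/d) * inverse (S/d) taken modulo 2^32/d.  The helpers below are
   illustrative stand-ins, not GCC's inverse ().  */

#include <stdint.h>

/* Inverse of the odd number A modulo 2^64 by Newton iteration; reducing the
   result modulo a smaller power of two just truncates it.  */
static uint64_t
inverse_mod_pow2 (uint64_t a)
{
  uint64_t x = a;               /* correct to 3 bits for odd A  */
  int i;

  for (i = 0; i < 5; i++)       /* each step doubles the number of valid bits  */
    x *= 2 - a * x;
  return x;
}

/* Number of iterations of "for (i = 0; (uint32_t) (s * i) != c; i++)",
   or (uint64_t) -1 as a sentinel if the loop never terminates.  */
static uint64_t
ne_loop_niter_32 (uint32_t s, uint32_t c)
{
  uint32_t d = 1;
  int bits = 32;
  uint64_t mask;

  if (s == 0)                   /* degenerate: the counter never moves  */
    return c == 0 ? 0 : (uint64_t) -1;

  while ((s & 1) == 0)          /* split off d, the power of two in S  */
    {
      s >>= 1;
      d <<= 1;
      bits--;
    }

  if (c % d != 0)
    return (uint64_t) -1;       /* d does not divide C: infinite loop  */

  mask = ((uint64_t) 1 << bits) - 1;
  return ((c / d) * inverse_mod_pow2 (s)) & mask;
}

/* E.g. ne_loop_niter_32 (4, 12) == 3, matching
   for (i = 0; 4 * i != 12; i++).  */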
2800
 
2801
/* Checks whether E is a simple exit from LOOP and stores its description
2802
   into DESC.  */
2803
 
2804
static void
2805
check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2806
{
2807
  basic_block exit_bb;
2808
  rtx condition, at;
2809
  edge ein;
2810
 
2811
  exit_bb = e->src;
2812
  desc->simple_p = false;
2813
 
2814
  /* It must belong directly to the loop.  */
2815
  if (exit_bb->loop_father != loop)
2816
    return;
2817
 
2818
  /* It must be tested (at least) once during any iteration.  */
2819
  if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2820
    return;
2821
 
2822
  /* It must end in a simple conditional jump.  */
2823
  if (!any_condjump_p (BB_END (exit_bb)))
2824
    return;
2825
 
2826
  ein = EDGE_SUCC (exit_bb, 0);
2827
  if (ein == e)
2828
    ein = EDGE_SUCC (exit_bb, 1);
2829
 
2830
  desc->out_edge = e;
2831
  desc->in_edge = ein;
2832
 
2833
  /* Test whether the condition is suitable.  */
2834
  if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2835
    return;
2836
 
2837
  if (ein->flags & EDGE_FALLTHRU)
2838
    {
2839
      condition = reversed_condition (condition);
2840
      if (!condition)
2841
        return;
2842
    }
2843
 
2844
  /* Check that we are able to determine number of iterations and fill
2845
     in information about it.  */
2846
  iv_number_of_iterations (loop, at, condition, desc);
2847
}
2848
 
2849
/* Finds a simple exit of LOOP and stores its description into DESC.  */
2850
 
2851
void
2852
find_simple_exit (struct loop *loop, struct niter_desc *desc)
2853
{
2854
  unsigned i;
2855
  basic_block *body;
2856
  edge e;
2857
  struct niter_desc act;
2858
  bool any = false;
2859
  edge_iterator ei;
2860
 
2861
  desc->simple_p = false;
2862
  body = get_loop_body (loop);
2863
 
2864
  for (i = 0; i < loop->num_nodes; i++)
2865
    {
2866
      FOR_EACH_EDGE (e, ei, body[i]->succs)
2867
        {
2868
          if (flow_bb_inside_loop_p (loop, e->dest))
2869
            continue;
2870
 
2871
          check_simple_exit (loop, e, &act);
2872
          if (!act.simple_p)
2873
            continue;
2874
 
2875
          if (!any)
2876
            any = true;
2877
          else
2878
            {
2879
              /* Prefer constant iteration counts; the fewer the better.  */
2880
              if (!act.const_iter
2881
                  || (desc->const_iter && act.niter >= desc->niter))
2882
                continue;
2883
 
2884
              /* Also, if the current exit may be infinite while the old one
2885
                 may not, prefer the old one.  */
2886
              if (act.infinite && !desc->infinite)
2887
                continue;
2888
            }
2889
 
2890
          *desc = act;
2891
        }
2892
    }
2893
 
2894
  if (dump_file)
2895
    {
2896
      if (desc->simple_p)
2897
        {
2898
          fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2899
          fprintf (dump_file, "  simple exit %d -> %d\n",
2900
                   desc->out_edge->src->index,
2901
                   desc->out_edge->dest->index);
2902
          if (desc->assumptions)
2903
            {
2904
              fprintf (dump_file, "  assumptions: ");
2905
              print_rtl (dump_file, desc->assumptions);
2906
              fprintf (dump_file, "\n");
2907
            }
2908
          if (desc->noloop_assumptions)
2909
            {
2910
              fprintf (dump_file, "  does not roll if: ");
2911
              print_rtl (dump_file, desc->noloop_assumptions);
2912
              fprintf (dump_file, "\n");
2913
            }
2914
          if (desc->infinite)
2915
            {
2916
              fprintf (dump_file, "  infinite if: ");
2917
              print_rtl (dump_file, desc->infinite);
2918
              fprintf (dump_file, "\n");
2919
            }
2920
 
2921
          fprintf (dump_file, "  number of iterations: ");
2922
          print_rtl (dump_file, desc->niter_expr);
2923
          fprintf (dump_file, "\n");
2924
 
2925
          fprintf (dump_file, "  upper bound: ");
2926
          fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2927
          fprintf (dump_file, "\n");
2928
        }
2929
      else
2930
        fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2931
    }
2932
 
2933
  free (body);
2934
}
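
/* A minimal sketch of the exit-selection preference used above: a newly
   analysed exit replaces the current best one only if it has a constant and
   strictly smaller iteration count, and a possibly-infinite exit never
   displaces one that is known finite.  The struct is a cut-down, illustrative
   stand-in for struct niter_desc.  */

#include <stdbool.h>

struct exit_summary
{
  bool const_iter;                  /* iteration count is a constant  */
  unsigned long long niter;         /* that constant, if so  */
  bool may_be_infinite;             /* a nonempty "infinite" list  */
};

static bool
prefer_new_exit (const struct exit_summary *best,
                 const struct exit_summary *act)
{
  if (!act->const_iter
      || (best->const_iter && act->niter >= best->niter))
    return false;
  if (act->may_be_infinite && !best->may_be_infinite)
    return false;
  return true;
}

/* The two tests mirror the two "continue" checks in the loop above.  */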
2935
 
2936
/* Creates a simple loop description of LOOP if it was not computed
2937
   already.  */
2938
 
2939
struct niter_desc *
2940
get_simple_loop_desc (struct loop *loop)
2941
{
2942
  struct niter_desc *desc = simple_loop_desc (loop);
2943
 
2944
  if (desc)
2945
    return desc;
2946
 
2947
  /* At least desc->infinite is not always initialized by
2948
     find_simple_exit.  */
2949
  desc = XCNEW (struct niter_desc);
2950
  iv_analysis_loop_init (loop);
2951
  find_simple_exit (loop, desc);
2952
  loop->aux = desc;
2953
 
2954
  if (desc->simple_p && (desc->assumptions || desc->infinite))
2955
    {
2956
      const char *wording;
2957
 
2958
      /* Assume that no overflow happens and that the loop is finite.
2959
         We already warned at the tree level if we ran optimizations there.  */
2960
      if (!flag_tree_loop_optimize && warn_unsafe_loop_optimizations)
2961
        {
2962
          if (desc->infinite)
2963
            {
2964
              wording =
2965
                flag_unsafe_loop_optimizations
2966
                ? N_("assuming that the loop is not infinite")
2967
                : N_("cannot optimize possibly infinite loops");
2968
              warning (OPT_Wunsafe_loop_optimizations, "%s",
2969
                       gettext (wording));
2970
            }
2971
          if (desc->assumptions)
2972
            {
2973
              wording =
2974
                flag_unsafe_loop_optimizations
2975
                ? N_("assuming that the loop counter does not overflow")
2976
                : N_("cannot optimize loop, the loop counter may overflow");
2977
              warning (OPT_Wunsafe_loop_optimizations, "%s",
2978
                       gettext (wording));
2979
            }
2980
        }
2981
 
2982
      if (flag_unsafe_loop_optimizations)
2983
        {
2984
          desc->assumptions = NULL_RTX;
2985
          desc->infinite = NULL_RTX;
2986
        }
2987
    }
2988
 
2989
  return desc;
2990
}
2991
 
2992
/* Releases simple loop description for LOOP.  */
2993
 
2994
void
2995
free_simple_loop_desc (struct loop *loop)
2996
{
2997
  struct niter_desc *desc = simple_loop_desc (loop);
2998
 
2999
  if (!desc)
3000
    return;
3001
 
3002
  free (desc);
3003
  loop->aux = NULL;
3004
}
