OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

[/] [scarts/] [trunk/] [toolchain/] [scarts-gcc/] [gcc-4.1.1/] [gcc/] [fold-const.c] - Blame information for rev 20

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 12 jlechner
/* Fold a constant sub-tree into a single node for C-compiler
2
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 2, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING.  If not, write to the Free
19
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20
02110-1301, USA.  */
21
 
22
/*@@ This file should be rewritten to use an arbitrary precision
23
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25
  @@ The routines that translate from the ap rep should
26
  @@ warn if precision et. al. is lost.
27
  @@ This would also make life easier when this technology is used
28
  @@ for cross-compilers.  */
29
 
30
/* The entry points in this file are fold, size_int_wide, size_binop
31
   and force_fit_type.
32
 
33
   fold takes a tree as argument and returns a simplified tree.
34
 
35
   size_binop takes a tree code for an arithmetic operation
36
   and two operands that are trees, and produces a tree for the
37
   result, assuming the type comes from `sizetype'.
38
 
39
   size_int takes an integer value, and creates a tree constant
40
   with type from `sizetype'.
41
 
42
   force_fit_type takes a constant, an overflowable flag and prior
43
   overflow indicators.  It forces the value to fit the type and sets
44
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
45
 
46
#include "config.h"
47
#include "system.h"
48
#include "coretypes.h"
49
#include "tm.h"
50
#include "flags.h"
51
#include "tree.h"
52
#include "real.h"
53
#include "rtl.h"
54
#include "expr.h"
55
#include "tm_p.h"
56
#include "toplev.h"
57
#include "ggc.h"
58
#include "hashtab.h"
59
#include "langhooks.h"
60
#include "md5.h"
61
 
62
/* The following constants represent a bit based encoding of GCC's
63
   comparison operators.  This encoding simplifies transformations
64
   on relational comparison operators, such as AND and OR.  */
65
enum comparison_code {
66
  COMPCODE_FALSE = 0,
67
  COMPCODE_LT = 1,
68
  COMPCODE_EQ = 2,
69
  COMPCODE_LE = 3,
70
  COMPCODE_GT = 4,
71
  COMPCODE_LTGT = 5,
72
  COMPCODE_GE = 6,
73
  COMPCODE_ORD = 7,
74
  COMPCODE_UNORD = 8,
75
  COMPCODE_UNLT = 9,
76
  COMPCODE_UNEQ = 10,
77
  COMPCODE_UNLE = 11,
78
  COMPCODE_UNGT = 12,
79
  COMPCODE_NE = 13,
80
  COMPCODE_UNGE = 14,
81
  COMPCODE_TRUE = 15
82
};
83
 
84
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86
static bool negate_mathfn_p (enum built_in_function);
87
static bool negate_expr_p (tree);
88
static tree negate_expr (tree);
89
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90
static tree associate_trees (tree, tree, enum tree_code, tree);
91
static tree const_binop (enum tree_code, tree, tree, int);
92
static enum comparison_code comparison_to_compcode (enum tree_code);
93
static enum tree_code compcode_to_comparison (enum comparison_code);
94
static tree combine_comparisons (enum tree_code, enum tree_code,
95
                                 enum tree_code, tree, tree, tree);
96
static int truth_value_p (enum tree_code);
97
static int operand_equal_for_comparison_p (tree, tree, tree);
98
static int twoval_comparison_p (tree, tree *, tree *, int *);
99
static tree eval_subst (tree, tree, tree, tree, tree);
100
static tree pedantic_omit_one_operand (tree, tree, tree);
101
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102
static tree make_bit_field_ref (tree, tree, int, int, int);
103
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105
                                    enum machine_mode *, int *, int *,
106
                                    tree *, tree *);
107
static int all_ones_mask_p (tree, int);
108
static tree sign_bit_p (tree, tree);
109
static int simple_operand_p (tree);
110
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111
static tree make_range (tree, int *, tree *, tree *);
112
static tree build_range_check (tree, tree, int, tree, tree);
113
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
114
                         tree);
115
static tree fold_range_test (enum tree_code, tree, tree, tree);
116
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117
static tree unextend (tree, int, int, tree);
118
static tree fold_truthop (enum tree_code, tree, tree, tree);
119
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120
static tree extract_muldiv (tree, tree, enum tree_code, tree);
121
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122
static int multiple_of_p (tree, tree, tree);
123
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
124
                                                 tree, tree,
125
                                                 tree, tree, int);
126
static bool fold_real_zero_addition_p (tree, tree, int);
127
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
128
                                 tree, tree, tree);
129
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130
static tree fold_div_compare (enum tree_code, tree, tree, tree);
131
static bool reorder_operands_p (tree, tree);
132
static tree fold_negate_const (tree, tree);
133
static tree fold_not_const (tree, tree);
134
static tree fold_relational_const (enum tree_code, tree, tree, tree);
135
 
136
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
138
   and SUM1.  Then this yields nonzero if overflow occurred during the
139
   addition.
140
 
141
   Overflow occurs if A and B have the same sign, but A and SUM differ in
142
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
143
   sign.  */
144
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
145
 
146
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147
   We do that by representing the two-word integer in 4 words, with only
148
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */
150
 
151
#define LOWPART(x) \
152
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153
#define HIGHPART(x) \
154
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
156
 
157
/* Unpack a two-word integer into 4 words.
158
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159
   WORDS points to the array of HOST_WIDE_INTs.  */
160
 
161
static void
162
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
163
{
164
  words[0] = LOWPART (low);
165
  words[1] = HIGHPART (low);
166
  words[2] = LOWPART (hi);
167
  words[3] = HIGHPART (hi);
168
}
169
 
170
/* Pack an array of 4 words into a two-word integer.
171
   WORDS points to the array of words.
172
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
173
 
174
static void
175
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
176
        HOST_WIDE_INT *hi)
177
{
178
  *low = words[0] + words[1] * BASE;
179
  *hi = words[2] + words[3] * BASE;
180
}
181
 
182
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
183
   in overflow of the value, when >0 we are only interested in signed
184
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
185
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
186
   indicates whether constant overflow has already occurred.  We force
187
   T's value to be within range of T's type (by setting to 0 or 1 all
188
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
189
        OVERFLOWED is nonzero,
190
        or OVERFLOWABLE is >0 and signed overflow occurs
191
        or OVERFLOWABLE is <0 and any overflow occurs
192
   We set TREE_CONSTANT_OVERFLOWED if,
193
        CONST_OVERFLOWED is nonzero
194
        or we set TREE_OVERFLOWED.
195
  We return either the original T, or a copy.  */
196
 
197
tree
198
force_fit_type (tree t, int overflowable,
199
                bool overflowed, bool overflowed_const)
200
{
201
  unsigned HOST_WIDE_INT low;
202
  HOST_WIDE_INT high;
203
  unsigned int prec;
204
  int sign_extended_type;
205
 
206
  gcc_assert (TREE_CODE (t) == INTEGER_CST);
207
 
208
  low = TREE_INT_CST_LOW (t);
209
  high = TREE_INT_CST_HIGH (t);
210
 
211
  if (POINTER_TYPE_P (TREE_TYPE (t))
212
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
213
    prec = POINTER_SIZE;
214
  else
215
    prec = TYPE_PRECISION (TREE_TYPE (t));
216
  /* Size types *are* sign extended.  */
217
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
220
 
221
  /* First clear all bits that are beyond the type's precision.  */
222
 
223
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224
    ;
225
  else if (prec > HOST_BITS_PER_WIDE_INT)
226
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227
  else
228
    {
229
      high = 0;
230
      if (prec < HOST_BITS_PER_WIDE_INT)
231
        low &= ~((HOST_WIDE_INT) (-1) << prec);
232
    }
233
 
234
  if (!sign_extended_type)
235
    /* No sign extension */;
236
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237
    /* Correct width already.  */;
238
  else if (prec > HOST_BITS_PER_WIDE_INT)
239
    {
240
      /* Sign extend top half? */
241
      if (high & ((unsigned HOST_WIDE_INT)1
242
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
244
    }
245
  else if (prec == HOST_BITS_PER_WIDE_INT)
246
    {
247
      if ((HOST_WIDE_INT)low < 0)
248
        high = -1;
249
    }
250
  else
251
    {
252
      /* Sign extend bottom half? */
253
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254
        {
255
          high = -1;
256
          low |= (HOST_WIDE_INT)(-1) << prec;
257
        }
258
    }
259
 
260
  /* If the value changed, return a new node.  */
261
  if (overflowed || overflowed_const
262
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
263
    {
264
      t = build_int_cst_wide (TREE_TYPE (t), low, high);
265
 
266
      if (overflowed
267
          || overflowable < 0
268
          || (overflowable > 0 && sign_extended_type))
269
        {
270
          t = copy_node (t);
271
          TREE_OVERFLOW (t) = 1;
272
          TREE_CONSTANT_OVERFLOW (t) = 1;
273
        }
274
      else if (overflowed_const)
275
        {
276
          t = copy_node (t);
277
          TREE_CONSTANT_OVERFLOW (t) = 1;
278
        }
279
    }
280
 
281
  return t;
282
}
283
 
284
/* Add two doubleword integers with doubleword result.
285
   Each argument is given as two `HOST_WIDE_INT' pieces.
286
   One argument is L1 and H1; the other, L2 and H2.
287
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
288
 
289
int
290
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
293
{
294
  unsigned HOST_WIDE_INT l;
295
  HOST_WIDE_INT h;
296
 
297
  l = l1 + l2;
298
  h = h1 + h2 + (l < l1);
299
 
300
  *lv = l;
301
  *hv = h;
302
  return OVERFLOW_SUM_SIGN (h1, h2, h);
303
}
304
 
305
/* Negate a doubleword integer with doubleword result.
306
   Return nonzero if the operation overflows, assuming it's signed.
307
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
309
 
310
int
311
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
313
{
314
  if (l1 == 0)
315
    {
316
      *lv = 0;
317
      *hv = - h1;
318
      return (*hv & h1) < 0;
319
    }
320
  else
321
    {
322
      *lv = -l1;
323
      *hv = ~h1;
324
      return 0;
325
    }
326
}
327
 
328
/* Multiply two doubleword integers with doubleword result.
329
   Return nonzero if the operation overflows, assuming it's signed.
330
   Each argument is given as two `HOST_WIDE_INT' pieces.
331
   One argument is L1 and H1; the other, L2 and H2.
332
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
333
 
334
int
335
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
338
{
339
  HOST_WIDE_INT arg1[4];
340
  HOST_WIDE_INT arg2[4];
341
  HOST_WIDE_INT prod[4 * 2];
342
  unsigned HOST_WIDE_INT carry;
343
  int i, j, k;
344
  unsigned HOST_WIDE_INT toplow, neglow;
345
  HOST_WIDE_INT tophigh, neghigh;
346
 
347
  encode (arg1, l1, h1);
348
  encode (arg2, l2, h2);
349
 
350
  memset (prod, 0, sizeof prod);
351
 
352
  for (i = 0; i < 4; i++)
353
    {
354
      carry = 0;
355
      for (j = 0; j < 4; j++)
356
        {
357
          k = i + j;
358
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
359
          carry += arg1[i] * arg2[j];
360
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
361
          carry += prod[k];
362
          prod[k] = LOWPART (carry);
363
          carry = HIGHPART (carry);
364
        }
365
      prod[i + 4] = carry;
366
    }
367
 
368
  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */
369
 
370
  /* Check for overflow by calculating the top half of the answer in full;
371
     it should agree with the low half's sign bit.  */
372
  decode (prod + 4, &toplow, &tophigh);
373
  if (h1 < 0)
374
    {
375
      neg_double (l2, h2, &neglow, &neghigh);
376
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
377
    }
378
  if (h2 < 0)
379
    {
380
      neg_double (l1, h1, &neglow, &neghigh);
381
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
382
    }
383
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
384
}
385
 
386
/* Shift the doubleword integer in L1, H1 left by COUNT places
387
   keeping only PREC bits of result.
388
   Shift right if COUNT is negative.
389
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
391
 
392
void
393
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394
               HOST_WIDE_INT count, unsigned int prec,
395
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
396
{
397
  unsigned HOST_WIDE_INT signmask;
398
 
399
  if (count < 0)
400
    {
401
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
402
      return;
403
    }
404
 
405
  if (SHIFT_COUNT_TRUNCATED)
406
    count %= prec;
407
 
408
  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
409
    {
410
      /* Shifting by the host word size is undefined according to the
411
         ANSI standard, so we must handle this as a special case.  */
412
      *hv = 0;
413
      *lv = 0;
414
    }
415
  else if (count >= HOST_BITS_PER_WIDE_INT)
416
    {
417
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
418
      *lv = 0;
419
    }
420
  else
421
    {
422
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
424
      *lv = l1 << count;
425
    }
426
 
427
  /* Sign extend all bits that are beyond the precision.  */
428
 
429
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
430
                ? ((unsigned HOST_WIDE_INT) *hv
431
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432
                : (*lv >> (prec - 1))) & 1);
433
 
434
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
435
    ;
436
  else if (prec >= HOST_BITS_PER_WIDE_INT)
437
    {
438
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
440
    }
441
  else
442
    {
443
      *hv = signmask;
444
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445
      *lv |= signmask << prec;
446
    }
447
}
448
 
449
/* Shift the doubleword integer in L1, H1 right by COUNT places
450
   keeping only PREC bits of result.  COUNT must be positive.
451
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
453
 
454
void
455
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456
               HOST_WIDE_INT count, unsigned int prec,
457
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
458
               int arith)
459
{
460
  unsigned HOST_WIDE_INT signmask;
461
 
462
  signmask = (arith
463
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
464
              : 0);
465
 
466
  if (SHIFT_COUNT_TRUNCATED)
467
    count %= prec;
468
 
469
  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
470
    {
471
      /* Shifting by the host word size is undefined according to the
472
         ANSI standard, so we must handle this as a special case.  */
473
      *hv = 0;
474
      *lv = 0;
475
    }
476
  else if (count >= HOST_BITS_PER_WIDE_INT)
477
    {
478
      *hv = 0;
479
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
480
    }
481
  else
482
    {
483
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
484
      *lv = ((l1 >> count)
485
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
486
    }
487
 
488
  /* Zero / sign extend all bits that are beyond the precision.  */
489
 
490
  if (count >= (HOST_WIDE_INT)prec)
491
    {
492
      *hv = signmask;
493
      *lv = signmask;
494
    }
495
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
496
    ;
497
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
498
    {
499
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
501
    }
502
  else
503
    {
504
      *hv = signmask;
505
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506
      *lv |= signmask << (prec - count);
507
    }
508
}
509
 
510
/* Rotate the doubleword integer in L1, H1 left by COUNT places
511
   keeping only PREC bits of result.
512
   Rotate right if COUNT is negative.
513
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
514
 
515
void
516
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517
                HOST_WIDE_INT count, unsigned int prec,
518
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
519
{
520
  unsigned HOST_WIDE_INT s1l, s2l;
521
  HOST_WIDE_INT s1h, s2h;
522
 
523
  count %= prec;
524
  if (count < 0)
525
    count += prec;
526
 
527
  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
529
  *lv = s1l | s2l;
530
  *hv = s1h | s2h;
531
}
532
 
533
/* Rotate the doubleword integer in L1, H1 left by COUNT places
534
   keeping only PREC bits of result.  COUNT must be positive.
535
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
536
 
537
void
538
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539
                HOST_WIDE_INT count, unsigned int prec,
540
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
541
{
542
  unsigned HOST_WIDE_INT s1l, s2l;
543
  HOST_WIDE_INT s1h, s2h;
544
 
545
  count %= prec;
546
  if (count < 0)
547
    count += prec;
548
 
549
  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551
  *lv = s1l | s2l;
552
  *hv = s1h | s2h;
553
}
554
 
555
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557
   CODE is a tree code for a kind of division, one of
558
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
559
   or EXACT_DIV_EXPR
560
   It controls how the quotient is rounded to an integer.
561
   Return nonzero if the operation overflows.
562
   UNS nonzero says do unsigned division.  */
563
 
564
int
565
div_and_round_double (enum tree_code code, int uns,
566
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567
                      HOST_WIDE_INT hnum_orig,
568
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569
                      HOST_WIDE_INT hden_orig,
570
                      unsigned HOST_WIDE_INT *lquo,
571
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
572
                      HOST_WIDE_INT *hrem)
573
{
574
  int quo_neg = 0;
575
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
576
  HOST_WIDE_INT den[4], quo[4];
577
  int i, j;
578
  unsigned HOST_WIDE_INT work;
579
  unsigned HOST_WIDE_INT carry = 0;
580
  unsigned HOST_WIDE_INT lnum = lnum_orig;
581
  HOST_WIDE_INT hnum = hnum_orig;
582
  unsigned HOST_WIDE_INT lden = lden_orig;
583
  HOST_WIDE_INT hden = hden_orig;
584
  int overflow = 0;
585
 
586
  if (hden == 0 && lden == 0)
587
    overflow = 1, lden = 1;
588
 
589
  /* Calculate quotient sign and convert operands to unsigned.  */
590
  if (!uns)
591
    {
592
      if (hnum < 0)
593
        {
594
          quo_neg = ~ quo_neg;
595
          /* (minimum integer) / (-1) is the only overflow case.  */
596
          if (neg_double (lnum, hnum, &lnum, &hnum)
597
              && ((HOST_WIDE_INT) lden & hden) == -1)
598
            overflow = 1;
599
        }
600
      if (hden < 0)
601
        {
602
          quo_neg = ~ quo_neg;
603
          neg_double (lden, hden, &lden, &hden);
604
        }
605
    }
606
 
607
  if (hnum == 0 && hden == 0)
608
    {                           /* single precision */
609
      *hquo = *hrem = 0;
610
      /* This unsigned division rounds toward zero.  */
611
      *lquo = lnum / lden;
612
      goto finish_up;
613
    }
614
 
615
  if (hnum == 0)
616
    {                           /* trivial case: dividend < divisor */
617
      /* hden != 0 already checked.  */
618
      *hquo = *lquo = 0;
619
      *hrem = hnum;
620
      *lrem = lnum;
621
      goto finish_up;
622
    }
623
 
624
  memset (quo, 0, sizeof quo);
625
 
626
  memset (num, 0, sizeof num);   /* to zero 9th element */
627
  memset (den, 0, sizeof den);
628
 
629
  encode (num, lnum, hnum);
630
  encode (den, lden, hden);
631
 
632
  /* Special code for when the divisor < BASE.  */
633
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
634
    {
635
      /* hnum != 0 already checked.  */
636
      for (i = 4 - 1; i >= 0; i--)
637
        {
638
          work = num[i] + carry * BASE;
639
          quo[i] = work / lden;
640
          carry = work % lden;
641
        }
642
    }
643
  else
644
    {
645
      /* Full double precision division,
646
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
647
      int num_hi_sig, den_hi_sig;
648
      unsigned HOST_WIDE_INT quo_est, scale;
649
 
650
      /* Find the highest nonzero divisor digit.  */
651
      for (i = 4 - 1;; i--)
652
        if (den[i] != 0)
653
          {
654
            den_hi_sig = i;
655
            break;
656
          }
657
 
658
      /* Insure that the first digit of the divisor is at least BASE/2.
659
         This is required by the quotient digit estimation algorithm.  */
660
 
661
      scale = BASE / (den[den_hi_sig] + 1);
662
      if (scale > 1)
663
        {               /* scale divisor and dividend */
664
          carry = 0;
665
          for (i = 0; i <= 4 - 1; i++)
666
            {
667
              work = (num[i] * scale) + carry;
668
              num[i] = LOWPART (work);
669
              carry = HIGHPART (work);
670
            }
671
 
672
          num[4] = carry;
673
          carry = 0;
674
          for (i = 0; i <= 4 - 1; i++)
675
            {
676
              work = (den[i] * scale) + carry;
677
              den[i] = LOWPART (work);
678
              carry = HIGHPART (work);
679
              if (den[i] != 0) den_hi_sig = i;
680
            }
681
        }
682
 
683
      num_hi_sig = 4;
684
 
685
      /* Main loop */
686
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
687
        {
688
          /* Guess the next quotient digit, quo_est, by dividing the first
689
             two remaining dividend digits by the high order quotient digit.
690
             quo_est is never low and is at most 2 high.  */
691
          unsigned HOST_WIDE_INT tmp;
692
 
693
          num_hi_sig = i + den_hi_sig + 1;
694
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695
          if (num[num_hi_sig] != den[den_hi_sig])
696
            quo_est = work / den[den_hi_sig];
697
          else
698
            quo_est = BASE - 1;
699
 
700
          /* Refine quo_est so it's usually correct, and at most one high.  */
701
          tmp = work - quo_est * den[den_hi_sig];
702
          if (tmp < BASE
703
              && (den[den_hi_sig - 1] * quo_est
704
                  > (tmp * BASE + num[num_hi_sig - 2])))
705
            quo_est--;
706
 
707
          /* Try QUO_EST as the quotient digit, by multiplying the
708
             divisor by QUO_EST and subtracting from the remaining dividend.
709
             Keep in mind that QUO_EST is the I - 1st digit.  */
710
 
711
          carry = 0;
712
          for (j = 0; j <= den_hi_sig; j++)
713
            {
714
              work = quo_est * den[j] + carry;
715
              carry = HIGHPART (work);
716
              work = num[i + j] - LOWPART (work);
717
              num[i + j] = LOWPART (work);
718
              carry += HIGHPART (work) != 0;
719
            }
720
 
721
          /* If quo_est was high by one, then num[i] went negative and
722
             we need to correct things.  */
723
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
724
            {
725
              quo_est--;
726
              carry = 0;         /* add divisor back in */
727
              for (j = 0; j <= den_hi_sig; j++)
728
                {
729
                  work = num[i + j] + den[j] + carry;
730
                  carry = HIGHPART (work);
731
                  num[i + j] = LOWPART (work);
732
                }
733
 
734
              num [num_hi_sig] += carry;
735
            }
736
 
737
          /* Store the quotient digit.  */
738
          quo[i] = quo_est;
739
        }
740
    }
741
 
742
  decode (quo, lquo, hquo);
743
 
744
 finish_up:
745
  /* If result is negative, make it so.  */
746
  if (quo_neg)
747
    neg_double (*lquo, *hquo, lquo, hquo);
748
 
749
  /* Compute trial remainder:  rem = num - (quo * den)  */
750
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751
  neg_double (*lrem, *hrem, lrem, hrem);
752
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
753
 
754
  switch (code)
755
    {
756
    case TRUNC_DIV_EXPR:
757
    case TRUNC_MOD_EXPR:        /* round toward zero */
758
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
759
      return overflow;
760
 
761
    case FLOOR_DIV_EXPR:
762
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
763
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
764
        {
765
          /* quo = quo - 1;  */
766
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
767
                      lquo, hquo);
768
        }
769
      else
770
        return overflow;
771
      break;
772
 
773
    case CEIL_DIV_EXPR:
774
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
775
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
776
        {
777
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
778
                      lquo, hquo);
779
        }
780
      else
781
        return overflow;
782
      break;
783
 
784
    case ROUND_DIV_EXPR:
785
    case ROUND_MOD_EXPR:        /* round to closest integer */
786
      {
787
        unsigned HOST_WIDE_INT labs_rem = *lrem;
788
        HOST_WIDE_INT habs_rem = *hrem;
789
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790
        HOST_WIDE_INT habs_den = hden, htwice;
791
 
792
        /* Get absolute values.  */
793
        if (*hrem < 0)
794
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
795
        if (hden < 0)
796
          neg_double (lden, hden, &labs_den, &habs_den);
797
 
798
        /* If (2 * abs (lrem) >= abs (lden)) */
799
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800
                    labs_rem, habs_rem, &ltwice, &htwice);
801
 
802
        if (((unsigned HOST_WIDE_INT) habs_den
803
             < (unsigned HOST_WIDE_INT) htwice)
804
            || (((unsigned HOST_WIDE_INT) habs_den
805
                 == (unsigned HOST_WIDE_INT) htwice)
806
                && (labs_den < ltwice)))
807
          {
808
            if (*hquo < 0)
809
              /* quo = quo - 1;  */
810
              add_double (*lquo, *hquo,
811
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
812
            else
813
              /* quo = quo + 1; */
814
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
815
                          lquo, hquo);
816
          }
817
        else
818
          return overflow;
819
      }
820
      break;
821
 
822
    default:
823
      gcc_unreachable ();
824
    }
825
 
826
  /* Compute true remainder:  rem = num - (quo * den)  */
827
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828
  neg_double (*lrem, *hrem, lrem, hrem);
829
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
830
  return overflow;
831
}
832
 
833
/* If ARG2 divides ARG1 with zero remainder, carries out the division
834
   of type CODE and returns the quotient.
835
   Otherwise returns NULL_TREE.  */
836
 
837
static tree
838
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
839
{
840
  unsigned HOST_WIDE_INT int1l, int2l;
841
  HOST_WIDE_INT int1h, int2h;
842
  unsigned HOST_WIDE_INT quol, reml;
843
  HOST_WIDE_INT quoh, remh;
844
  tree type = TREE_TYPE (arg1);
845
  int uns = TYPE_UNSIGNED (type);
846
 
847
  int1l = TREE_INT_CST_LOW (arg1);
848
  int1h = TREE_INT_CST_HIGH (arg1);
849
  int2l = TREE_INT_CST_LOW (arg2);
850
  int2h = TREE_INT_CST_HIGH (arg2);
851
 
852
  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853
                        &quol, &quoh, &reml, &remh);
854
  if (remh != 0 || reml != 0)
855
    return NULL_TREE;
856
 
857
  return build_int_cst_wide (type, quol, quoh);
858
}
859
 
860
/* Return true if the built-in mathematical function specified by CODE
861
   is odd, i.e. -f(x) == f(-x).  */
862
 
863
static bool
864
negate_mathfn_p (enum built_in_function code)
865
{
866
  switch (code)
867
    {
868
    case BUILT_IN_ASIN:
869
    case BUILT_IN_ASINF:
870
    case BUILT_IN_ASINL:
871
    case BUILT_IN_ATAN:
872
    case BUILT_IN_ATANF:
873
    case BUILT_IN_ATANL:
874
    case BUILT_IN_SIN:
875
    case BUILT_IN_SINF:
876
    case BUILT_IN_SINL:
877
    case BUILT_IN_TAN:
878
    case BUILT_IN_TANF:
879
    case BUILT_IN_TANL:
880
      return true;
881
 
882
    default:
883
      break;
884
    }
885
  return false;
886
}
887
 
888
/* Check whether we may negate an integer constant T without causing
889
   overflow.  */
890
 
891
bool
892
may_negate_without_overflow_p (tree t)
893
{
894
  unsigned HOST_WIDE_INT val;
895
  unsigned int prec;
896
  tree type;
897
 
898
  gcc_assert (TREE_CODE (t) == INTEGER_CST);
899
 
900
  type = TREE_TYPE (t);
901
  if (TYPE_UNSIGNED (type))
902
    return false;
903
 
904
  prec = TYPE_PRECISION (type);
905
  if (prec > HOST_BITS_PER_WIDE_INT)
906
    {
907
      if (TREE_INT_CST_LOW (t) != 0)
908
        return true;
909
      prec -= HOST_BITS_PER_WIDE_INT;
910
      val = TREE_INT_CST_HIGH (t);
911
    }
912
  else
913
    val = TREE_INT_CST_LOW (t);
914
  if (prec < HOST_BITS_PER_WIDE_INT)
915
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
916
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
917
}
918
 
919
/* Determine whether an expression T can be cheaply negated using
920
   the function negate_expr.  */
921
 
922
static bool
923
negate_expr_p (tree t)
924
{
925
  tree type;
926
 
927
  if (t == 0)
928
    return false;
929
 
930
  type = TREE_TYPE (t);
931
 
932
  STRIP_SIGN_NOPS (t);
933
  switch (TREE_CODE (t))
934
    {
935
    case INTEGER_CST:
936
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
937
        return true;
938
 
939
      /* Check that -CST will not overflow type.  */
940
      return may_negate_without_overflow_p (t);
941
 
942
    case REAL_CST:
943
    case NEGATE_EXPR:
944
      return true;
945
 
946
    case COMPLEX_CST:
947
      return negate_expr_p (TREE_REALPART (t))
948
             && negate_expr_p (TREE_IMAGPART (t));
949
 
950
    case PLUS_EXPR:
951
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
952
        return false;
953
      /* -(A + B) -> (-B) - A.  */
954
      if (negate_expr_p (TREE_OPERAND (t, 1))
955
          && reorder_operands_p (TREE_OPERAND (t, 0),
956
                                 TREE_OPERAND (t, 1)))
957
        return true;
958
      /* -(A + B) -> (-A) - B.  */
959
      return negate_expr_p (TREE_OPERAND (t, 0));
960
 
961
    case MINUS_EXPR:
962
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
963
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
964
             && reorder_operands_p (TREE_OPERAND (t, 0),
965
                                    TREE_OPERAND (t, 1));
966
 
967
    case MULT_EXPR:
968
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
969
        break;
970
 
971
      /* Fall through.  */
972
 
973
    case RDIV_EXPR:
974
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
975
        return negate_expr_p (TREE_OPERAND (t, 1))
976
               || negate_expr_p (TREE_OPERAND (t, 0));
977
      break;
978
 
979
    case NOP_EXPR:
980
      /* Negate -((double)float) as (double)(-float).  */
981
      if (TREE_CODE (type) == REAL_TYPE)
982
        {
983
          tree tem = strip_float_extensions (t);
984
          if (tem != t)
985
            return negate_expr_p (tem);
986
        }
987
      break;
988
 
989
    case CALL_EXPR:
990
      /* Negate -f(x) as f(-x).  */
991
      if (negate_mathfn_p (builtin_mathfn_code (t)))
992
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
993
      break;
994
 
995
    case RSHIFT_EXPR:
996
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
997
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
998
        {
999
          tree op1 = TREE_OPERAND (t, 1);
1000
          if (TREE_INT_CST_HIGH (op1) == 0
1001
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1002
                 == TREE_INT_CST_LOW (op1))
1003
            return true;
1004
        }
1005
      break;
1006
 
1007
    default:
1008
      break;
1009
    }
1010
  return false;
1011
}
1012
 
1013
/* Given T, an expression, return the negation of T.  Allow for T to be
1014
   null, in which case return null.  */
1015
 
1016
static tree
1017
negate_expr (tree t)
1018
{
1019
  tree type;
1020
  tree tem;
1021
 
1022
  if (t == 0)
1023
    return 0;
1024
 
1025
  type = TREE_TYPE (t);
1026
  STRIP_SIGN_NOPS (t);
1027
 
1028
  switch (TREE_CODE (t))
1029
    {
1030
    case INTEGER_CST:
1031
      tem = fold_negate_const (t, type);
1032
      if (! TREE_OVERFLOW (tem)
1033
          || TYPE_UNSIGNED (type)
1034
          || ! flag_trapv)
1035
        return tem;
1036
      break;
1037
 
1038
    case REAL_CST:
1039
      tem = fold_negate_const (t, type);
1040
      /* Two's complement FP formats, such as c4x, may overflow.  */
1041
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1042
        return fold_convert (type, tem);
1043
      break;
1044
 
1045
    case COMPLEX_CST:
1046
      {
1047
        tree rpart = negate_expr (TREE_REALPART (t));
1048
        tree ipart = negate_expr (TREE_IMAGPART (t));
1049
 
1050
        if ((TREE_CODE (rpart) == REAL_CST
1051
             && TREE_CODE (ipart) == REAL_CST)
1052
            || (TREE_CODE (rpart) == INTEGER_CST
1053
                && TREE_CODE (ipart) == INTEGER_CST))
1054
          return build_complex (type, rpart, ipart);
1055
      }
1056
      break;
1057
 
1058
    case NEGATE_EXPR:
1059
      return fold_convert (type, TREE_OPERAND (t, 0));
1060
 
1061
    case PLUS_EXPR:
1062
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1063
        {
1064
          /* -(A + B) -> (-B) - A.  */
1065
          if (negate_expr_p (TREE_OPERAND (t, 1))
1066
              && reorder_operands_p (TREE_OPERAND (t, 0),
1067
                                     TREE_OPERAND (t, 1)))
1068
            {
1069
              tem = negate_expr (TREE_OPERAND (t, 1));
1070
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1071
                                 tem, TREE_OPERAND (t, 0));
1072
              return fold_convert (type, tem);
1073
            }
1074
 
1075
          /* -(A + B) -> (-A) - B.  */
1076
          if (negate_expr_p (TREE_OPERAND (t, 0)))
1077
            {
1078
              tem = negate_expr (TREE_OPERAND (t, 0));
1079
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1080
                                 tem, TREE_OPERAND (t, 1));
1081
              return fold_convert (type, tem);
1082
            }
1083
        }
1084
      break;
1085
 
1086
    case MINUS_EXPR:
1087
      /* - (A - B) -> B - A  */
1088
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1089
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1090
        return fold_convert (type,
1091
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092
                                          TREE_OPERAND (t, 1),
1093
                                          TREE_OPERAND (t, 0)));
1094
      break;
1095
 
1096
    case MULT_EXPR:
1097
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
1098
        break;
1099
 
1100
      /* Fall through.  */
1101
 
1102
    case RDIV_EXPR:
1103
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1104
        {
1105
          tem = TREE_OPERAND (t, 1);
1106
          if (negate_expr_p (tem))
1107
            return fold_convert (type,
1108
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1109
                                              TREE_OPERAND (t, 0),
1110
                                              negate_expr (tem)));
1111
          tem = TREE_OPERAND (t, 0);
1112
          if (negate_expr_p (tem))
1113
            return fold_convert (type,
1114
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1115
                                              negate_expr (tem),
1116
                                              TREE_OPERAND (t, 1)));
1117
        }
1118
      break;
1119
 
1120
    case NOP_EXPR:
1121
      /* Convert -((double)float) into (double)(-float).  */
1122
      if (TREE_CODE (type) == REAL_TYPE)
1123
        {
1124
          tem = strip_float_extensions (t);
1125
          if (tem != t && negate_expr_p (tem))
1126
            return fold_convert (type, negate_expr (tem));
1127
        }
1128
      break;
1129
 
1130
    case CALL_EXPR:
1131
      /* Negate -f(x) as f(-x).  */
1132
      if (negate_mathfn_p (builtin_mathfn_code (t))
1133
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1134
        {
1135
          tree fndecl, arg, arglist;
1136
 
1137
          fndecl = get_callee_fndecl (t);
1138
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1139
          arglist = build_tree_list (NULL_TREE, arg);
1140
          return build_function_call_expr (fndecl, arglist);
1141
        }
1142
      break;
1143
 
1144
    case RSHIFT_EXPR:
1145
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1146
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1147
        {
1148
          tree op1 = TREE_OPERAND (t, 1);
1149
          if (TREE_INT_CST_HIGH (op1) == 0
1150
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1151
                 == TREE_INT_CST_LOW (op1))
1152
            {
1153
              tree ntype = TYPE_UNSIGNED (type)
1154
                           ? lang_hooks.types.signed_type (type)
1155
                           : lang_hooks.types.unsigned_type (type);
1156
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1157
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1158
              return fold_convert (type, temp);
1159
            }
1160
        }
1161
      break;
1162
 
1163
    default:
1164
      break;
1165
    }
1166
 
1167
  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1168
  return fold_convert (type, tem);
1169
}
1170
 
1171
/* Split a tree IN into a constant, literal and variable parts that could be
1172
   combined with CODE to make IN.  "constant" means an expression with
1173
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
1174
   commutative arithmetic operation.  Store the constant part into *CONP,
1175
   the literal in *LITP and return the variable part.  If a part isn't
1176
   present, set it to null.  If the tree does not decompose in this way,
1177
   return the entire tree as the variable part and the other parts as null.
1178
 
1179
   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
1180
   case, we negate an operand that was subtracted.  Except if it is a
1181
   literal for which we use *MINUS_LITP instead.
1182
 
1183
   If NEGATE_P is true, we are negating all of IN, again except a literal
1184
   for which we use *MINUS_LITP instead.
1185
 
1186
   If IN is itself a literal or constant, return it as appropriate.
1187
 
1188
   Note that we do not guarantee that any of the three values will be the
1189
   same type as IN, but they will have the same signedness and mode.  */
1190
 
1191
static tree
1192
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1193
            tree *minus_litp, int negate_p)
1194
{
1195
  tree var = 0;
1196
 
1197
  *conp = 0;
1198
  *litp = 0;
1199
  *minus_litp = 0;
1200
 
1201
  /* Strip any conversions that don't change the machine mode or signedness.  */
1202
  STRIP_SIGN_NOPS (in);
1203
 
1204
  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1205
    *litp = in;
1206
  else if (TREE_CODE (in) == code
1207
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
1208
               /* We can associate addition and subtraction together (even
1209
                  though the C standard doesn't say so) for integers because
1210
                  the value is not affected.  For reals, the value might be
1211
                  affected, so we can't.  */
1212
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1213
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1214
    {
1215
      tree op0 = TREE_OPERAND (in, 0);
1216
      tree op1 = TREE_OPERAND (in, 1);
1217
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1218
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1219
 
1220
      /* First see if either of the operands is a literal, then a constant.  */
1221
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1222
        *litp = op0, op0 = 0;
1223
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1224
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1225
 
1226
      if (op0 != 0 && TREE_CONSTANT (op0))
1227
        *conp = op0, op0 = 0;
1228
      else if (op1 != 0 && TREE_CONSTANT (op1))
1229
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1230
 
1231
      /* If we haven't dealt with either operand, this is not a case we can
1232
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1233
      if (op0 != 0 && op1 != 0)
1234
        var = in;
1235
      else if (op0 != 0)
1236
        var = op0;
1237
      else
1238
        var = op1, neg_var_p = neg1_p;
1239
 
1240
      /* Now do any needed negations.  */
1241
      if (neg_litp_p)
1242
        *minus_litp = *litp, *litp = 0;
1243
      if (neg_conp_p)
1244
        *conp = negate_expr (*conp);
1245
      if (neg_var_p)
1246
        var = negate_expr (var);
1247
    }
1248
  else if (TREE_CONSTANT (in))
1249
    *conp = in;
1250
  else
1251
    var = in;
1252
 
1253
  if (negate_p)
1254
    {
1255
      if (*litp)
1256
        *minus_litp = *litp, *litp = 0;
1257
      else if (*minus_litp)
1258
        *litp = *minus_litp, *minus_litp = 0;
1259
      *conp = negate_expr (*conp);
1260
      var = negate_expr (var);
1261
    }
1262
 
1263
  return var;
1264
}
1265
 
1266
/* Re-associate trees split by the above function.  T1 and T2 are either
1267
   expressions to associate or null.  Return the new expression, if any.  If
1268
   we build an operation, do it in TYPE and with CODE.  */
1269
 
1270
static tree
1271
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1272
{
1273
  if (t1 == 0)
1274
    return t2;
1275
  else if (t2 == 0)
1276
    return t1;
1277
 
1278
  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279
     try to fold this since we will have infinite recursion.  But do
1280
     deal with any NEGATE_EXPRs.  */
1281
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1282
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1283
    {
1284
      if (code == PLUS_EXPR)
1285
        {
1286
          if (TREE_CODE (t1) == NEGATE_EXPR)
1287
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1288
                           fold_convert (type, TREE_OPERAND (t1, 0)));
1289
          else if (TREE_CODE (t2) == NEGATE_EXPR)
1290
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1291
                           fold_convert (type, TREE_OPERAND (t2, 0)));
1292
          else if (integer_zerop (t2))
1293
            return fold_convert (type, t1);
1294
        }
1295
      else if (code == MINUS_EXPR)
1296
        {
1297
          if (integer_zerop (t2))
1298
            return fold_convert (type, t1);
1299
        }
1300
 
1301
      return build2 (code, type, fold_convert (type, t1),
1302
                     fold_convert (type, t2));
1303
    }
1304
 
1305
  return fold_build2 (code, type, fold_convert (type, t1),
1306
                      fold_convert (type, t2));
1307
}
1308
 
1309
/* Combine two integer constants ARG1 and ARG2 under operation CODE
1310
   to produce a new constant.
1311
 
1312
   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1313
 
1314
tree
1315
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1316
{
1317
  unsigned HOST_WIDE_INT int1l, int2l;
1318
  HOST_WIDE_INT int1h, int2h;
1319
  unsigned HOST_WIDE_INT low;
1320
  HOST_WIDE_INT hi;
1321
  unsigned HOST_WIDE_INT garbagel;
1322
  HOST_WIDE_INT garbageh;
1323
  tree t;
1324
  tree type = TREE_TYPE (arg1);
1325
  int uns = TYPE_UNSIGNED (type);
1326
  int is_sizetype
1327
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1328
  int overflow = 0;
1329
 
1330
  int1l = TREE_INT_CST_LOW (arg1);
1331
  int1h = TREE_INT_CST_HIGH (arg1);
1332
  int2l = TREE_INT_CST_LOW (arg2);
1333
  int2h = TREE_INT_CST_HIGH (arg2);
1334
 
1335
  switch (code)
1336
    {
1337
    case BIT_IOR_EXPR:
1338
      low = int1l | int2l, hi = int1h | int2h;
1339
      break;
1340
 
1341
    case BIT_XOR_EXPR:
1342
      low = int1l ^ int2l, hi = int1h ^ int2h;
1343
      break;
1344
 
1345
    case BIT_AND_EXPR:
1346
      low = int1l & int2l, hi = int1h & int2h;
1347
      break;
1348
 
1349
    case RSHIFT_EXPR:
1350
      int2l = -int2l;
1351
    case LSHIFT_EXPR:
1352
      /* It's unclear from the C standard whether shifts can overflow.
1353
         The following code ignores overflow; perhaps a C standard
1354
         interpretation ruling is needed.  */
1355
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1356
                     &low, &hi, !uns);
1357
      break;
1358
 
1359
    case RROTATE_EXPR:
1360
      int2l = - int2l;
1361
    case LROTATE_EXPR:
1362
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1363
                      &low, &hi);
1364
      break;
1365
 
1366
    case PLUS_EXPR:
1367
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1368
      break;
1369
 
1370
    case MINUS_EXPR:
1371
      neg_double (int2l, int2h, &low, &hi);
1372
      add_double (int1l, int1h, low, hi, &low, &hi);
1373
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1374
      break;
1375
 
1376
    case MULT_EXPR:
1377
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1378
      break;
1379
 
1380
    case TRUNC_DIV_EXPR:
1381
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1382
    case EXACT_DIV_EXPR:
1383
      /* This is a shortcut for a common special case.  */
1384
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1385
          && ! TREE_CONSTANT_OVERFLOW (arg1)
1386
          && ! TREE_CONSTANT_OVERFLOW (arg2)
1387
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1388
        {
1389
          if (code == CEIL_DIV_EXPR)
1390
            int1l += int2l - 1;
1391
 
1392
          low = int1l / int2l, hi = 0;
1393
          break;
1394
        }
1395
 
1396
      /* ... fall through ...  */
1397
 
1398
    case ROUND_DIV_EXPR:
1399
      if (int2h == 0 && int2l == 1)
1400
        {
1401
          low = int1l, hi = int1h;
1402
          break;
1403
        }
1404
      if (int1l == int2l && int1h == int2h
1405
          && ! (int1l == 0 && int1h == 0))
1406
        {
1407
          low = 1, hi = 0;
1408
          break;
1409
        }
1410
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1411
                                       &low, &hi, &garbagel, &garbageh);
1412
      break;
1413
 
1414
    case TRUNC_MOD_EXPR:
1415
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1416
      /* This is a shortcut for a common special case.  */
1417
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1418
          && ! TREE_CONSTANT_OVERFLOW (arg1)
1419
          && ! TREE_CONSTANT_OVERFLOW (arg2)
1420
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1421
        {
1422
          if (code == CEIL_MOD_EXPR)
1423
            int1l += int2l - 1;
1424
          low = int1l % int2l, hi = 0;
1425
          break;
1426
        }
1427
 
1428
      /* ... fall through ...  */
1429
 
1430
    case ROUND_MOD_EXPR:
1431
      overflow = div_and_round_double (code, uns,
1432
                                       int1l, int1h, int2l, int2h,
1433
                                       &garbagel, &garbageh, &low, &hi);
1434
      break;
1435
 
1436
    case MIN_EXPR:
1437
    case MAX_EXPR:
1438
      if (uns)
1439
        low = (((unsigned HOST_WIDE_INT) int1h
1440
                < (unsigned HOST_WIDE_INT) int2h)
1441
               || (((unsigned HOST_WIDE_INT) int1h
1442
                    == (unsigned HOST_WIDE_INT) int2h)
1443
                   && int1l < int2l));
1444
      else
1445
        low = (int1h < int2h
1446
               || (int1h == int2h && int1l < int2l));
1447
 
1448
      if (low == (code == MIN_EXPR))
1449
        low = int1l, hi = int1h;
1450
      else
1451
        low = int2l, hi = int2h;
1452
      break;
1453
 
1454
    default:
1455
      gcc_unreachable ();
1456
    }
1457
 
1458
  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1459
 
1460
  if (notrunc)
1461
    {
1462
      /* Propagate overflow flags ourselves.  */
1463
      if (((!uns || is_sizetype) && overflow)
1464
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1465
        {
1466
          t = copy_node (t);
1467
          TREE_OVERFLOW (t) = 1;
1468
          TREE_CONSTANT_OVERFLOW (t) = 1;
1469
        }
1470
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1471
        {
1472
          t = copy_node (t);
1473
          TREE_CONSTANT_OVERFLOW (t) = 1;
1474
        }
1475
    }
1476
  else
1477
    t = force_fit_type (t, 1,
1478
                        ((!uns || is_sizetype) && overflow)
1479
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1480
                        TREE_CONSTANT_OVERFLOW (arg1)
1481
                        | TREE_CONSTANT_OVERFLOW (arg2));
1482
 
1483
  return t;
1484
}
1485
 
1486
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1488
   are the same kind of constant and the same machine mode.
1489
 
1490
   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1491
 
1492
static tree
1493
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1494
{
1495
  STRIP_NOPS (arg1);
1496
  STRIP_NOPS (arg2);
1497
 
1498
  if (TREE_CODE (arg1) == INTEGER_CST)
1499
    return int_const_binop (code, arg1, arg2, notrunc);
1500
 
1501
  if (TREE_CODE (arg1) == REAL_CST)
1502
    {
1503
      enum machine_mode mode;
1504
      REAL_VALUE_TYPE d1;
1505
      REAL_VALUE_TYPE d2;
1506
      REAL_VALUE_TYPE value;
1507
      REAL_VALUE_TYPE result;
1508
      bool inexact;
1509
      tree t, type;
1510
 
1511
      d1 = TREE_REAL_CST (arg1);
1512
      d2 = TREE_REAL_CST (arg2);
1513
 
1514
      type = TREE_TYPE (arg1);
1515
      mode = TYPE_MODE (type);
1516
 
1517
      /* Don't perform operation if we honor signaling NaNs and
1518
         either operand is a NaN.  */
1519
      if (HONOR_SNANS (mode)
1520
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1521
        return NULL_TREE;
1522
 
1523
      /* Don't perform operation if it would raise a division
1524
         by zero exception.  */
1525
      if (code == RDIV_EXPR
1526
          && REAL_VALUES_EQUAL (d2, dconst0)
1527
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1528
        return NULL_TREE;
1529
 
1530
      /* If either operand is a NaN, just return it.  Otherwise, set up
1531
         for floating-point trap; we return an overflow.  */
1532
      if (REAL_VALUE_ISNAN (d1))
1533
        return arg1;
1534
      else if (REAL_VALUE_ISNAN (d2))
1535
        return arg2;
1536
 
1537
      inexact = real_arithmetic (&value, code, &d1, &d2);
1538
      real_convert (&result, mode, &value);
1539
 
1540
      /* Don't constant fold this floating point operation if
1541
         the result has overflowed and flag_trapping_math.  */
1542
 
1543
      if (flag_trapping_math
1544
          && MODE_HAS_INFINITIES (mode)
1545
          && REAL_VALUE_ISINF (result)
1546
          && !REAL_VALUE_ISINF (d1)
1547
          && !REAL_VALUE_ISINF (d2))
1548
        return NULL_TREE;
1549
 
1550
      /* Don't constant fold this floating point operation if the
1551
         result may dependent upon the run-time rounding mode and
1552
         flag_rounding_math is set, or if GCC's software emulation
1553
         is unable to accurately represent the result.  */
1554
 
1555
      if ((flag_rounding_math
1556
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1557
               && !flag_unsafe_math_optimizations))
1558
          && (inexact || !real_identical (&result, &value)))
1559
        return NULL_TREE;
1560
 
1561
      t = build_real (type, result);
1562
 
1563
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1564
      TREE_CONSTANT_OVERFLOW (t)
1565
        = TREE_OVERFLOW (t)
1566
          | TREE_CONSTANT_OVERFLOW (arg1)
1567
          | TREE_CONSTANT_OVERFLOW (arg2);
1568
      return t;
1569
    }
1570
  if (TREE_CODE (arg1) == COMPLEX_CST)
1571
    {
1572
      tree type = TREE_TYPE (arg1);
1573
      tree r1 = TREE_REALPART (arg1);
1574
      tree i1 = TREE_IMAGPART (arg1);
1575
      tree r2 = TREE_REALPART (arg2);
1576
      tree i2 = TREE_IMAGPART (arg2);
1577
      tree t;
1578
 
1579
      switch (code)
1580
        {
1581
        case PLUS_EXPR:
1582
          t = build_complex (type,
1583
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
1584
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
1585
          break;
1586
 
1587
        case MINUS_EXPR:
1588
          t = build_complex (type,
1589
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
1590
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
1591
          break;
1592
 
1593
        case MULT_EXPR:
1594
          t = build_complex (type,
1595
                             const_binop (MINUS_EXPR,
1596
                                          const_binop (MULT_EXPR,
1597
                                                       r1, r2, notrunc),
1598
                                          const_binop (MULT_EXPR,
1599
                                                       i1, i2, notrunc),
1600
                                          notrunc),
1601
                             const_binop (PLUS_EXPR,
1602
                                          const_binop (MULT_EXPR,
1603
                                                       r1, i2, notrunc),
1604
                                          const_binop (MULT_EXPR,
1605
                                                       i1, r2, notrunc),
1606
                                          notrunc));
1607
          break;
1608
 
1609
        case RDIV_EXPR:
1610
          {
1611
            tree t1, t2, real, imag;
1612
            tree magsquared
1613
              = const_binop (PLUS_EXPR,
1614
                             const_binop (MULT_EXPR, r2, r2, notrunc),
1615
                             const_binop (MULT_EXPR, i2, i2, notrunc),
1616
                             notrunc);
1617
 
1618
            t1 = const_binop (PLUS_EXPR,
1619
                              const_binop (MULT_EXPR, r1, r2, notrunc),
1620
                              const_binop (MULT_EXPR, i1, i2, notrunc),
1621
                              notrunc);
1622
            t2 = const_binop (MINUS_EXPR,
1623
                              const_binop (MULT_EXPR, i1, r2, notrunc),
1624
                              const_binop (MULT_EXPR, r1, i2, notrunc),
1625
                              notrunc);
1626
 
1627
            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1628
              {
1629
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1630
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1631
              }
1632
            else
1633
              {
1634
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1635
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1636
                if (!real || !imag)
1637
                  return NULL_TREE;
1638
              }
1639
 
1640
            t = build_complex (type, real, imag);
1641
          }
1642
          break;
1643
 
1644
        default:
1645
          gcc_unreachable ();
1646
        }
1647
      return t;
1648
    }
1649
  return 0;
1650
}
1651
 
1652
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
1653
   indicates which particular sizetype to create.  */
1654
 
1655
tree
1656
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1657
{
1658
  return build_int_cst (sizetype_tab[(int) kind], number);
1659
}
1660
 
1661
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1662
   is a tree code.  The type of the result is taken from the operands.
1663
   Both must be the same type integer type and it must be a size type.
1664
   If the operands are constant, so is the result.  */
1665
 
1666
tree
1667
size_binop (enum tree_code code, tree arg0, tree arg1)
1668
{
1669
  tree type = TREE_TYPE (arg0);
1670
 
1671
  if (arg0 == error_mark_node || arg1 == error_mark_node)
1672
    return error_mark_node;
1673
 
1674
  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1675
              && type == TREE_TYPE (arg1));
1676
 
1677
  /* Handle the special case of two integer constants faster.  */
1678
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1679
    {
1680
      /* And some specific cases even faster than that.  */
1681
      if (code == PLUS_EXPR && integer_zerop (arg0))
1682
        return arg1;
1683
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1684
               && integer_zerop (arg1))
1685
        return arg0;
1686
      else if (code == MULT_EXPR && integer_onep (arg0))
1687
        return arg1;
1688
 
1689
      /* Handle general case of two integer constants.  */
1690
      return int_const_binop (code, arg0, arg1, 0);
1691
    }
1692
 
1693
  return fold_build2 (code, type, arg0, arg1);
1694
}
1695
 
1696
/* Given two values, either both of sizetype or both of bitsizetype,
1697
   compute the difference between the two values.  Return the value
1698
   in signed type corresponding to the type of the operands.  */
1699
 
1700
tree
1701
size_diffop (tree arg0, tree arg1)
1702
{
1703
  tree type = TREE_TYPE (arg0);
1704
  tree ctype;
1705
 
1706
  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1707
              && type == TREE_TYPE (arg1));
1708
 
1709
  /* If the type is already signed, just do the simple thing.  */
1710
  if (!TYPE_UNSIGNED (type))
1711
    return size_binop (MINUS_EXPR, arg0, arg1);
1712
 
1713
  ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1714
 
1715
  /* If either operand is not a constant, do the conversions to the signed
1716
     type and subtract.  The hardware will do the right thing with any
1717
     overflow in the subtraction.  */
1718
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1719
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1720
                       fold_convert (ctype, arg1));
1721
 
1722
  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1723
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1724
     overflow) and negate (which can't either).  Special-case a result
1725
     of zero while we're here.  */
1726
  if (tree_int_cst_equal (arg0, arg1))
1727
    return fold_convert (ctype, integer_zero_node);
1728
  else if (tree_int_cst_lt (arg1, arg0))
1729
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1730
  else
1731
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1732
                       fold_convert (ctype, size_binop (MINUS_EXPR,
1733
                                                        arg1, arg0)));
1734
}
1735
 
1736
/* A subroutine of fold_convert_const handling conversions of an
1737
   INTEGER_CST to another integer type.  */
1738
 
1739
static tree
1740
fold_convert_const_int_from_int (tree type, tree arg1)
1741
{
1742
  tree t;
1743
 
1744
  /* Given an integer constant, make new constant with new type,
1745
     appropriately sign-extended or truncated.  */
1746
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1747
                          TREE_INT_CST_HIGH (arg1));
1748
 
1749
  t = force_fit_type (t,
1750
                      /* Don't set the overflow when
1751
                         converting a pointer  */
1752
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
1753
                      (TREE_INT_CST_HIGH (arg1) < 0
1754
                       && (TYPE_UNSIGNED (type)
1755
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1756
                      | TREE_OVERFLOW (arg1),
1757
                      TREE_CONSTANT_OVERFLOW (arg1));
1758
 
1759
  return t;
1760
}
1761
 
1762
/* A subroutine of fold_convert_const handling conversions a REAL_CST
1763
   to an integer type.  */
1764
 
1765
static tree
1766
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1767
{
1768
  int overflow = 0;
1769
  tree t;
1770
 
1771
  /* The following code implements the floating point to integer
1772
     conversion rules required by the Java Language Specification,
1773
     that IEEE NaNs are mapped to zero and values that overflow
1774
     the target precision saturate, i.e. values greater than
1775
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1776
     are mapped to INT_MIN.  These semantics are allowed by the
1777
     C and C++ standards that simply state that the behavior of
1778
     FP-to-integer conversion is unspecified upon overflow.  */
1779
 
1780
  HOST_WIDE_INT high, low;
1781
  REAL_VALUE_TYPE r;
1782
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1783
 
1784
  switch (code)
1785
    {
1786
    case FIX_TRUNC_EXPR:
1787
      real_trunc (&r, VOIDmode, &x);
1788
      break;
1789
 
1790
    case FIX_CEIL_EXPR:
1791
      real_ceil (&r, VOIDmode, &x);
1792
      break;
1793
 
1794
    case FIX_FLOOR_EXPR:
1795
      real_floor (&r, VOIDmode, &x);
1796
      break;
1797
 
1798
    case FIX_ROUND_EXPR:
1799
      real_round (&r, VOIDmode, &x);
1800
      break;
1801
 
1802
    default:
1803
      gcc_unreachable ();
1804
    }
1805
 
1806
  /* If R is NaN, return zero and show we have an overflow.  */
1807
  if (REAL_VALUE_ISNAN (r))
1808
    {
1809
      overflow = 1;
1810
      high = 0;
1811
      low = 0;
1812
    }
1813
 
1814
  /* See if R is less than the lower bound or greater than the
1815
     upper bound.  */
1816
 
1817
  if (! overflow)
1818
    {
1819
      tree lt = TYPE_MIN_VALUE (type);
1820
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1821
      if (REAL_VALUES_LESS (r, l))
1822
        {
1823
          overflow = 1;
1824
          high = TREE_INT_CST_HIGH (lt);
1825
          low = TREE_INT_CST_LOW (lt);
1826
        }
1827
    }
1828
 
1829
  if (! overflow)
1830
    {
1831
      tree ut = TYPE_MAX_VALUE (type);
1832
      if (ut)
1833
        {
1834
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1835
          if (REAL_VALUES_LESS (u, r))
1836
            {
1837
              overflow = 1;
1838
              high = TREE_INT_CST_HIGH (ut);
1839
              low = TREE_INT_CST_LOW (ut);
1840
            }
1841
        }
1842
    }
1843
 
1844
  if (! overflow)
1845
    REAL_VALUE_TO_INT (&low, &high, r);
1846
 
1847
  t = build_int_cst_wide (type, low, high);
1848
 
1849
  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1850
                      TREE_CONSTANT_OVERFLOW (arg1));
1851
  return t;
1852
}
1853
 
1854
/* A subroutine of fold_convert_const handling conversions a REAL_CST
1855
   to another floating point type.  */
1856
 
1857
static tree
1858
fold_convert_const_real_from_real (tree type, tree arg1)
1859
{
1860
  REAL_VALUE_TYPE value;
1861
  tree t;
1862
 
1863
  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1864
  t = build_real (type, value);
1865
 
1866
  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1867
  TREE_CONSTANT_OVERFLOW (t)
1868
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1869
  return t;
1870
}
1871
 
1872
/* Attempt to fold type conversion operation CODE of expression ARG1 to
1873
   type TYPE.  If no simplification can be done return NULL_TREE.  */
1874
 
1875
static tree
1876
fold_convert_const (enum tree_code code, tree type, tree arg1)
1877
{
1878
  if (TREE_TYPE (arg1) == type)
1879
    return arg1;
1880
 
1881
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1882
    {
1883
      if (TREE_CODE (arg1) == INTEGER_CST)
1884
        return fold_convert_const_int_from_int (type, arg1);
1885
      else if (TREE_CODE (arg1) == REAL_CST)
1886
        return fold_convert_const_int_from_real (code, type, arg1);
1887
    }
1888
  else if (TREE_CODE (type) == REAL_TYPE)
1889
    {
1890
      if (TREE_CODE (arg1) == INTEGER_CST)
1891
        return build_real_from_int_cst (type, arg1);
1892
      if (TREE_CODE (arg1) == REAL_CST)
1893
        return fold_convert_const_real_from_real (type, arg1);
1894
    }
1895
  return NULL_TREE;
1896
}
1897
 
1898
/* Construct a vector of zero elements of vector type TYPE.  */
1899
 
1900
static tree
1901
build_zero_vector (tree type)
1902
{
1903
  tree elem, list;
1904
  int i, units;
1905
 
1906
  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1907
  units = TYPE_VECTOR_SUBPARTS (type);
1908
 
1909
  list = NULL_TREE;
1910
  for (i = 0; i < units; i++)
1911
    list = tree_cons (NULL_TREE, elem, list);
1912
  return build_vector (type, list);
1913
}
1914
 
1915
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  /* Converting to the identical type is a no-op.  */
  if (type == orig)
    return arg;

  /* Propagate earlier errors.  */
  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Types with the same main variant, or that the front end considers
     compatible, need only a NOP_EXPR.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      /* Fold a constant operand at compile time when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      /* A complex source converts via its real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      /* The only remaining legal source is a vector of the same size.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      /* Fold constant operands at compile time when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  /* With flag_float_store use CONVERT_EXPR so excess precision
	     is not silently kept.  */
	  return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			      type, arg);

	case COMPLEX_TYPE:
	  /* Complex -> real goes through the real part.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  /* Scalar -> complex: imaginary part is zero.  */
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is literally a COMPLEX_EXPR, convert its parts
	       directly without materializing the whole value.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise evaluate ARG only once, then convert each part.  */
	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      /* Only same-size bit reinterpretations are handled here.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      /* Discard the value, keeping only needed side effects.  */
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
 
2042
/* Return false if expr can be assumed not to be an lvalue, true
2043
   otherwise.  */
2044
 
2045
static bool
2046
maybe_lvalue_p (tree x)
2047
{
2048
  /* We only need to wrap lvalue tree codes.  */
2049
  switch (TREE_CODE (x))
2050
  {
2051
  case VAR_DECL:
2052
  case PARM_DECL:
2053
  case RESULT_DECL:
2054
  case LABEL_DECL:
2055
  case FUNCTION_DECL:
2056
  case SSA_NAME:
2057
 
2058
  case COMPONENT_REF:
2059
  case INDIRECT_REF:
2060
  case ALIGN_INDIRECT_REF:
2061
  case MISALIGNED_INDIRECT_REF:
2062
  case ARRAY_REF:
2063
  case ARRAY_RANGE_REF:
2064
  case BIT_FIELD_REF:
2065
  case OBJ_TYPE_REF:
2066
 
2067
  case REALPART_EXPR:
2068
  case IMAGPART_EXPR:
2069
  case PREINCREMENT_EXPR:
2070
  case PREDECREMENT_EXPR:
2071
  case SAVE_EXPR:
2072
  case TRY_CATCH_EXPR:
2073
  case WITH_CLEANUP_EXPR:
2074
  case COMPOUND_EXPR:
2075
  case MODIFY_EXPR:
2076
  case TARGET_EXPR:
2077
  case COND_EXPR:
2078
  case BIND_EXPR:
2079
  case MIN_EXPR:
2080
  case MAX_EXPR:
2081
    break;
2082
 
2083
  default:
2084
    /* Assume the worst for front-end tree codes.  */
2085
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2086
      break;
2087
    return false;
2088
  }
2089
 
2090
  return true;
2091
}
2092
 
2093
/* Return an expr equal to X but certainly not valid as an lvalue.  */
2094
 
2095
tree
2096
non_lvalue (tree x)
2097
{
2098
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2099
     us.  */
2100
  if (in_gimple_form)
2101
    return x;
2102
 
2103
  if (! maybe_lvalue_p (x))
2104
    return x;
2105
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2106
}
2107
 
2108
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2109
   Zero means allow extended lvalues.  */
2110
 
2111
int pedantic_lvalues;
2112
 
2113
/* When pedantic, return an expr equal to X but certainly not valid as a
2114
   pedantic lvalue.  Otherwise, return X.  */
2115
 
2116
static tree
2117
pedantic_non_lvalue (tree x)
2118
{
2119
  if (pedantic_lvalues)
2120
    return non_lvalue (x);
2121
  else
2122
    return x;
2123
}
2124
 
2125
/* Given a tree comparison code, return the code that is the logical inverse
2126
   of the given code.  It is not safe to do this for floating-point
2127
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2128
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2129
 
2130
enum tree_code
2131
invert_tree_comparison (enum tree_code code, bool honor_nans)
2132
{
2133
  if (honor_nans && flag_trapping_math)
2134
    return ERROR_MARK;
2135
 
2136
  switch (code)
2137
    {
2138
    case EQ_EXPR:
2139
      return NE_EXPR;
2140
    case NE_EXPR:
2141
      return EQ_EXPR;
2142
    case GT_EXPR:
2143
      return honor_nans ? UNLE_EXPR : LE_EXPR;
2144
    case GE_EXPR:
2145
      return honor_nans ? UNLT_EXPR : LT_EXPR;
2146
    case LT_EXPR:
2147
      return honor_nans ? UNGE_EXPR : GE_EXPR;
2148
    case LE_EXPR:
2149
      return honor_nans ? UNGT_EXPR : GT_EXPR;
2150
    case LTGT_EXPR:
2151
      return UNEQ_EXPR;
2152
    case UNEQ_EXPR:
2153
      return LTGT_EXPR;
2154
    case UNGT_EXPR:
2155
      return LE_EXPR;
2156
    case UNGE_EXPR:
2157
      return LT_EXPR;
2158
    case UNLT_EXPR:
2159
      return GE_EXPR;
2160
    case UNLE_EXPR:
2161
      return GT_EXPR;
2162
    case ORDERED_EXPR:
2163
      return UNORDERED_EXPR;
2164
    case UNORDERED_EXPR:
2165
      return ORDERED_EXPR;
2166
    default:
2167
      gcc_unreachable ();
2168
    }
2169
}
2170
 
2171
/* Similar, but return the comparison that results if the operands are
2172
   swapped.  This is safe for floating-point.  */
2173
 
2174
enum tree_code
2175
swap_tree_comparison (enum tree_code code)
2176
{
2177
  switch (code)
2178
    {
2179
    case EQ_EXPR:
2180
    case NE_EXPR:
2181
    case ORDERED_EXPR:
2182
    case UNORDERED_EXPR:
2183
    case LTGT_EXPR:
2184
    case UNEQ_EXPR:
2185
      return code;
2186
    case GT_EXPR:
2187
      return LT_EXPR;
2188
    case GE_EXPR:
2189
      return LE_EXPR;
2190
    case LT_EXPR:
2191
      return GT_EXPR;
2192
    case LE_EXPR:
2193
      return GE_EXPR;
2194
    case UNGT_EXPR:
2195
      return UNLT_EXPR;
2196
    case UNGE_EXPR:
2197
      return UNLE_EXPR;
2198
    case UNLT_EXPR:
2199
      return UNGT_EXPR;
2200
    case UNLE_EXPR:
2201
      return UNGE_EXPR;
2202
    default:
2203
      gcc_unreachable ();
2204
    }
2205
}
2206
 
2207
 
2208
/* Convert a comparison tree code from an enum tree_code representation
2209
   into a compcode bit-based encoding.  This function is the inverse of
2210
   compcode_to_comparison.  */
2211
 
2212
static enum comparison_code
2213
comparison_to_compcode (enum tree_code code)
2214
{
2215
  switch (code)
2216
    {
2217
    case LT_EXPR:
2218
      return COMPCODE_LT;
2219
    case EQ_EXPR:
2220
      return COMPCODE_EQ;
2221
    case LE_EXPR:
2222
      return COMPCODE_LE;
2223
    case GT_EXPR:
2224
      return COMPCODE_GT;
2225
    case NE_EXPR:
2226
      return COMPCODE_NE;
2227
    case GE_EXPR:
2228
      return COMPCODE_GE;
2229
    case ORDERED_EXPR:
2230
      return COMPCODE_ORD;
2231
    case UNORDERED_EXPR:
2232
      return COMPCODE_UNORD;
2233
    case UNLT_EXPR:
2234
      return COMPCODE_UNLT;
2235
    case UNEQ_EXPR:
2236
      return COMPCODE_UNEQ;
2237
    case UNLE_EXPR:
2238
      return COMPCODE_UNLE;
2239
    case UNGT_EXPR:
2240
      return COMPCODE_UNGT;
2241
    case LTGT_EXPR:
2242
      return COMPCODE_LTGT;
2243
    case UNGE_EXPR:
2244
      return COMPCODE_UNGE;
2245
    default:
2246
      gcc_unreachable ();
2247
    }
2248
}
2249
 
2250
/* Convert a compcode bit-based encoding of a comparison operator back
2251
   to GCC's enum tree_code representation.  This function is the
2252
   inverse of comparison_to_compcode.  */
2253
 
2254
static enum tree_code
2255
compcode_to_comparison (enum comparison_code code)
2256
{
2257
  switch (code)
2258
    {
2259
    case COMPCODE_LT:
2260
      return LT_EXPR;
2261
    case COMPCODE_EQ:
2262
      return EQ_EXPR;
2263
    case COMPCODE_LE:
2264
      return LE_EXPR;
2265
    case COMPCODE_GT:
2266
      return GT_EXPR;
2267
    case COMPCODE_NE:
2268
      return NE_EXPR;
2269
    case COMPCODE_GE:
2270
      return GE_EXPR;
2271
    case COMPCODE_ORD:
2272
      return ORDERED_EXPR;
2273
    case COMPCODE_UNORD:
2274
      return UNORDERED_EXPR;
2275
    case COMPCODE_UNLT:
2276
      return UNLT_EXPR;
2277
    case COMPCODE_UNEQ:
2278
      return UNEQ_EXPR;
2279
    case COMPCODE_UNLE:
2280
      return UNLE_EXPR;
2281
    case COMPCODE_UNGT:
2282
      return UNGT_EXPR;
2283
    case COMPCODE_LTGT:
2284
      return LTGT_EXPR;
2285
    case COMPCODE_UNGE:
2286
      return UNGE_EXPR;
2287
    default:
2288
      gcc_unreachable ();
2289
    }
2290
}
2291
 
2292
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* The compcode encoding is chosen so that combining two comparisons
     with AND/OR is just the bitwise AND/OR of their codes.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
   else if (flag_trapping_math)
     {
        /* Check that the original operation and the optimized ones will trap
           under the same condition.  */
        bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                     && (lcompcode != COMPCODE_EQ)
                     && (lcompcode != COMPCODE_ORD);
        bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                     && (rcompcode != COMPCODE_EQ)
                     && (rcompcode != COMPCODE_ORD);
        bool trap = (compcode & COMPCODE_UNORD) == 0
                    && (compcode != COMPCODE_EQ)
                    && (compcode != COMPCODE_ORD);

        /* In a short-circuited boolean expression the LHS might be
           such that the RHS, if evaluated, will never trap.  For
           example, in ORD (x, y) && (x < y), we evaluate the RHS only
           if neither x nor y is NaN.  (This is a mixed blessing: for
           example, the expression above will never trap, hence
           optimizing it to x < y would be invalid).  */
        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
          rtrap = false;

        /* If the comparison was short-circuited, and only the RHS
           trapped, we may now generate a spurious trap.  */
        if (rtrap && !ltrap
            && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
          return NULL_TREE;

        /* If we changed the conditions that cause a trap, we lose.  */
        if ((ltrap || rtrap) != trap)
          return NULL_TREE;
      }

  /* Map the combined code back to a tree; always-true and always-false
     combinations fold to boolean constants.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
 
2376
/* Return nonzero if CODE is a tree code that represents a truth value.  */
2377
 
2378
static int
2379
truth_value_p (enum tree_code code)
2380
{
2381
  return (TREE_CODE_CLASS (code) == tcc_comparison
2382
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2383
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2384
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2385
}
2386
 
2387
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* Discard no-op conversions from both operands.  */
  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          /* Walk both element chains in lock step; the vectors are
             equal only if every pair matches and both chains end at
             the same time.  */
          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                              TREE_STRING_POINTER (arg1),
                              TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),     \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (!OP_SAME (0))
            return 0;

          {
            /* Only const calls (and, with OEP_PURE_SAME, pure calls)
               can ever compare equal.  */
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
 
2670
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Exact structural equality is the easy case.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The narrowing analysis below only makes sense for integral types.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  /* Only proceed when both operands were genuinely narrowed and agree
     in signedness, mirroring shorten_compare's own preconditions.  */
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
2725
 
2726
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      /* Both operands must themselves satisfy the property.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      /* Record operand 0 as one of the two tracked values, or verify
         it matches one already recorded.  */
      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      /* Likewise for operand 1; *CVAL1 is known nonzero here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
2822
 
2823
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      /* Rebuild the node with the (recursively substituted) operand.  */
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          /* Substitute inside the saved expression; the SAVE_EXPR
             wrapper itself is dropped.  */
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          /* Only the value operand matters here.  */
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      /* Constants and anything else are returned unchanged.  */
      return arg;
    }
}
2904
 
2905
/* Return a tree for the case when the result of an expression is RESULT
2906
   converted to TYPE and OMITTED was previously an operand of the expression
2907
   but is now not needed (e.g., we folded OMITTED * 0).
2908
 
2909
   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2910
   the conversion of RESULT to TYPE.  */
2911
 
2912
tree
2913
omit_one_operand (tree type, tree result, tree omitted)
2914
{
2915
  tree t = fold_convert (type, result);
2916
 
2917
  if (TREE_SIDE_EFFECTS (omitted))
2918
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2919
 
2920
  return non_lvalue (t);
2921
}
2922
 
2923
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2924
 
2925
static tree
2926
pedantic_omit_one_operand (tree type, tree result, tree omitted)
2927
{
2928
  tree t = fold_convert (type, result);
2929
 
2930
  if (TREE_SIDE_EFFECTS (omitted))
2931
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2932
 
2933
  return pedantic_non_lvalue (t);
2934
}
2935
 
2936
/* Return a tree for the case when the result of an expression is RESULT
2937
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2938
   of the expression but are now not needed.
2939
 
2940
   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2941
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2942
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
2943
   just do the conversion of RESULT to TYPE.  */
2944
 
2945
tree
2946
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2947
{
2948
  tree t = fold_convert (type, result);
2949
 
2950
  if (TREE_SIDE_EFFECTS (omitted2))
2951
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
2952
  if (TREE_SIDE_EFFECTS (omitted1))
2953
    t = build2 (COMPOUND_EXPR, type, omitted1, t);
2954
 
2955
  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2956
}
2957
 
2958
 
2959
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an ordered FP comparison could
         change whether a NaN operand traps, so keep the comparison
         intact and just wrap it.  */
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) == !a | !b.  */
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) == !a & !b.  */
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* Keep the first operand for its side effects; invert the value.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* FALLTHRU - non-boolean NOP_EXPR is handled like a conversion.  */
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      /* !(x & 1) == ((x & 1) == 0).  */
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Fallback: only valid for boolean-typed ARG.  */
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
3089
 
3090
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3091
   operands are another bit-wise operation with a common input.  If so,
3092
   distribute the bit operations to save an operation and possibly two if
3093
   constants are involved.  For example, convert
3094
        (A | B) & (A | C) into A | (B & C)
3095
   Further simplification will occur if B and C are constants.
3096
 
3097
   If this optimization cannot be done, 0 will be returned.  */
3098
 
3099
static tree
3100
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3101
{
3102
  tree common;
3103
  tree left, right;
3104
 
3105
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3106
      || TREE_CODE (arg0) == code
3107
      || (TREE_CODE (arg0) != BIT_AND_EXPR
3108
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
3109
    return 0;
3110
 
3111
  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3112
    {
3113
      common = TREE_OPERAND (arg0, 0);
3114
      left = TREE_OPERAND (arg0, 1);
3115
      right = TREE_OPERAND (arg1, 1);
3116
    }
3117
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3118
    {
3119
      common = TREE_OPERAND (arg0, 0);
3120
      left = TREE_OPERAND (arg0, 1);
3121
      right = TREE_OPERAND (arg1, 0);
3122
    }
3123
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3124
    {
3125
      common = TREE_OPERAND (arg0, 1);
3126
      left = TREE_OPERAND (arg0, 0);
3127
      right = TREE_OPERAND (arg1, 1);
3128
    }
3129
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3130
    {
3131
      common = TREE_OPERAND (arg0, 1);
3132
      left = TREE_OPERAND (arg0, 0);
3133
      right = TREE_OPERAND (arg1, 0);
3134
    }
3135
  else
3136
    return 0;
3137
 
3138
  return fold_build2 (TREE_CODE (arg0), type, common,
3139
                      fold_build2 (code, type, left, right));
3140
}
3141
 
3142
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                       TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* Turn each divisor into a reciprocal factor, then fold the two
         factors together with CODE at compile time.  */
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  /* No distribution possible.  */
  return NULL_TREE;
}
3181
 
3182
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3183
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3184
 
3185
static tree
3186
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3187
                    int unsignedp)
3188
{
3189
  tree result;
3190
 
3191
  if (bitpos == 0)
3192
    {
3193
      tree size = TYPE_SIZE (TREE_TYPE (inner));
3194
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3195
           || POINTER_TYPE_P (TREE_TYPE (inner)))
3196
          && host_integerp (size, 0)
3197
          && tree_low_cst (size, 0) == bitsize)
3198
        return fold_convert (type, inner);
3199
    }
3200
 
3201
  result = build3 (BIT_FIELD_REF, type, inner,
3202
                   size_int (bitsize), bitsize_int (bitpos));
3203
 
3204
  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3205
 
3206
  return result;
3207
}
3208
 
3209
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
        sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
         || lunsignedp != runsignedp || offset != 0
         || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE ones
     positioned at LBITPOS within the NBITSIZE-wide chunk.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
3375
 
3376
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit BIT_AND_EXPR with a constant mask, if present.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness. Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  /* Shift out and back in to leave *PBITSIZE low-order one bits.  */
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3467
 
3468
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3469
   bit positions.  */
3470
 
3471
static int
3472
all_ones_mask_p (tree mask, int size)
3473
{
3474
  tree type = TREE_TYPE (mask);
3475
  unsigned int precision = TYPE_PRECISION (type);
3476
  tree tmask;
3477
 
3478
  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3479
  tmask = force_fit_type (tmask, 0, false, false);
3480
 
3481
  return
3482
    tree_int_cst_equal (mask,
3483
                        const_binop (RSHIFT_EXPR,
3484
                                     const_binop (LSHIFT_EXPR, tmask,
3485
                                                  size_int (precision - size),
3486
                                                  0),
3487
                                     size_int (precision - size), 0));
3488
}
3489
 
3490
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  /* LO/HI hold the two halves of the sign-bit constant; MASK_LO/MASK_HI
     hold the two halves of a mask covering EXP's precision.  */
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* The sign bit falls in the high half of the two-word constant.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* The sign bit fits in the low half.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type: recurse on the unextended
     operand so VAL is tested against its sign bit as well.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
3547
 
3548
/* Subroutine for fold_truthop: determine if an operand is simple enough
3549
   to be evaluated unconditionally.  */
3550
 
3551
static int
3552
simple_operand_p (tree exp)
3553
{
3554
  /* Strip any conversions that don't change the machine mode.  */
3555
  STRIP_NOPS (exp);
3556
 
3557
  return (CONSTANT_CLASS_P (exp)
3558
          || TREE_CODE (exp) == SSA_NAME
3559
          || (DECL_P (exp)
3560
              && ! TREE_ADDRESSABLE (exp)
3561
              && ! TREE_THIS_VOLATILE (exp)
3562
              && ! DECL_NONLOCAL (exp)
3563
              /* Don't regard global variables as simple.  They may be
3564
                 allocated in ways unknown to the compiler (shared memory,
3565
                 #pragma weak, etc).  */
3566
              && ! TREE_PUBLIC (exp)
3567
              && ! DECL_EXTERNAL (exp)
3568
              /* Loading a static variable is unduly expensive, but global
3569
                 registers aren't expensive.  */
3570
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3571
}
3572
 
3573
/* The following functions are subroutines to fold_range_test and allow it to
3574
   try to change a logical combination of comparisons into a range test.
3575
 
3576
   For example, both
3577
        X == 2 || X == 3 || X == 4 || X == 5
3578
   and
3579
        X >= 2 && X <= 5
3580
   are converted to
3581
        (unsigned) (X - 2) <= 3
3582
 
3583
   We describe each set of comparisons as being either inside or outside
3584
   a range, using a variable named like IN_P, and then describe the
3585
   range with a lower and upper bound.  If one of the bounds is omitted,
3586
   it represents either the highest or lowest value of the type.
3587
 
3588
   In the comments below, we represent a range by two numbers in brackets
3589
   preceded by a "+" to designate being inside that range, or a "-" to
3590
   designate being outside that range, so the condition can be inverted by
3591
   flipping the prefix.  An omitted bound is represented by a "-".  For
3592
   example, "- [-, 10]" means being outside the range starting at the lowest
3593
   possible value and ending at 10, in other words, being greater than 10.
3594
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3595
   always false.
3596
 
3597
   We set up things so that the missing bounds are handled in a consistent
3598
   manner so neither a missing bound nor "true" and "false" need to be
3599
   handled using a special case.  */
3600
 
3601
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3602
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3603
   and UPPER1_P are nonzero if the respective argument is an upper bound
3604
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3605
   must be specified for a comparison.  ARG1 will be converted to ARG0's
3606
   type if both are specified.  */
3607
 
3608
static tree
3609
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3610
             tree arg1, int upper1_p)
3611
{
3612
  tree tem;
3613
  int result;
3614
  int sgn0, sgn1;
3615
 
3616
  /* If neither arg represents infinity, do the normal operation.
3617
     Else, if not a comparison, return infinity.  Else handle the special
3618
     comparison rules. Note that most of the cases below won't occur, but
3619
     are handled for consistency.  */
3620
 
3621
  if (arg0 != 0 && arg1 != 0)
3622
    {
3623
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3624
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
3625
      STRIP_NOPS (tem);
3626
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3627
    }
3628
 
3629
  if (TREE_CODE_CLASS (code) != tcc_comparison)
3630
    return 0;
3631
 
3632
  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3633
     for neither.  In real maths, we cannot assume open ended ranges are
3634
     the same. But, this is computer arithmetic, where numbers are finite.
3635
     We can therefore make the transformation of any unbounded range with
3636
     the value Z, Z being greater than any representable number. This permits
3637
     us to treat unbounded ranges as equal.  */
3638
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3639
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3640
  switch (code)
3641
    {
3642
    case EQ_EXPR:
3643
      result = sgn0 == sgn1;
3644
      break;
3645
    case NE_EXPR:
3646
      result = sgn0 != sgn1;
3647
      break;
3648
    case LT_EXPR:
3649
      result = sgn0 < sgn1;
3650
      break;
3651
    case LE_EXPR:
3652
      result = sgn0 <= sgn1;
3653
      break;
3654
    case GT_EXPR:
3655
      result = sgn0 > sgn1;
3656
      break;
3657
    case GE_EXPR:
3658
      result = sgn0 >= sgn1;
3659
      break;
3660
    default:
3661
      gcc_unreachable ();
3662
    }
3663
 
3664
  return constant_boolean_node (result, type);
3665
}
3666
 
3667
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      /* Pick up the operands relevant to CODE's tree-code class; ARG1
         is only fetched for codes that actually have a second operand.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          /* Negation flips the in/out sense of the range.  */
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          /* Can't descend through a widening conversion.  */
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);


          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type,  we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3943
 
3944
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is built as the inversion of the
     corresponding "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  /* No bounds at all: the test is always true.  */
  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  /* A single-point range is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  /* [0, high]: compare as unsigned so the low bound comes for free.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Build the value 2**(prec-1) - 1, i.e. the signed maximum, in
         the two-word (HI, LO) representation.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }

  /* General case: reduce [low, high] to [0, high - low] by subtracting
     LOW, provided the width computation doesn't overflow.  */
  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
      && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          /* There is no requirement that LOW be within the range of ETYPE
             if the latter is a subtype.  It must, however, be within the base
             type of ETYPE.  So be sure we do the subtraction in that type.  */
          if (TREE_TYPE (etype))
            etype = TREE_TYPE (etype);
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              /* The unsigned type wraps as required; redo the width
                 computation there.  */
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    {
      /* There is no requirement that LOW be within the range of ETYPE
         if the latter is a subtype.  It must, however, be within the base
         type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
        {
          etype = TREE_TYPE (etype);
          exp = fold_convert (etype, exp);
          low = fold_convert (etype, low);
          value = fold_convert (etype, value);
        }

      /* Recurse with the shifted range [0, value], which the
         integer_zerop (low) case above turns into one unsigned compare.  */
      return build_range_check (type,
                                fold_build2 (MINUS_EXPR, etype, exp, low),
                                1, build_int_cst (etype, 0), value);
    }

  return 0;
}
4087
 
4088
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.
   IN0_P/LOW0/HIGH0 and IN1_P/LOW1/HIGH1 describe the input ranges in the
   same in/out-plus-bounds form used throughout this file; a zero bound
   stands for the extreme value of the type.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two lower (resp. upper) bounds coincide, treating a
     pair of missing bounds as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          /* Adjacent ranges (high0 + 1 == low1) merge into one
             excluded range.  */
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4288
 
4289
 
4290
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  /* Remember ARG1's type before conversions are stripped below.  */
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that. */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        /* ABS_EXPR would raise no exception for a NaN operand, while
           the original unordered comparison might; keep the COND_EXPR
           when math may trap.  */
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        /* abs is only meaningful on a signed type.  */
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || strcmp (lang_hooks.name, "GNU C++") != 0
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2).  The TYPE_MAX_VALUE
           check guards against C2 + 1 wrapping around.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2).  The TYPE_MIN_VALUE
           check guards against C2 - 1 wrapping around.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;
      case NE_EXPR:
        /* A != C1 ? A : C2 admits no simplification here.  */
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
4561
 
4562
 
4563
 
4564
/* Unless the target overrides it, assume that when a branch costs at
   least two (BRANCH_COST >= 2) it is profitable to evaluate both
   operands of a logical operation unconditionally rather than use
   short-circuit branching.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
4567
 
4568
/* EXP is some logical combination of boolean tests.  See if we can
4569
   merge it into some range test.  Return the new tree if so.  */
4570
 
4571
static tree
4572
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4573
{
4574
  int or_op = (code == TRUTH_ORIF_EXPR
4575
               || code == TRUTH_OR_EXPR);
4576
  int in0_p, in1_p, in_p;
4577
  tree low0, low1, low, high0, high1, high;
4578
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
4579
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
4580
  tree tem;
4581
 
4582
  /* If this is an OR operation, invert both sides; we will invert
4583
     again at the end.  */
4584
  if (or_op)
4585
    in0_p = ! in0_p, in1_p = ! in1_p;
4586
 
4587
  /* If both expressions are the same, if we can merge the ranges, and we
4588
     can build the range test, return it or it inverted.  If one of the
4589
     ranges is always true or always false, consider it to be the same
4590
     expression as the other.  */
4591
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4592
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4593
                       in1_p, low1, high1)
4594
      && 0 != (tem = (build_range_check (type,
4595
                                         lhs != 0 ? lhs
4596
                                         : rhs != 0 ? rhs : integer_zero_node,
4597
                                         in_p, low, high))))
4598
    return or_op ? invert_truthvalue (tem) : tem;
4599
 
4600
  /* On machines where the branch cost is expensive, if this is a
4601
     short-circuited branch and the underlying object on both sides
4602
     is the same, make a non-short-circuit operation.  */
4603
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4604
           && lhs != 0 && rhs != 0
4605
           && (code == TRUTH_ANDIF_EXPR
4606
               || code == TRUTH_ORIF_EXPR)
4607
           && operand_equal_p (lhs, rhs, 0))
4608
    {
4609
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4610
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4611
         which cases we can't do this.  */
4612
      if (simple_operand_p (lhs))
4613
        return build2 (code == TRUTH_ANDIF_EXPR
4614
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4615
                       type, op0, op1);
4616
 
4617
      else if (lang_hooks.decls.global_bindings_p () == 0
4618
               && ! CONTAINS_PLACEHOLDER_P (lhs))
4619
        {
4620
          tree common = save_expr (lhs);
4621
 
4622
          if (0 != (lhs = build_range_check (type, common,
4623
                                             or_op ? ! in0_p : in0_p,
4624
                                             low0, high0))
4625
              && (0 != (rhs = build_range_check (type, common,
4626
                                                 or_op ? ! in1_p : in1_p,
4627
                                                 low1, high1))))
4628
            return build2 (code == TRUTH_ANDIF_EXPR
4629
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4630
                           type, lhs, rhs);
4631
        }
4632
    }
4633
 
4634
  return 0;
4635
}
4636
 
4637
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4638
   bit value.  Arrange things so the extra bits will be set to zero if and
4639
   only if C is signed-extended to its full width.  If MASK is nonzero,
4640
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4641
 
4642
static tree
4643
unextend (tree c, int p, int unsignedp, tree mask)
4644
{
4645
  tree type = TREE_TYPE (c);
4646
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4647
  tree temp;
4648
 
4649
  if (p == modesize || unsignedp)
4650
    return c;
4651
 
4652
  /* We work by getting just the sign bit into the low-order bit, then
4653
     into the high-order bit, then sign-extend.  We then XOR that value
4654
     with C.  */
4655
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4656
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4657
 
4658
  /* We must use a signed type in order to get an arithmetic right shift.
4659
     However, we must also avoid introducing accidental overflows, so that
4660
     a subsequent call to integer_zerop will work.  Hence we must
4661
     do the type conversion here.  At this point, the constant is either
4662
     zero or one, and the conversion to a signed type can never overflow.
4663
     We could get an overflow if this conversion is done anywhere else.  */
4664
  if (TYPE_UNSIGNED (type))
4665
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4666
 
4667
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4668
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4669
  if (mask != 0)
4670
    temp = const_binop (BIT_AND_EXPR, temp,
4671
                        fold_convert (TREE_TYPE (c), mask), 0);
4672
  /* If necessary, convert the type back to match the type of C.  */
4673
  if (TYPE_UNSIGNED (type))
4674
    temp = fold_convert (type, temp);
4675
 
4676
  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4677
}
4678
 
4679
/* Find ways of folding logical expressions of LHS and RHS:
4680
   Try to merge two comparisons to the same innermost item.
4681
   Look for range tests like "ch >= '0' && ch <= '9'".
4682
   Look for combinations of simple terms on machines with expensive branches
4683
   and evaluate the RHS unconditionally.
4684
 
4685
   For example, if we have p->a == 2 && p->b == 4 and we can make an
4686
   object large enough to span both A and B, we can do this with a comparison
4687
   against the object ANDed with the a mask.
4688
 
4689
   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4690
   operations to do this with one comparison.
4691
 
4692
   We check for both normal comparisons and the BIT_AND_EXPRs made this by
4693
   function and the one above.
4694
 
4695
   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
4696
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4697
 
4698
   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4699
   two operands.
4700
 
4701
   We return the simplified tree or 0 if no optimization is possible.  */
4702
 
4703
static tree
4704
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4705
{
4706
  /* If this is the "or" of two comparisons, we can do something if
4707
     the comparisons are NE_EXPR.  If this is the "and", we can do something
4708
     if the comparisons are EQ_EXPR.  I.e.,
4709
        (a->b == 2 && a->c == 4) can become (a->new == NEW).
4710
 
4711
     WANTED_CODE is this operation code.  For single bit fields, we can
4712
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4713
     comparison for one-bit fields.  */
4714
 
4715
  enum tree_code wanted_code;
4716
  enum tree_code lcode, rcode;
4717
  tree ll_arg, lr_arg, rl_arg, rr_arg;
4718
  tree ll_inner, lr_inner, rl_inner, rr_inner;
4719
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4720
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4721
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4722
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4723
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4724
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4725
  enum machine_mode lnmode, rnmode;
4726
  tree ll_mask, lr_mask, rl_mask, rr_mask;
4727
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4728
  tree l_const, r_const;
4729
  tree lntype, rntype, result;
4730
  int first_bit, end_bit;
4731
  int volatilep;
4732
 
4733
  /* Start by getting the comparison codes.  Fail if anything is volatile.
4734
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4735
     it were surrounded with a NE_EXPR.  */
4736
 
4737
  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4738
    return 0;
4739
 
4740
  lcode = TREE_CODE (lhs);
4741
  rcode = TREE_CODE (rhs);
4742
 
4743
  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4744
    {
4745
      lhs = build2 (NE_EXPR, truth_type, lhs,
4746
                    fold_convert (TREE_TYPE (lhs), integer_zero_node));
4747
      lcode = NE_EXPR;
4748
    }
4749
 
4750
  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4751
    {
4752
      rhs = build2 (NE_EXPR, truth_type, rhs,
4753
                    fold_convert (TREE_TYPE (rhs), integer_zero_node));
4754
      rcode = NE_EXPR;
4755
    }
4756
 
4757
  if (TREE_CODE_CLASS (lcode) != tcc_comparison
4758
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
4759
    return 0;
4760
 
4761
  ll_arg = TREE_OPERAND (lhs, 0);
4762
  lr_arg = TREE_OPERAND (lhs, 1);
4763
  rl_arg = TREE_OPERAND (rhs, 0);
4764
  rr_arg = TREE_OPERAND (rhs, 1);
4765
 
4766
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
4767
  if (simple_operand_p (ll_arg)
4768
      && simple_operand_p (lr_arg))
4769
    {
4770
      tree result;
4771
      if (operand_equal_p (ll_arg, rl_arg, 0)
4772
          && operand_equal_p (lr_arg, rr_arg, 0))
4773
        {
4774
          result = combine_comparisons (code, lcode, rcode,
4775
                                        truth_type, ll_arg, lr_arg);
4776
          if (result)
4777
            return result;
4778
        }
4779
      else if (operand_equal_p (ll_arg, rr_arg, 0)
4780
               && operand_equal_p (lr_arg, rl_arg, 0))
4781
        {
4782
          result = combine_comparisons (code, lcode,
4783
                                        swap_tree_comparison (rcode),
4784
                                        truth_type, ll_arg, lr_arg);
4785
          if (result)
4786
            return result;
4787
        }
4788
    }
4789
 
4790
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4791
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4792
 
4793
  /* If the RHS can be evaluated unconditionally and its operands are
4794
     simple, it wins to evaluate the RHS unconditionally on machines
4795
     with expensive branches.  In this case, this isn't a comparison
4796
     that can be merged.  Avoid doing this if the RHS is a floating-point
4797
     comparison since those can trap.  */
4798
 
4799
  if (BRANCH_COST >= 2
4800
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4801
      && simple_operand_p (rl_arg)
4802
      && simple_operand_p (rr_arg))
4803
    {
4804
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
4805
      if (code == TRUTH_OR_EXPR
4806
          && lcode == NE_EXPR && integer_zerop (lr_arg)
4807
          && rcode == NE_EXPR && integer_zerop (rr_arg)
4808
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4809
        return build2 (NE_EXPR, truth_type,
4810
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4811
                               ll_arg, rl_arg),
4812
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4813
 
4814
      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
4815
      if (code == TRUTH_AND_EXPR
4816
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
4817
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
4818
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4819
        return build2 (EQ_EXPR, truth_type,
4820
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4821
                               ll_arg, rl_arg),
4822
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4823
 
4824
      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4825
        return build2 (code, truth_type, lhs, rhs);
4826
    }
4827
 
4828
  /* See if the comparisons can be merged.  Then get all the parameters for
4829
     each side.  */
4830
 
4831
  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4832
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
4833
    return 0;
4834
 
4835
  volatilep = 0;
4836
  ll_inner = decode_field_reference (ll_arg,
4837
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
4838
                                     &ll_unsignedp, &volatilep, &ll_mask,
4839
                                     &ll_and_mask);
4840
  lr_inner = decode_field_reference (lr_arg,
4841
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
4842
                                     &lr_unsignedp, &volatilep, &lr_mask,
4843
                                     &lr_and_mask);
4844
  rl_inner = decode_field_reference (rl_arg,
4845
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
4846
                                     &rl_unsignedp, &volatilep, &rl_mask,
4847
                                     &rl_and_mask);
4848
  rr_inner = decode_field_reference (rr_arg,
4849
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
4850
                                     &rr_unsignedp, &volatilep, &rr_mask,
4851
                                     &rr_and_mask);
4852
 
4853
  /* It must be true that the inner operation on the lhs of each
4854
     comparison must be the same if we are to be able to do anything.
4855
     Then see if we have constants.  If not, the same must be true for
4856
     the rhs's.  */
4857
  if (volatilep || ll_inner == 0 || rl_inner == 0
4858
      || ! operand_equal_p (ll_inner, rl_inner, 0))
4859
    return 0;
4860
 
4861
  if (TREE_CODE (lr_arg) == INTEGER_CST
4862
      && TREE_CODE (rr_arg) == INTEGER_CST)
4863
    l_const = lr_arg, r_const = rr_arg;
4864
  else if (lr_inner == 0 || rr_inner == 0
4865
           || ! operand_equal_p (lr_inner, rr_inner, 0))
4866
    return 0;
4867
  else
4868
    l_const = r_const = 0;
4869
 
4870
  /* If either comparison code is not correct for our logical operation,
4871
     fail.  However, we can convert a one-bit comparison against zero into
4872
     the opposite comparison against that bit being set in the field.  */
4873
 
4874
  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4875
  if (lcode != wanted_code)
4876
    {
4877
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4878
        {
4879
          /* Make the left operand unsigned, since we are only interested
4880
             in the value of one bit.  Otherwise we are doing the wrong
4881
             thing below.  */
4882
          ll_unsignedp = 1;
4883
          l_const = ll_mask;
4884
        }
4885
      else
4886
        return 0;
4887
    }
4888
 
4889
  /* This is analogous to the code for l_const above.  */
4890
  if (rcode != wanted_code)
4891
    {
4892
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4893
        {
4894
          rl_unsignedp = 1;
4895
          r_const = rl_mask;
4896
        }
4897
      else
4898
        return 0;
4899
    }
4900
 
4901
  /* After this point all optimizations will generate bit-field
4902
     references, which we might not want.  */
4903
  if (! lang_hooks.can_use_bit_fields_p ())
4904
    return 0;
4905
 
4906
  /* See if we can find a mode that contains both fields being compared on
4907
     the left.  If we can't, fail.  Otherwise, update all constants and masks
4908
     to be relative to a field of that size.  */
4909
  first_bit = MIN (ll_bitpos, rl_bitpos);
4910
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4911
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
4912
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4913
                          volatilep);
4914
  if (lnmode == VOIDmode)
4915
    return 0;
4916
 
4917
  lnbitsize = GET_MODE_BITSIZE (lnmode);
4918
  lnbitpos = first_bit & ~ (lnbitsize - 1);
4919
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4920
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4921
 
4922
  if (BYTES_BIG_ENDIAN)
4923
    {
4924
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4925
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4926
    }
4927
 
4928
  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4929
                         size_int (xll_bitpos), 0);
4930
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4931
                         size_int (xrl_bitpos), 0);
4932
 
4933
  if (l_const)
4934
    {
4935
      l_const = fold_convert (lntype, l_const);
4936
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4937
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4938
      if (integer_nonzerop (const_binop (BIT_AND_EXPR, l_const,
4939
                                         fold_build1 (BIT_NOT_EXPR,
4940
                                                      lntype, ll_mask),
4941
                                         0)))
4942
        {
4943
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4944
 
4945
          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4946
        }
4947
    }
4948
  if (r_const)
4949
    {
4950
      r_const = fold_convert (lntype, r_const);
4951
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4952
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4953
      if (integer_nonzerop (const_binop (BIT_AND_EXPR, r_const,
4954
                                         fold_build1 (BIT_NOT_EXPR,
4955
                                                      lntype, rl_mask),
4956
                                         0)))
4957
        {
4958
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4959
 
4960
          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4961
        }
4962
    }
4963
 
4964
  /* If the right sides are not constant, do the same for it.  Also,
4965
     disallow this optimization if a size or signedness mismatch occurs
4966
     between the left and right sides.  */
4967
  if (l_const == 0)
4968
    {
4969
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4970
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4971
          /* Make sure the two fields on the right
4972
             correspond to the left without being swapped.  */
4973
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4974
        return 0;
4975
 
4976
      first_bit = MIN (lr_bitpos, rr_bitpos);
4977
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4978
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
4979
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4980
                              volatilep);
4981
      if (rnmode == VOIDmode)
4982
        return 0;
4983
 
4984
      rnbitsize = GET_MODE_BITSIZE (rnmode);
4985
      rnbitpos = first_bit & ~ (rnbitsize - 1);
4986
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4987
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4988
 
4989
      if (BYTES_BIG_ENDIAN)
4990
        {
4991
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4992
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4993
        }
4994
 
4995
      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4996
                             size_int (xlr_bitpos), 0);
4997
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4998
                             size_int (xrr_bitpos), 0);
4999
 
5000
      /* Make a mask that corresponds to both fields being compared.
5001
         Do this for both items being compared.  If the operands are the
5002
         same size and the bits being compared are in the same position
5003
         then we can do this by masking both and comparing the masked
5004
         results.  */
5005
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5006
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5007
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5008
        {
5009
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5010
                                    ll_unsignedp || rl_unsignedp);
5011
          if (! all_ones_mask_p (ll_mask, lnbitsize))
5012
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5013
 
5014
          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5015
                                    lr_unsignedp || rr_unsignedp);
5016
          if (! all_ones_mask_p (lr_mask, rnbitsize))
5017
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5018
 
5019
          return build2 (wanted_code, truth_type, lhs, rhs);
5020
        }
5021
 
5022
      /* There is still another way we can do something:  If both pairs of
5023
         fields being compared are adjacent, we may be able to make a wider
5024
         field containing them both.
5025
 
5026
         Note that we still must mask the lhs/rhs expressions.  Furthermore,
5027
         the mask must be shifted to account for the shift done by
5028
         make_bit_field_ref.  */
5029
      if ((ll_bitsize + ll_bitpos == rl_bitpos
5030
           && lr_bitsize + lr_bitpos == rr_bitpos)
5031
          || (ll_bitpos == rl_bitpos + rl_bitsize
5032
              && lr_bitpos == rr_bitpos + rr_bitsize))
5033
        {
5034
          tree type;
5035
 
5036
          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5037
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5038
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5039
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5040
 
5041
          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5042
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5043
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5044
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5045
 
5046
          /* Convert to the smaller type before masking out unwanted bits.  */
5047
          type = lntype;
5048
          if (lntype != rntype)
5049
            {
5050
              if (lnbitsize > rnbitsize)
5051
                {
5052
                  lhs = fold_convert (rntype, lhs);
5053
                  ll_mask = fold_convert (rntype, ll_mask);
5054
                  type = rntype;
5055
                }
5056
              else if (lnbitsize < rnbitsize)
5057
                {
5058
                  rhs = fold_convert (lntype, rhs);
5059
                  lr_mask = fold_convert (lntype, lr_mask);
5060
                  type = lntype;
5061
                }
5062
            }
5063
 
5064
          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5065
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5066
 
5067
          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5068
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5069
 
5070
          return build2 (wanted_code, truth_type, lhs, rhs);
5071
        }
5072
 
5073
      return 0;
5074
    }
5075
 
5076
  /* Handle the case of comparisons with constants.  If there is something in
5077
     common between the masks, those bits of the constants must be the same.
5078
     If not, the condition is always false.  Test for this to avoid generating
5079
     incorrect code below.  */
5080
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5081
  if (! integer_zerop (result)
5082
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5083
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5084
    {
5085
      if (wanted_code == NE_EXPR)
5086
        {
5087
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
5088
          return constant_boolean_node (true, truth_type);
5089
        }
5090
      else
5091
        {
5092
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5093
          return constant_boolean_node (false, truth_type);
5094
        }
5095
    }
5096
 
5097
  /* Construct the expression we will return.  First get the component
5098
     reference we will make.  Unless the mask is all ones the width of
5099
     that field, perform the mask operation.  Then compare with the
5100
     merged constant.  */
5101
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5102
                               ll_unsignedp || rl_unsignedp);
5103
 
5104
  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5105
  if (! all_ones_mask_p (ll_mask, lnbitsize))
5106
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5107
 
5108
  return build2 (wanted_code, truth_type, result,
5109
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5110
}
5111
 
5112
/* Optimize a comparison of a MIN_EXPR or MAX_EXPR with a constant.

   CODE is the comparison operator (EQ_EXPR, NE_EXPR, LT_EXPR, LE_EXPR,
   GT_EXPR or GE_EXPR), TYPE is the type of the comparison result, OP0 is
   the MIN_EXPR/MAX_EXPR operand and OP1 is the constant it is compared
   against.  Returns the simplified tree, or NULL_TREE if the operands do
   not have the required shape (both constants must be INTEGER_CSTs
   without overflow).  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;	/* Constant the MIN/MAX is compared to.  */
  tree minmax_const;		/* Constant operand inside the MIN/MAX.  */
  int consts_equal, consts_lt;
  tree inner;			/* Non-constant operand inside the MIN/MAX.  */

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  /* NOTE(review): operand 1 is fetched before verifying that ARG0 really
     is a MIN_EXPR/MAX_EXPR; the guard below rejects other codes before
     the fetched values can influence the result.  */
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* Handle these by inverting the comparison, simplifying the
	   inverted form, and inverting the result again.
	   FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      /* a >= b is (a == b) || (a > b); each half is handled below.  */
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
5215
 
5216
/* T is an integer expression that is being multiplied, divided, or taken a
5217
   modulus (CODE says which and what kind of divide or modulus) by a
5218
   constant C.  See if we can eliminate that operation by folding it with
5219
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
5220
   should be used for the computation if wider than our type.
5221
 
5222
   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5223
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
5224
   expression would not overflow or that overflow is undefined for the type
5225
   in the language in question.
5226
 
5227
   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5228
   the machine has a multiply-accumulate insn or that this is part of an
5229
   addressing calculation.
5230
 
5231
   If we return a non-null expression, it is an equivalent form of the
5232
   original computation, but need not be in the original type.  */
5233
 
5234
static tree
5235
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5236
{
5237
  /* To avoid exponential search depth, refuse to allow recursion past
5238
     three levels.  Beyond that (1) it's highly unlikely that we'll find
5239
     something interesting and (2) we've probably processed it before
5240
     when we built the inner expression.  */
5241
 
5242
  static int depth;
5243
  tree ret;
5244
 
5245
  if (depth > 3)
5246
    return NULL;
5247
 
5248
  depth++;
5249
  ret = extract_muldiv_1 (t, c, code, wide_type);
5250
  depth--;
5251
 
5252
  return ret;
5253
}
5254
 
5255
/* Worker for extract_muldiv.  T, C, CODE and WIDE_TYPE are as described
   in the comment before extract_muldiv.  Dispatches on the tree code of
   T and returns an equivalent form of the requested operation, or 0 if
   no simplification was found.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Push the operation inside the ABS/NEGATE: op (x) * c
	 becomes op (x * c).  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* A negative constant reverses the order, so MIN and MAX
	     must be swapped.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5535
 
5536
/* Return a node which has the indicated constant VALUE (either 0 or
5537
   1), and is of the indicated TYPE.  */
5538
 
5539
tree
5540
constant_boolean_node (int value, tree type)
5541
{
5542
  if (type == integer_type_node)
5543
    return value ? integer_one_node : integer_zero_node;
5544
  else if (type == boolean_type_node)
5545
    return value ? boolean_true_node : boolean_false_node;
5546
  else
5547
    return build_int_cst (type, value);
5548
}
5549
 
5550
 
5551
/* Return true if expr looks like an ARRAY_REF and set base and
5552
   offset to the appropriate trees.  If there is no offset,
5553
   offset is set to NULL_TREE.  Base will be canonicalized to
5554
   something you can get the element type from using
5555
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5556
   in bytes to the base.  */
5557
 
5558
static bool
5559
extract_array_ref (tree expr, tree *base, tree *offset)
5560
{
5561
  /* One canonical form is a PLUS_EXPR with the first
5562
     argument being an ADDR_EXPR with a possible NOP_EXPR
5563
     attached.  */
5564
  if (TREE_CODE (expr) == PLUS_EXPR)
5565
    {
5566
      tree op0 = TREE_OPERAND (expr, 0);
5567
      tree inner_base, dummy1;
5568
      /* Strip NOP_EXPRs here because the C frontends and/or
5569
         folders present us (int *)&x.a + 4B possibly.  */
5570
      STRIP_NOPS (op0);
5571
      if (extract_array_ref (op0, &inner_base, &dummy1))
5572
        {
5573
          *base = inner_base;
5574
          if (dummy1 == NULL_TREE)
5575
            *offset = TREE_OPERAND (expr, 1);
5576
          else
5577
            *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5578
                                   dummy1, TREE_OPERAND (expr, 1));
5579
          return true;
5580
        }
5581
    }
5582
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5583
     which we transform into an ADDR_EXPR with appropriate
5584
     offset.  For other arguments to the ADDR_EXPR we assume
5585
     zero offset and as such do not care about the ADDR_EXPR
5586
     type and strip possible nops from it.  */
5587
  else if (TREE_CODE (expr) == ADDR_EXPR)
5588
    {
5589
      tree op0 = TREE_OPERAND (expr, 0);
5590
      if (TREE_CODE (op0) == ARRAY_REF)
5591
        {
5592
          tree idx = TREE_OPERAND (op0, 1);
5593
          *base = TREE_OPERAND (op0, 0);
5594
          *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5595
                                 array_ref_element_size (op0));
5596
        }
5597
      else
5598
        {
5599
          /* Handle array-to-pointer decay as &a.  */
5600
          if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5601
            *base = TREE_OPERAND (expr, 0);
5602
          else
5603
            *base = expr;
5604
          *offset = NULL_TREE;
5605
        }
5606
      return true;
5607
    }
5608
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
5609
  else if (SSA_VAR_P (expr)
5610
           && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5611
    {
5612
      *base = expr;
5613
      *offset = NULL_TREE;
5614
      return true;
5615
    }
5616
 
5617
  return false;
5618
}
5619
 
5620
 
5621
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5622
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5623
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5624
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5625
   COND is the first argument to CODE; otherwise (as in the example
5626
   given here), it is the second argument.  TYPE is the type of the
5627
   original expression.  Return NULL_TREE if no simplification is
5628
   possible.  */
5629
 
5630
static tree
5631
fold_binary_op_with_conditional_arg (enum tree_code code,
5632
                                     tree type, tree op0, tree op1,
5633
                                     tree cond, tree arg, int cond_first_p)
5634
{
5635
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5636
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5637
  tree test, true_value, false_value;
5638
  tree lhs = NULL_TREE;
5639
  tree rhs = NULL_TREE;
5640
 
5641
  /* This transformation is only worthwhile if we don't have to wrap
5642
     arg in a SAVE_EXPR, and the operation can be simplified on at least
5643
     one of the branches once its pushed inside the COND_EXPR.  */
5644
  if (!TREE_CONSTANT (arg))
5645
    return NULL_TREE;
5646
 
5647
  if (TREE_CODE (cond) == COND_EXPR)
5648
    {
5649
      test = TREE_OPERAND (cond, 0);
5650
      true_value = TREE_OPERAND (cond, 1);
5651
      false_value = TREE_OPERAND (cond, 2);
5652
      /* If this operand throws an expression, then it does not make
5653
         sense to try to perform a logical or arithmetic operation
5654
         involving it.  */
5655
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
5656
        lhs = true_value;
5657
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
5658
        rhs = false_value;
5659
    }
5660
  else
5661
    {
5662
      tree testtype = TREE_TYPE (cond);
5663
      test = cond;
5664
      true_value = constant_boolean_node (true, testtype);
5665
      false_value = constant_boolean_node (false, testtype);
5666
    }
5667
 
5668
  arg = fold_convert (arg_type, arg);
5669
  if (lhs == 0)
5670
    {
5671
      true_value = fold_convert (cond_type, true_value);
5672
      if (cond_first_p)
5673
        lhs = fold_build2 (code, type, true_value, arg);
5674
      else
5675
        lhs = fold_build2 (code, type, arg, true_value);
5676
    }
5677
  if (rhs == 0)
5678
    {
5679
      false_value = fold_convert (cond_type, false_value);
5680
      if (cond_first_p)
5681
        rhs = fold_build2 (code, type, false_value, arg);
5682
      else
5683
        rhs = fold_build2 (code, type, arg, false_value);
5684
    }
5685
 
5686
  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5687
  return fold_convert (type, test);
5688
}
5689
 
5690
 
5691
/* Subroutine of fold() that checks for the addition of +/- 0.0.
5692
 
5693
   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5694
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
5695
   ADDEND is the same as X.
5696
 
5697
   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5698
   and finite.  The problematic cases are when X is zero, and its mode
5699
   has signed zeros.  In the case of rounding towards -infinity,
5700
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
5701
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
5702
 
5703
static bool
5704
fold_real_zero_addition_p (tree type, tree addend, int negate)
5705
{
5706
  if (!real_zerop (addend))
5707
    return false;
5708
 
5709
  /* Don't allow the fold with -fsignaling-nans.  */
5710
  if (HONOR_SNANS (TYPE_MODE (type)))
5711
    return false;
5712
 
5713
  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
5714
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5715
    return true;
5716
 
5717
  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
5718
  if (TREE_CODE (addend) == REAL_CST
5719
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5720
    negate = !negate;
5721
 
5722
  /* The mode has signed zeros, and we have to honor their sign.
5723
     In this situation, there is only one case we can return true for.
5724
     X - 0 is the same as X unless rounding towards -infinity is
5725
     supported.  */
5726
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5727
}
5728
 
5729
/* Subroutine of fold() that checks comparisons of built-in math
5730
   functions against real constants.
5731
 
5732
   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5733
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
5734
   is the type of the result and ARG0 and ARG1 are the operands of the
5735
   comparison.  ARG1 must be a TREE_REAL_CST.
5736
 
5737
   The function returns the constant folded tree if a simplification
5738
   can be made, and NULL_TREE otherwise.  */
5739
 
5740
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  /* Only comparisons of sqrt(x) against a real constant are handled.  */
  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG is the (single) argument of the sqrt call.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          /* C2 = c*c, rounded to the operand's mode; sqrt(x) > c is
             then equivalent to x > c2 modulo NaN/Inf concerns.  */
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          /* As above, compare x against c*c in the operand's mode.  */
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
                 This needs save_expr, which is unsafe at global scope
                 or with placeholders.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              /* ARG is used twice below; evaluate it only once.  */
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
5854
 
5855
/* Subroutine of fold() that optimizes comparisons against Infinities,
5856
   either +Inf or -Inf.
5857
 
5858
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5859
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5860
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
5861
 
5862
   The function returns the constant folded tree if a simplification
5863
   can be made, and NULL_TREE otherwise.  */
5864
 
5865
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.
     Thereafter the switch reasons uniformly about +Inf; NEG is
     consulted again only to pick the mirrored operator.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
         save_expr is unsafe at global scope or with placeholders.  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
         not appropriate if we are in gimple form.  */
      if (in_gimple_form)
        return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
5937
 
5938
/* Subroutine of fold() that optimizes comparisons of a division by
5939
   a nonzero integer constant against an integer constant, i.e.
5940
   X/C1 op C2.
5941
 
5942
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5943
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5944
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5945
 
5946
   The function returns the constant folded tree if a simplification
5947
   can be made, and NULL_TREE otherwise.  */
5948
 
5949
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  /* [LO, HI] will be the inclusive range of values of ARG00 for which
     ARG00 / ARG01 == ARG1; the comparison is then rewritten as a range
     check against that interval.  */
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
                         TREE_INT_CST_HIGH (arg01),
                         TREE_INT_CST_LOW (arg1),
                         TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  /* TREE_OVERFLOW on PROD/HI below records that an interval endpoint
     lies outside the representable range of the type.  */
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      /* Unsigned: the range is [prod, prod + (arg01 - 1)].  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
                             TREE_INT_CST_HIGH (prod),
                             TREE_INT_CST_LOW (tmp),
                             TREE_INT_CST_HIGH (tmp),
                             &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
                           TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed division, positive divisor: which endpoint PROD is
         depends on the sign of the quotient ARG1 (truncating
         division rounds towards zero).  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* An overflowed endpoint means the interval is clipped by the type's
     range, so one (or both) of the bound tests becomes vacuous.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6081
 
6082
 
6083
/* If CODE with arguments ARG0 and ARG1 represents a single bit
6084
   equality/inequality test, then return a simplified form of the test
6085
   using a sign testing.  Otherwise return NULL.  TYPE is the desired
6086
   result type.  */
6087
 
6088
static tree
6089
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6090
                                     tree result_type)
6091
{
6092
  /* If this is testing a single bit, we can optimize the test.  */
6093
  if ((code == NE_EXPR || code == EQ_EXPR)
6094
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6095
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6096
    {
6097
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
6098
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6099
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6100
 
6101
      if (arg00 != NULL_TREE
6102
          /* This is only a win if casting to a signed type is cheap,
6103
             i.e. when arg00's type is not a partial mode.  */
6104
          && TYPE_PRECISION (TREE_TYPE (arg00))
6105
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6106
        {
6107
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6108
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6109
                              result_type, fold_convert (stype, arg00),
6110
                              fold_convert (stype, integer_zero_node));
6111
        }
6112
    }
6113
 
6114
  return NULL_TREE;
6115
}
6116
 
6117
/* If CODE with arguments ARG0 and ARG1 represents a single bit
6118
   equality/inequality test, then return a simplified form of
6119
   the test using shifts and logical operations.  Otherwise return
6120
   NULL.  TYPE is the desired result type.  */
6121
 
6122
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* BITNUM is the index of the single bit tested by the AND mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the tested bit down to bit position zero.  */
      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      /* For the == 0 sense, flip the low bit with XOR so the final
         AND yields 1 exactly when the bit was clear.  */
      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6197
 
6198
/* Check whether we are allowed to reorder operands arg0 and arg1,
6199
   such that the evaluation of arg1 occurs before arg0.  */
6200
 
6201
static bool
6202
reorder_operands_p (tree arg0, tree arg1)
6203
{
6204
  if (! flag_evaluation_order)
6205
      return true;
6206
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6207
    return true;
6208
  return ! TREE_SIDE_EFFECTS (arg0)
6209
         && ! TREE_SIDE_EFFECTS (arg1);
6210
}
6211
 
6212
/* Test whether it is preferable two swap two operands, ARG0 and
6213
   ARG1, for example because ARG0 is an integer constant and ARG1
6214
   isn't.  If REORDER is true, only recommend swapping if we can
6215
   evaluate the operands in reverse order.  */
6216
 
6217
bool
6218
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6219
{
6220
  STRIP_SIGN_NOPS (arg0);
6221
  STRIP_SIGN_NOPS (arg1);
6222
 
6223
  if (TREE_CODE (arg1) == INTEGER_CST)
6224
    return 0;
6225
  if (TREE_CODE (arg0) == INTEGER_CST)
6226
    return 1;
6227
 
6228
  if (TREE_CODE (arg1) == REAL_CST)
6229
    return 0;
6230
  if (TREE_CODE (arg0) == REAL_CST)
6231
    return 1;
6232
 
6233
  if (TREE_CODE (arg1) == COMPLEX_CST)
6234
    return 0;
6235
  if (TREE_CODE (arg0) == COMPLEX_CST)
6236
    return 1;
6237
 
6238
  if (TREE_CONSTANT (arg1))
6239
    return 0;
6240
  if (TREE_CONSTANT (arg0))
6241
    return 1;
6242
 
6243
  if (optimize_size)
6244
    return 0;
6245
 
6246
  if (reorder && flag_evaluation_order
6247
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6248
    return 0;
6249
 
6250
  if (DECL_P (arg1))
6251
    return 0;
6252
  if (DECL_P (arg0))
6253
    return 1;
6254
 
6255
  /* It is preferable to swap two SSA_NAME to ensure a canonical form
6256
     for commutative and comparison operators.  Ensuring a canonical
6257
     form allows the optimizers to find additional redundancies without
6258
     having to explicitly check for both orderings.  */
6259
  if (TREE_CODE (arg0) == SSA_NAME
6260
      && TREE_CODE (arg1) == SSA_NAME
6261
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6262
    return 1;
6263
 
6264
  return 0;
6265
}
6266
 
6267
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6268
   ARG0 is extended to a wider type.  */
6269
 
6270
static tree
6271
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6272
{
6273
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6274
  tree arg1_unw;
6275
  tree shorter_type, outer_type;
6276
  tree min, max;
6277
  bool above, below;
6278
 
6279
  if (arg0_unw == arg0)
6280
    return NULL_TREE;
6281
  shorter_type = TREE_TYPE (arg0_unw);
6282
 
6283
#ifdef HAVE_canonicalize_funcptr_for_compare
6284
  /* Disable this optimization if we're casting a function pointer
6285
     type on targets that require function pointer canonicalization.  */
6286
  if (HAVE_canonicalize_funcptr_for_compare
6287
      && TREE_CODE (shorter_type) == POINTER_TYPE
6288
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6289
    return NULL_TREE;
6290
#endif
6291
 
6292
  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6293
    return NULL_TREE;
6294
 
6295
  arg1_unw = get_unwidened (arg1, shorter_type);
6296
 
6297
  /* If possible, express the comparison in the shorter mode.  */
6298
  if ((code == EQ_EXPR || code == NE_EXPR
6299
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6300
      && (TREE_TYPE (arg1_unw) == shorter_type
6301
          || (TREE_CODE (arg1_unw) == INTEGER_CST
6302
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
6303
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6304
              && int_fits_type_p (arg1_unw, shorter_type))))
6305
    return fold_build2 (code, type, arg0_unw,
6306
                       fold_convert (shorter_type, arg1_unw));
6307
 
6308
  if (TREE_CODE (arg1_unw) != INTEGER_CST
6309
      || TREE_CODE (shorter_type) != INTEGER_TYPE
6310
      || !int_fits_type_p (arg1_unw, shorter_type))
6311
    return NULL_TREE;
6312
 
6313
  /* If we are comparing with the integer that does not fit into the range
6314
     of the shorter type, the result is known.  */
6315
  outer_type = TREE_TYPE (arg1_unw);
6316
  min = lower_bound_in_type (outer_type, shorter_type);
6317
  max = upper_bound_in_type (outer_type, shorter_type);
6318
 
6319
  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6320
                                                   max, arg1_unw));
6321
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6322
                                                   arg1_unw, min));
6323
 
6324
  switch (code)
6325
    {
6326
    case EQ_EXPR:
6327
      if (above || below)
6328
        return omit_one_operand (type, integer_zero_node, arg0);
6329
      break;
6330
 
6331
    case NE_EXPR:
6332
      if (above || below)
6333
        return omit_one_operand (type, integer_one_node, arg0);
6334
      break;
6335
 
6336
    case LT_EXPR:
6337
    case LE_EXPR:
6338
      if (above)
6339
        return omit_one_operand (type, integer_one_node, arg0);
6340
      else if (below)
6341
        return omit_one_operand (type, integer_zero_node, arg0);
6342
 
6343
    case GT_EXPR:
6344
    case GE_EXPR:
6345
      if (above)
6346
        return omit_one_operand (type, integer_zero_node, arg0);
6347
      else if (below)
6348
        return omit_one_operand (type, integer_one_node, arg0);
6349
 
6350
    default:
6351
      break;
6352
    }
6353
 
6354
  return NULL_TREE;
6355
}
6356
 
6357
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6358
   ARG0 just the signedness is changed.  */
6359
 
6360
static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* Only a pure sign change (same precision, different signedness)
     is handled here; a precision change is a widening/narrowing.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be either a constant we can refit to the inner type or
     a matching conversion from that same inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* An ordering comparison depends on signedness, so a signedness
     change is only safe for equality tests.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Rebuild the constant in the inner type, preserving any
         overflow flags from the original constant.  */
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
6412
 
6413
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6414
   step of the array.  Reconstructs s and delta in the case of s * delta
6415
   being an integer constant (and thus already folded).
6416
   ADDR is the address. MULT is the multiplicative expression.
6417
   If the function succeeds, the new address expression is returned.  Otherwise
6418
   NULL_TREE is returned.  */
6419
 
6420
static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A bare constant: S stays NULL and the divisibility check
         below decides whether DELTA matches the element size.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk inwards through the handled components looking for an
     ARRAY_REF whose element size matches S (or divides DELTA).
     Note: each `continue' also executes the increment, moving one
     component deeper.  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Copy the chain of component refs down to the ARRAY_REF so the
     original expression is left unmodified.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Fold DELTA into the array index: idx CODE delta, computed in the
     index type of the array's domain.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6516
 
6517
 
6518
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6519
   means A >= Y && A != MAX, but in this case we know that
6520
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6521
 
6522
static tree
6523
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6524
{
6525
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6526
 
6527
  if (TREE_CODE (bound) == LT_EXPR)
6528
    a = TREE_OPERAND (bound, 0);
6529
  else if (TREE_CODE (bound) == GT_EXPR)
6530
    a = TREE_OPERAND (bound, 1);
6531
  else
6532
    return NULL_TREE;
6533
 
6534
  typea = TREE_TYPE (a);
6535
  if (!INTEGRAL_TYPE_P (typea)
6536
      && !POINTER_TYPE_P (typea))
6537
    return NULL_TREE;
6538
 
6539
  if (TREE_CODE (ineq) == LT_EXPR)
6540
    {
6541
      a1 = TREE_OPERAND (ineq, 1);
6542
      y = TREE_OPERAND (ineq, 0);
6543
    }
6544
  else if (TREE_CODE (ineq) == GT_EXPR)
6545
    {
6546
      a1 = TREE_OPERAND (ineq, 0);
6547
      y = TREE_OPERAND (ineq, 1);
6548
    }
6549
  else
6550
    return NULL_TREE;
6551
 
6552
  if (TREE_TYPE (a1) != typea)
6553
    return NULL_TREE;
6554
 
6555
  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6556
  if (!integer_onep (diff))
6557
    return NULL_TREE;
6558
 
6559
  return fold_build2 (GE_EXPR, type, a, y);
6560
}
6561
 
6562
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  /* For a true unary operation, distribute it into the branches of a
     COMPOUND_EXPR or COND_EXPR operand so that each branch can be
     folded separately.  */
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              /* NOTE(review): VOID_TYPE_P is applied to the operands
                 themselves rather than to their types; an expression
                 node never has code VOID_TYPE, so these two tests are
                 vacuously true.  Likely TREE_TYPE (TREE_OPERAND (tem, N))
                 was intended -- confirm against upstream before
                 changing.  */
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          /* A comparison already yields a 0/1 value: for a boolean
             result simply retype the comparison node; otherwise (for
             non-integer result types) select between the operation
             applied to 1 and applied to 0.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
   }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* A conversion to the operand's own type is a no-op.  */
      if (TREE_TYPE (op0) == type)
        return op0;

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      /* Hoist a constant assignment out of the conversion:
         (T)(x = c) becomes (x = c, (T)c).  */
      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              /* Keep only the sign bit of AND1's type (and anything
                 above it); the fold is safe iff those bits are clear.
                 NOTE(review): left-shifting a negative HOST_WIDE_INT is
                 not portable ISO C -- this relies on the host
                 compiler's arithmetic semantics.  */
              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      /* Finally, fall back to constant-folding the conversion itself.  */
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      /* Nested VIEW_CONVERTs collapse to a single one.  */
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
        return fold_convert (type, negate_expr (arg0));
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      /* abs(-X) folds to abs(X).  */
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      /* Conjugation of a non-complex value is the identity.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      /* conj(a + bi) = a - bi, for both constructed and constant values.  */
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build2 (COMPLEX_EXPR, type,
                       TREE_OPERAND (arg0, 0),
                       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      /* conj distributes over + and -.  */
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      /* conj(conj(x)) = x.  */
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* ~~X folds to X.  */
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
          arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return NULL_TREE;
      /* realpart(a + bi) = a; keep b for its side effects.  */
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      /* realpart distributes over + and -.  */
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    case IMAGPART_EXPR:
      /* The imaginary part of a non-complex value is zero.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      /* imagpart(a + bi) = b; keep a for its side effects.  */
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      /* imagpart distributes over + and -.  */
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
7022
 
7023
/* Fold a binary expression of code CODE and type TYPE with operands
7024
   OP0 and OP1.  Return the folded expression if folding is
7025
   successful.  Otherwise, return NULL_TREE.  */
7026
 
7027
tree
7028
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7029
{
7030
  tree t1 = NULL_TREE;
7031
  tree tem;
7032
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7033
  enum tree_code_class kind = TREE_CODE_CLASS (code);
7034
 
7035
  /* WINS will be nonzero when the switch is done
7036
     if all operands are constant.  */
7037
  int wins = 1;
7038
 
7039
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
7040
              && TREE_CODE_LENGTH (code) == 2);
7041
 
7042
  arg0 = op0;
7043
  arg1 = op1;
7044
 
7045
  if (arg0)
7046
    {
7047
      tree subop;
7048
 
7049
      /* Strip any conversions that don't change the mode.  This is
7050
         safe for every expression, except for a comparison expression
7051
         because its signedness is derived from its operands.  So, in
7052
         the latter case, only strip conversions that don't change the
7053
         signedness.
7054
 
7055
         Note that this is done as an internal manipulation within the
7056
         constant folder, in order to find the simplest representation
7057
         of the arguments so that their form can be studied.  In any
7058
         cases, the appropriate type conversions should be put back in
7059
         the tree that will get out of the constant folder.  */
7060
      if (kind == tcc_comparison)
7061
        STRIP_SIGN_NOPS (arg0);
7062
      else
7063
        STRIP_NOPS (arg0);
7064
 
7065
      if (TREE_CODE (arg0) == COMPLEX_CST)
7066
        subop = TREE_REALPART (arg0);
7067
      else
7068
        subop = arg0;
7069
 
7070
      if (TREE_CODE (subop) != INTEGER_CST
7071
          && TREE_CODE (subop) != REAL_CST)
7072
        /* Note that TREE_CONSTANT isn't enough:
7073
           static var addresses are constant but we can't
7074
           do arithmetic on them.  */
7075
        wins = 0;
7076
    }
7077
 
7078
  if (arg1)
7079
    {
7080
      tree subop;
7081
 
7082
      /* Strip any conversions that don't change the mode.  This is
7083
         safe for every expression, except for a comparison expression
7084
         because its signedness is derived from its operands.  So, in
7085
         the latter case, only strip conversions that don't change the
7086
         signedness.
7087
 
7088
         Note that this is done as an internal manipulation within the
7089
         constant folder, in order to find the simplest representation
7090
         of the arguments so that their form can be studied.  In any
7091
         cases, the appropriate type conversions should be put back in
7092
         the tree that will get out of the constant folder.  */
7093
      if (kind == tcc_comparison)
7094
        STRIP_SIGN_NOPS (arg1);
7095
      else
7096
        STRIP_NOPS (arg1);
7097
 
7098
      if (TREE_CODE (arg1) == COMPLEX_CST)
7099
        subop = TREE_REALPART (arg1);
7100
      else
7101
        subop = arg1;
7102
 
7103
      if (TREE_CODE (subop) != INTEGER_CST
7104
          && TREE_CODE (subop) != REAL_CST)
7105
        /* Note that TREE_CONSTANT isn't enough:
7106
           static var addresses are constant but we can't
7107
           do arithmetic on them.  */
7108
        wins = 0;
7109
    }
7110
 
7111
  /* If this is a commutative operation, and ARG0 is a constant, move it
7112
     to ARG1 to reduce the number of tests below.  */
7113
  if (commutative_tree_code (code)
7114
      && tree_swap_operands_p (arg0, arg1, true))
7115
    return fold_build2 (code, type, op1, op0);
7116
 
7117
  /* Now WINS is set as described above,
7118
     ARG0 is the first operand of EXPR,
7119
     and ARG1 is the second operand (if it has more than one operand).
7120
 
7121
     First check for cases where an arithmetic operation is applied to a
7122
     compound, conditional, or comparison operation.  Push the arithmetic
7123
     operation inside the compound or conditional to see if any folding
7124
     can then be done.  Convert comparison to conditional for this purpose.
7125
     The also optimizes non-constant cases that used to be done in
7126
     expand_expr.
7127
 
7128
     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7129
     one of the operands is a comparison and the other is a comparison, a
7130
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
7131
     code below would make the expression more complex.  Change it to a
7132
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
7133
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
7134
 
7135
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7136
       || code == EQ_EXPR || code == NE_EXPR)
7137
      && ((truth_value_p (TREE_CODE (arg0))
7138
           && (truth_value_p (TREE_CODE (arg1))
7139
               || (TREE_CODE (arg1) == BIT_AND_EXPR
7140
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
7141
          || (truth_value_p (TREE_CODE (arg1))
7142
              && (truth_value_p (TREE_CODE (arg0))
7143
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
7144
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
7145
    {
7146
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7147
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7148
                         : TRUTH_XOR_EXPR,
7149
                         boolean_type_node,
7150
                         fold_convert (boolean_type_node, arg0),
7151
                         fold_convert (boolean_type_node, arg1));
7152
 
7153
      if (code == EQ_EXPR)
7154
        tem = invert_truthvalue (tem);
7155
 
7156
      return fold_convert (type, tem);
7157
    }
7158
 
7159
  if (TREE_CODE_CLASS (code) == tcc_binary
7160
      || TREE_CODE_CLASS (code) == tcc_comparison)
7161
    {
7162
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
7163
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7164
                       fold_build2 (code, type,
7165
                                    TREE_OPERAND (arg0, 1), op1));
7166
      if (TREE_CODE (arg1) == COMPOUND_EXPR
7167
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7168
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7169
                       fold_build2 (code, type,
7170
                                    op0, TREE_OPERAND (arg1, 1)));
7171
 
7172
      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7173
        {
7174
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7175
                                                     arg0, arg1,
7176
                                                     /*cond_first_p=*/1);
7177
          if (tem != NULL_TREE)
7178
            return tem;
7179
        }
7180
 
7181
      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7182
        {
7183
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7184
                                                     arg1, arg0,
7185
                                                     /*cond_first_p=*/0);
7186
          if (tem != NULL_TREE)
7187
            return tem;
7188
        }
7189
    }
7190
 
7191
  switch (code)
7192
    {
7193
    case PLUS_EXPR:
7194
      /* A + (-B) -> A - B */
7195
      if (TREE_CODE (arg1) == NEGATE_EXPR)
7196
        return fold_build2 (MINUS_EXPR, type,
7197
                            fold_convert (type, arg0),
7198
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
7199
      /* (-A) + B -> B - A */
7200
      if (TREE_CODE (arg0) == NEGATE_EXPR
7201
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7202
        return fold_build2 (MINUS_EXPR, type,
7203
                            fold_convert (type, arg1),
7204
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
7205
      /* Convert ~A + 1 to -A.  */
7206
      if (INTEGRAL_TYPE_P (type)
7207
          && TREE_CODE (arg0) == BIT_NOT_EXPR
7208
          && integer_onep (arg1))
7209
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7210
 
7211
      if (! FLOAT_TYPE_P (type))
7212
        {
7213
          if (integer_zerop (arg1))
7214
            return non_lvalue (fold_convert (type, arg0));
7215
 
7216
          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7217
             with a constant, and the two constants have no bits in common,
7218
             we should treat this as a BIT_IOR_EXPR since this may produce more
7219
             simplifications.  */
7220
          if (TREE_CODE (arg0) == BIT_AND_EXPR
7221
              && TREE_CODE (arg1) == BIT_AND_EXPR
7222
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7223
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7224
              && integer_zerop (const_binop (BIT_AND_EXPR,
7225
                                             TREE_OPERAND (arg0, 1),
7226
                                             TREE_OPERAND (arg1, 1), 0)))
7227
            {
7228
              code = BIT_IOR_EXPR;
7229
              goto bit_ior;
7230
            }
7231
 
7232
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7233
             (plus (plus (mult) (mult)) (foo)) so that we can
7234
             take advantage of the factoring cases below.  */
7235
          if (((TREE_CODE (arg0) == PLUS_EXPR
7236
                || TREE_CODE (arg0) == MINUS_EXPR)
7237
               && TREE_CODE (arg1) == MULT_EXPR)
7238
              || ((TREE_CODE (arg1) == PLUS_EXPR
7239
                   || TREE_CODE (arg1) == MINUS_EXPR)
7240
                  && TREE_CODE (arg0) == MULT_EXPR))
7241
            {
7242
              tree parg0, parg1, parg, marg;
7243
              enum tree_code pcode;
7244
 
7245
              if (TREE_CODE (arg1) == MULT_EXPR)
7246
                parg = arg0, marg = arg1;
7247
              else
7248
                parg = arg1, marg = arg0;
7249
              pcode = TREE_CODE (parg);
7250
              parg0 = TREE_OPERAND (parg, 0);
7251
              parg1 = TREE_OPERAND (parg, 1);
7252
              STRIP_NOPS (parg0);
7253
              STRIP_NOPS (parg1);
7254
 
7255
              if (TREE_CODE (parg0) == MULT_EXPR
7256
                  && TREE_CODE (parg1) != MULT_EXPR)
7257
                return fold_build2 (pcode, type,
7258
                                    fold_build2 (PLUS_EXPR, type,
7259
                                                 fold_convert (type, parg0),
7260
                                                 fold_convert (type, marg)),
7261
                                    fold_convert (type, parg1));
7262
              if (TREE_CODE (parg0) != MULT_EXPR
7263
                  && TREE_CODE (parg1) == MULT_EXPR)
7264
                return fold_build2 (PLUS_EXPR, type,
7265
                                    fold_convert (type, parg0),
7266
                                    fold_build2 (pcode, type,
7267
                                                 fold_convert (type, marg),
7268
                                                 fold_convert (type,
7269
                                                               parg1)));
7270
            }
7271
 
7272
          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7273
            {
7274
              tree arg00, arg01, arg10, arg11;
7275
              tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7276
 
7277
              /* (A * C) + (B * C) -> (A+B) * C.
7278
                 We are most concerned about the case where C is a constant,
7279
                 but other combinations show up during loop reduction.  Since
7280
                 it is not difficult, try all four possibilities.  */
7281
 
7282
              arg00 = TREE_OPERAND (arg0, 0);
7283
              arg01 = TREE_OPERAND (arg0, 1);
7284
              arg10 = TREE_OPERAND (arg1, 0);
7285
              arg11 = TREE_OPERAND (arg1, 1);
7286
              same = NULL_TREE;
7287
 
7288
              if (operand_equal_p (arg01, arg11, 0))
7289
                same = arg01, alt0 = arg00, alt1 = arg10;
7290
              else if (operand_equal_p (arg00, arg10, 0))
7291
                same = arg00, alt0 = arg01, alt1 = arg11;
7292
              else if (operand_equal_p (arg00, arg11, 0))
7293
                same = arg00, alt0 = arg01, alt1 = arg10;
7294
              else if (operand_equal_p (arg01, arg10, 0))
7295
                same = arg01, alt0 = arg00, alt1 = arg11;
7296
 
7297
              /* No identical multiplicands; see if we can find a common
7298
                 power-of-two factor in non-power-of-two multiplies.  This
7299
                 can help in multi-dimensional array access.  */
7300
              else if (TREE_CODE (arg01) == INTEGER_CST
7301
                       && TREE_CODE (arg11) == INTEGER_CST
7302
                       && TREE_INT_CST_HIGH (arg01) == 0
7303
                       && TREE_INT_CST_HIGH (arg11) == 0)
7304
                {
7305
                  HOST_WIDE_INT int01, int11, tmp;
7306
                  int01 = TREE_INT_CST_LOW (arg01);
7307
                  int11 = TREE_INT_CST_LOW (arg11);
7308
 
7309
                  /* Move min of absolute values to int11.  */
7310
                  if ((int01 >= 0 ? int01 : -int01)
7311
                      < (int11 >= 0 ? int11 : -int11))
7312
                    {
7313
                      tmp = int01, int01 = int11, int11 = tmp;
7314
                      alt0 = arg00, arg00 = arg10, arg10 = alt0;
7315
                      alt0 = arg01, arg01 = arg11, arg11 = alt0;
7316
                    }
7317
 
7318
                  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7319
                    {
7320
                      alt0 = fold_build2 (MULT_EXPR, type, arg00,
7321
                                          build_int_cst (NULL_TREE,
7322
                                                         int01 / int11));
7323
                      alt1 = arg10;
7324
                      same = arg11;
7325
                    }
7326
                }
7327
 
7328
              if (same)
7329
                return fold_build2 (MULT_EXPR, type,
7330
                                    fold_build2 (PLUS_EXPR, type,
7331
                                                 fold_convert (type, alt0),
7332
                                                 fold_convert (type, alt1)),
7333
                                    fold_convert (type, same));
7334
            }
7335
 
7336
          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7337
             of the array.  Loop optimizer sometimes produce this type of
7338
             expressions.  */
7339
          if (TREE_CODE (arg0) == ADDR_EXPR)
7340
            {
7341
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7342
              if (tem)
7343
                return fold_convert (type, tem);
7344
            }
7345
          else if (TREE_CODE (arg1) == ADDR_EXPR)
7346
            {
7347
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7348
              if (tem)
7349
                return fold_convert (type, tem);
7350
            }
7351
        }
7352
      else
7353
        {
7354
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
7355
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7356
            return non_lvalue (fold_convert (type, arg0));
7357
 
7358
          /* Likewise if the operands are reversed.  */
7359
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7360
            return non_lvalue (fold_convert (type, arg1));
7361
 
7362
          /* Convert X + -C into X - C.  */
7363
          if (TREE_CODE (arg1) == REAL_CST
7364
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7365
            {
7366
              tem = fold_negate_const (arg1, type);
7367
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7368
                return fold_build2 (MINUS_EXPR, type,
7369
                                    fold_convert (type, arg0),
7370
                                    fold_convert (type, tem));
7371
            }
7372
 
7373
          if (flag_unsafe_math_optimizations
7374
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7375
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7376
              && (tem = distribute_real_division (code, type, arg0, arg1)))
7377
            return tem;
7378
 
7379
          /* Convert x+x into x*2.0.  */
7380
          if (operand_equal_p (arg0, arg1, 0)
7381
              && SCALAR_FLOAT_TYPE_P (type))
7382
            return fold_build2 (MULT_EXPR, type, arg0,
7383
                                build_real (type, dconst2));
7384
 
7385
          /* Convert x*c+x into x*(c+1).  */
7386
          if (flag_unsafe_math_optimizations
7387
              && TREE_CODE (arg0) == MULT_EXPR
7388
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7389
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7390
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7391
            {
7392
              REAL_VALUE_TYPE c;
7393
 
7394
              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7395
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7396
              return fold_build2 (MULT_EXPR, type, arg1,
7397
                                  build_real (type, c));
7398
            }
7399
 
7400
          /* Convert x+x*c into x*(c+1).  */
7401
          if (flag_unsafe_math_optimizations
7402
              && TREE_CODE (arg1) == MULT_EXPR
7403
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7404
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7405
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7406
            {
7407
              REAL_VALUE_TYPE c;
7408
 
7409
              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7410
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7411
              return fold_build2 (MULT_EXPR, type, arg0,
7412
                                  build_real (type, c));
7413
            }
7414
 
7415
          /* Convert x*c1+x*c2 into x*(c1+c2).  */
7416
          if (flag_unsafe_math_optimizations
7417
              && TREE_CODE (arg0) == MULT_EXPR
7418
              && TREE_CODE (arg1) == MULT_EXPR
7419
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7420
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7421
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7422
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7423
              && operand_equal_p (TREE_OPERAND (arg0, 0),
7424
                                  TREE_OPERAND (arg1, 0), 0))
7425
            {
7426
              REAL_VALUE_TYPE c1, c2;
7427
 
7428
              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7429
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7430
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7431
              return fold_build2 (MULT_EXPR, type,
7432
                                  TREE_OPERAND (arg0, 0),
7433
                                  build_real (type, c1));
7434
            }
7435
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
7436
          if (flag_unsafe_math_optimizations
7437
              && TREE_CODE (arg1) == PLUS_EXPR
7438
              && TREE_CODE (arg0) != MULT_EXPR)
7439
            {
7440
              tree tree10 = TREE_OPERAND (arg1, 0);
7441
              tree tree11 = TREE_OPERAND (arg1, 1);
7442
              if (TREE_CODE (tree11) == MULT_EXPR
7443
                  && TREE_CODE (tree10) == MULT_EXPR)
7444
                {
7445
                  tree tree0;
7446
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7447
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7448
                }
7449
            }
7450
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).  */
7451
          if (flag_unsafe_math_optimizations
7452
              && TREE_CODE (arg0) == PLUS_EXPR
7453
              && TREE_CODE (arg1) != MULT_EXPR)
7454
            {
7455
              tree tree00 = TREE_OPERAND (arg0, 0);
7456
              tree tree01 = TREE_OPERAND (arg0, 1);
7457
              if (TREE_CODE (tree01) == MULT_EXPR
7458
                  && TREE_CODE (tree00) == MULT_EXPR)
7459
                {
7460
                  tree tree0;
7461
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7462
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7463
                }
7464
            }
7465
        }
7466
 
7467
     bit_rotate:
7468
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7469
         is a rotate of A by C1 bits.  */
7470
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7471
         is a rotate of A by B bits.  */
7472
      {
7473
        enum tree_code code0, code1;
7474
        code0 = TREE_CODE (arg0);
7475
        code1 = TREE_CODE (arg1);
7476
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7477
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7478
            && operand_equal_p (TREE_OPERAND (arg0, 0),
7479
                                TREE_OPERAND (arg1, 0), 0)
7480
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7481
          {
7482
            tree tree01, tree11;
7483
            enum tree_code code01, code11;
7484
 
7485
            tree01 = TREE_OPERAND (arg0, 1);
7486
            tree11 = TREE_OPERAND (arg1, 1);
7487
            STRIP_NOPS (tree01);
7488
            STRIP_NOPS (tree11);
7489
            code01 = TREE_CODE (tree01);
7490
            code11 = TREE_CODE (tree11);
7491
            if (code01 == INTEGER_CST
7492
                && code11 == INTEGER_CST
7493
                && TREE_INT_CST_HIGH (tree01) == 0
7494
                && TREE_INT_CST_HIGH (tree11) == 0
7495
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7496
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7497
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7498
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
7499
            else if (code11 == MINUS_EXPR)
7500
              {
7501
                tree tree110, tree111;
7502
                tree110 = TREE_OPERAND (tree11, 0);
7503
                tree111 = TREE_OPERAND (tree11, 1);
7504
                STRIP_NOPS (tree110);
7505
                STRIP_NOPS (tree111);
7506
                if (TREE_CODE (tree110) == INTEGER_CST
7507
                    && 0 == compare_tree_int (tree110,
7508
                                              TYPE_PRECISION
7509
                                              (TREE_TYPE (TREE_OPERAND
7510
                                                          (arg0, 0))))
7511
                    && operand_equal_p (tree01, tree111, 0))
7512
                  return build2 ((code0 == LSHIFT_EXPR
7513
                                  ? LROTATE_EXPR
7514
                                  : RROTATE_EXPR),
7515
                                 type, TREE_OPERAND (arg0, 0), tree01);
7516
              }
7517
            else if (code01 == MINUS_EXPR)
7518
              {
7519
                tree tree010, tree011;
7520
                tree010 = TREE_OPERAND (tree01, 0);
7521
                tree011 = TREE_OPERAND (tree01, 1);
7522
                STRIP_NOPS (tree010);
7523
                STRIP_NOPS (tree011);
7524
                if (TREE_CODE (tree010) == INTEGER_CST
7525
                    && 0 == compare_tree_int (tree010,
7526
                                              TYPE_PRECISION
7527
                                              (TREE_TYPE (TREE_OPERAND
7528
                                                          (arg0, 0))))
7529
                    && operand_equal_p (tree11, tree011, 0))
7530
                  return build2 ((code0 != LSHIFT_EXPR
7531
                                  ? LROTATE_EXPR
7532
                                  : RROTATE_EXPR),
7533
                                 type, TREE_OPERAND (arg0, 0), tree11);
7534
              }
7535
          }
7536
      }
7537
 
7538
    associate:
7539
      /* In most languages, can't associate operations on floats through
7540
         parentheses.  Rather than remember where the parentheses were, we
7541
         don't associate floats at all, unless the user has specified
7542
         -funsafe-math-optimizations.  */
7543
 
7544
      if (! wins
7545
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7546
        {
7547
          tree var0, con0, lit0, minus_lit0;
7548
          tree var1, con1, lit1, minus_lit1;
7549
 
7550
          /* Split both trees into variables, constants, and literals.  Then
7551
             associate each group together, the constants with literals,
7552
             then the result with variables.  This increases the chances of
7553
             literals being recombined later and of generating relocatable
7554
             expressions for the sum of a constant and literal.  */
7555
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7556
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7557
                             code == MINUS_EXPR);
7558
 
7559
          /* Only do something if we found more than two objects.  Otherwise,
7560
             nothing has changed and we risk infinite recursion.  */
7561
          if (2 < ((var0 != 0) + (var1 != 0)
7562
                   + (con0 != 0) + (con1 != 0)
7563
                   + (lit0 != 0) + (lit1 != 0)
7564
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
7565
            {
7566
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
7567
              if (code == MINUS_EXPR)
7568
                code = PLUS_EXPR;
7569
 
7570
              var0 = associate_trees (var0, var1, code, type);
7571
              con0 = associate_trees (con0, con1, code, type);
7572
              lit0 = associate_trees (lit0, lit1, code, type);
7573
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7574
 
7575
              /* Preserve the MINUS_EXPR if the negative part of the literal is
7576
                 greater than the positive part.  Otherwise, the multiplicative
7577
                 folding code (i.e extract_muldiv) may be fooled in case
7578
                 unsigned constants are subtracted, like in the following
7579
                 example: ((X*2 + 4) - 8U)/2.  */
7580
              if (minus_lit0 && lit0)
7581
                {
7582
                  if (TREE_CODE (lit0) == INTEGER_CST
7583
                      && TREE_CODE (minus_lit0) == INTEGER_CST
7584
                      && tree_int_cst_lt (lit0, minus_lit0))
7585
                    {
7586
                      minus_lit0 = associate_trees (minus_lit0, lit0,
7587
                                                    MINUS_EXPR, type);
7588
                      lit0 = 0;
7589
                    }
7590
                  else
7591
                    {
7592
                      lit0 = associate_trees (lit0, minus_lit0,
7593
                                              MINUS_EXPR, type);
7594
                      minus_lit0 = 0;
7595
                    }
7596
                }
7597
              if (minus_lit0)
7598
                {
7599
                  if (con0 == 0)
7600
                    return fold_convert (type,
7601
                                         associate_trees (var0, minus_lit0,
7602
                                                          MINUS_EXPR, type));
7603
                  else
7604
                    {
7605
                      con0 = associate_trees (con0, minus_lit0,
7606
                                              MINUS_EXPR, type);
7607
                      return fold_convert (type,
7608
                                           associate_trees (var0, con0,
7609
                                                            PLUS_EXPR, type));
7610
                    }
7611
                }
7612
 
7613
              con0 = associate_trees (con0, lit0, code, type);
7614
              return fold_convert (type, associate_trees (var0, con0,
7615
                                                          code, type));
7616
            }
7617
        }
7618
 
7619
    binary:
7620
      if (wins)
7621
        t1 = const_binop (code, arg0, arg1, 0);
7622
      if (t1 != NULL_TREE)
7623
        {
7624
          /* The return value should always have
7625
             the same type as the original expression.  */
7626
          if (TREE_TYPE (t1) != type)
7627
            t1 = fold_convert (type, t1);
7628
 
7629
          return t1;
7630
        }
7631
      return NULL_TREE;
7632
 
7633
    case MINUS_EXPR:
7634
      /* A - (-B) -> A + B */
7635
      if (TREE_CODE (arg1) == NEGATE_EXPR)
7636
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7637
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
7638
      if (TREE_CODE (arg0) == NEGATE_EXPR
7639
          && (FLOAT_TYPE_P (type)
7640
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7641
          && negate_expr_p (arg1)
7642
          && reorder_operands_p (arg0, arg1))
7643
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7644
                            TREE_OPERAND (arg0, 0));
7645
      /* Convert -A - 1 to ~A.  */
7646
      if (INTEGRAL_TYPE_P (type)
7647
          && TREE_CODE (arg0) == NEGATE_EXPR
7648
          && integer_onep (arg1))
7649
        return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7650
 
7651
      /* Convert -1 - A to ~A.  */
7652
      if (INTEGRAL_TYPE_P (type)
7653
          && integer_all_onesp (arg0))
7654
        return fold_build1 (BIT_NOT_EXPR, type, arg1);
7655
 
7656
      if (! FLOAT_TYPE_P (type))
7657
        {
7658
          if (! wins && integer_zerop (arg0))
7659
            return negate_expr (fold_convert (type, arg1));
7660
          if (integer_zerop (arg1))
7661
            return non_lvalue (fold_convert (type, arg0));
7662
 
7663
          /* Fold A - (A & B) into ~B & A.  */
7664
          if (!TREE_SIDE_EFFECTS (arg0)
7665
              && TREE_CODE (arg1) == BIT_AND_EXPR)
7666
            {
7667
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7668
                return fold_build2 (BIT_AND_EXPR, type,
7669
                                    fold_build1 (BIT_NOT_EXPR, type,
7670
                                                 TREE_OPERAND (arg1, 0)),
7671
                                    arg0);
7672
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7673
                return fold_build2 (BIT_AND_EXPR, type,
7674
                                    fold_build1 (BIT_NOT_EXPR, type,
7675
                                                 TREE_OPERAND (arg1, 1)),
7676
                                    arg0);
7677
            }
7678
 
7679
          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7680
             any power of 2 minus 1.  */
7681
          if (TREE_CODE (arg0) == BIT_AND_EXPR
7682
              && TREE_CODE (arg1) == BIT_AND_EXPR
7683
              && operand_equal_p (TREE_OPERAND (arg0, 0),
7684
                                  TREE_OPERAND (arg1, 0), 0))
7685
            {
7686
              tree mask0 = TREE_OPERAND (arg0, 1);
7687
              tree mask1 = TREE_OPERAND (arg1, 1);
7688
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7689
 
7690
              if (operand_equal_p (tem, mask1, 0))
7691
                {
7692
                  tem = fold_build2 (BIT_XOR_EXPR, type,
7693
                                     TREE_OPERAND (arg0, 0), mask1);
7694
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
7695
                }
7696
            }
7697
        }
7698
 
7699
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
7700
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7701
        return non_lvalue (fold_convert (type, arg0));
7702
 
7703
      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
7704
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
7705
         (-ARG1 + ARG0) reduces to -ARG1.  */
7706
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7707
        return negate_expr (fold_convert (type, arg1));
7708
 
7709
      /* Fold &x - &x.  This can happen from &x.foo - &x.
7710
         This is unsafe for certain floats even in non-IEEE formats.
7711
         In IEEE, it is unsafe because it does wrong for NaNs.
7712
         Also note that operand_equal_p is always false if an operand
7713
         is volatile.  */
7714
 
7715
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7716
          && operand_equal_p (arg0, arg1, 0))
7717
        return fold_convert (type, integer_zero_node);
7718
 
7719
      /* A - B -> A + (-B) if B is easily negatable.  */
7720
      if (!wins && negate_expr_p (arg1)
7721
          && ((FLOAT_TYPE_P (type)
7722
               /* Avoid this transformation if B is a positive REAL_CST.  */
7723
               && (TREE_CODE (arg1) != REAL_CST
7724
                   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7725
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7726
        return fold_build2 (PLUS_EXPR, type,
7727
                            fold_convert (type, arg0),
7728
                            fold_convert (type, negate_expr (arg1)));
7729
 
7730
      /* Try folding difference of addresses.  */
7731
      {
7732
        HOST_WIDE_INT diff;
7733
 
7734
        if ((TREE_CODE (arg0) == ADDR_EXPR
7735
             || TREE_CODE (arg1) == ADDR_EXPR)
7736
            && ptr_difference_const (arg0, arg1, &diff))
7737
          return build_int_cst_type (type, diff);
7738
      }
7739
 
7740
      /* Fold &a[i] - &a[j] to i-j.  */
7741
      if (TREE_CODE (arg0) == ADDR_EXPR
7742
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7743
          && TREE_CODE (arg1) == ADDR_EXPR
7744
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7745
        {
7746
          tree aref0 = TREE_OPERAND (arg0, 0);
7747
          tree aref1 = TREE_OPERAND (arg1, 0);
7748
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
7749
                               TREE_OPERAND (aref1, 0), 0))
7750
            {
7751
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7752
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7753
              tree esz = array_ref_element_size (aref0);
7754
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
7755
              return fold_build2 (MULT_EXPR, type, diff,
7756
                                  fold_convert (type, esz));
7757
 
7758
            }
7759
        }
7760
 
7761
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7762
         of the array.  Loop optimizer sometimes produce this type of
7763
         expressions.  */
7764
      if (TREE_CODE (arg0) == ADDR_EXPR)
7765
        {
7766
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7767
          if (tem)
7768
            return fold_convert (type, tem);
7769
        }
7770
 
7771
      if (flag_unsafe_math_optimizations
7772
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7773
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7774
          && (tem = distribute_real_division (code, type, arg0, arg1)))
7775
        return tem;
7776
 
7777
      if (TREE_CODE (arg0) == MULT_EXPR
7778
          && TREE_CODE (arg1) == MULT_EXPR
7779
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7780
        {
7781
          /* (A * C) - (B * C) -> (A-B) * C.  */
7782
          if (operand_equal_p (TREE_OPERAND (arg0, 1),
7783
                               TREE_OPERAND (arg1, 1), 0))
7784
            return fold_build2 (MULT_EXPR, type,
7785
                                fold_build2 (MINUS_EXPR, type,
7786
                                             TREE_OPERAND (arg0, 0),
7787
                                             TREE_OPERAND (arg1, 0)),
7788
                                TREE_OPERAND (arg0, 1));
7789
          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
7790
          if (operand_equal_p (TREE_OPERAND (arg0, 0),
7791
                               TREE_OPERAND (arg1, 0), 0))
7792
            return fold_build2 (MULT_EXPR, type,
7793
                                TREE_OPERAND (arg0, 0),
7794
                                fold_build2 (MINUS_EXPR, type,
7795
                                             TREE_OPERAND (arg0, 1),
7796
                                             TREE_OPERAND (arg1, 1)));
7797
        }
7798
 
7799
      goto associate;
7800
 
7801
    case MULT_EXPR:
7802
      /* (-A) * (-B) -> A * B  */
7803
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7804
        return fold_build2 (MULT_EXPR, type,
7805
                            TREE_OPERAND (arg0, 0),
7806
                            negate_expr (arg1));
7807
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7808
        return fold_build2 (MULT_EXPR, type,
7809
                            negate_expr (arg0),
7810
                            TREE_OPERAND (arg1, 0));
7811
 
7812
      if (! FLOAT_TYPE_P (type))
7813
        {
7814
          if (integer_zerop (arg1))
7815
            return omit_one_operand (type, arg1, arg0);
7816
          if (integer_onep (arg1))
7817
            return non_lvalue (fold_convert (type, arg0));
7818
          /* Transform x * -1 into -x.  */
7819
          if (integer_all_onesp (arg1))
7820
            return fold_convert (type, negate_expr (arg0));
7821
 
7822
          /* (a * (1 << b)) is (a << b)  */
7823
          if (TREE_CODE (arg1) == LSHIFT_EXPR
7824
              && integer_onep (TREE_OPERAND (arg1, 0)))
7825
            return fold_build2 (LSHIFT_EXPR, type, arg0,
7826
                                TREE_OPERAND (arg1, 1));
7827
          if (TREE_CODE (arg0) == LSHIFT_EXPR
7828
              && integer_onep (TREE_OPERAND (arg0, 0)))
7829
            return fold_build2 (LSHIFT_EXPR, type, arg1,
7830
                                TREE_OPERAND (arg0, 1));
7831
 
7832
          if (TREE_CODE (arg1) == INTEGER_CST
7833
              && 0 != (tem = extract_muldiv (op0,
7834
                                             fold_convert (type, arg1),
7835
                                             code, NULL_TREE)))
7836
            return fold_convert (type, tem);
7837
 
7838
        }
7839
      else
7840
        {
7841
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
7842
             when x is NaN, since x * 0 is also NaN.  Nor are they the
7843
             same in modes with signed zeros, since multiplying a
7844
             negative value by 0 gives -0, not +0.  */
7845
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7846
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7847
              && real_zerop (arg1))
7848
            return omit_one_operand (type, arg1, arg0);
7849
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
7850
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7851
              && real_onep (arg1))
7852
            return non_lvalue (fold_convert (type, arg0));
7853
 
7854
          /* Transform x * -1.0 into -x.  */
7855
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7856
              && real_minus_onep (arg1))
7857
            return fold_convert (type, negate_expr (arg0));
7858
 
7859
          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
7860
          if (flag_unsafe_math_optimizations
7861
              && TREE_CODE (arg0) == RDIV_EXPR
7862
              && TREE_CODE (arg1) == REAL_CST
7863
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7864
            {
7865
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7866
                                      arg1, 0);
7867
              if (tem)
7868
                return fold_build2 (RDIV_EXPR, type, tem,
7869
                                    TREE_OPERAND (arg0, 1));
7870
            }
7871
 
7872
          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
7873
          if (operand_equal_p (arg0, arg1, 0))
7874
            {
7875
              tree tem = fold_strip_sign_ops (arg0);
7876
              if (tem != NULL_TREE)
7877
                {
7878
                  tem = fold_convert (type, tem);
7879
                  return fold_build2 (MULT_EXPR, type, tem, tem);
7880
                }
7881
            }
7882
 
7883
          if (flag_unsafe_math_optimizations)
7884
            {
7885
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7886
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7887
 
7888
              /* Optimizations of root(...)*root(...).  */
7889
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7890
                {
7891
                  tree rootfn, arg, arglist;
7892
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7893
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7894
 
7895
                  /* Optimize sqrt(x)*sqrt(x) as x.  */
7896
                  if (BUILTIN_SQRT_P (fcode0)
7897
                      && operand_equal_p (arg00, arg10, 0)
7898
                      && ! HONOR_SNANS (TYPE_MODE (type)))
7899
                    return arg00;
7900
 
7901
                  /* Optimize root(x)*root(y) as root(x*y).  */
7902
                  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7903
                  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7904
                  arglist = build_tree_list (NULL_TREE, arg);
7905
                  return build_function_call_expr (rootfn, arglist);
7906
                }
7907
 
7908
              /* Optimize expN(x)*expN(y) as expN(x+y).  */
7909
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7910
                {
7911
                  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7912
                  tree arg = fold_build2 (PLUS_EXPR, type,
7913
                                          TREE_VALUE (TREE_OPERAND (arg0, 1)),
7914
                                          TREE_VALUE (TREE_OPERAND (arg1, 1)));
7915
                  tree arglist = build_tree_list (NULL_TREE, arg);
7916
                  return build_function_call_expr (expfn, arglist);
7917
                }
7918
 
7919
              /* Optimizations of pow(...)*pow(...).  */
7920
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7921
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7922
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7923
                {
7924
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7925
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7926
                                                                     1)));
7927
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7928
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7929
                                                                     1)));
7930
 
7931
                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
7932
                  if (operand_equal_p (arg01, arg11, 0))
7933
                    {
7934
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7935
                      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7936
                      tree arglist = tree_cons (NULL_TREE, arg,
7937
                                                build_tree_list (NULL_TREE,
7938
                                                                 arg01));
7939
                      return build_function_call_expr (powfn, arglist);
7940
                    }
7941
 
7942
                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
7943
                  if (operand_equal_p (arg00, arg10, 0))
7944
                    {
7945
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7946
                      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7947
                      tree arglist = tree_cons (NULL_TREE, arg00,
7948
                                                build_tree_list (NULL_TREE,
7949
                                                                 arg));
7950
                      return build_function_call_expr (powfn, arglist);
7951
                    }
7952
                }
7953
 
7954
              /* Optimize tan(x)*cos(x) as sin(x).  */
7955
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7956
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7957
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7958
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7959
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7960
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7961
                  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7962
                                      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7963
                {
7964
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7965
 
7966
                  if (sinfn != NULL_TREE)
7967
                    return build_function_call_expr (sinfn,
7968
                                                     TREE_OPERAND (arg0, 1));
7969
                }
7970
 
7971
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
7972
              if (fcode1 == BUILT_IN_POW
7973
                  || fcode1 == BUILT_IN_POWF
7974
                  || fcode1 == BUILT_IN_POWL)
7975
                {
7976
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7977
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7978
                                                                     1)));
7979
                  if (TREE_CODE (arg11) == REAL_CST
7980
                      && ! TREE_CONSTANT_OVERFLOW (arg11)
7981
                      && operand_equal_p (arg0, arg10, 0))
7982
                    {
7983
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7984
                      REAL_VALUE_TYPE c;
7985
                      tree arg, arglist;
7986
 
7987
                      c = TREE_REAL_CST (arg11);
7988
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7989
                      arg = build_real (type, c);
7990
                      arglist = build_tree_list (NULL_TREE, arg);
7991
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
7992
                      return build_function_call_expr (powfn, arglist);
7993
                    }
7994
                }
7995
 
7996
              /* Optimize pow(x,c)*x as pow(x,c+1).  */
7997
              if (fcode0 == BUILT_IN_POW
7998
                  || fcode0 == BUILT_IN_POWF
7999
                  || fcode0 == BUILT_IN_POWL)
8000
                {
8001
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8002
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8003
                                                                     1)));
8004
                  if (TREE_CODE (arg01) == REAL_CST
8005
                      && ! TREE_CONSTANT_OVERFLOW (arg01)
8006
                      && operand_equal_p (arg1, arg00, 0))
8007
                    {
8008
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8009
                      REAL_VALUE_TYPE c;
8010
                      tree arg, arglist;
8011
 
8012
                      c = TREE_REAL_CST (arg01);
8013
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8014
                      arg = build_real (type, c);
8015
                      arglist = build_tree_list (NULL_TREE, arg);
8016
                      arglist = tree_cons (NULL_TREE, arg1, arglist);
8017
                      return build_function_call_expr (powfn, arglist);
8018
                    }
8019
                }
8020
 
8021
              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
8022
              if (! optimize_size
8023
                  && operand_equal_p (arg0, arg1, 0))
8024
                {
8025
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8026
 
8027
                  if (powfn)
8028
                    {
8029
                      tree arg = build_real (type, dconst2);
8030
                      tree arglist = build_tree_list (NULL_TREE, arg);
8031
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
8032
                      return build_function_call_expr (powfn, arglist);
8033
                    }
8034
                }
8035
            }
8036
        }
8037
      goto associate;
8038
 
8039
    case BIT_IOR_EXPR:
8040
    bit_ior:
8041
      if (integer_all_onesp (arg1))
8042
        return omit_one_operand (type, arg1, arg0);
8043
      if (integer_zerop (arg1))
8044
        return non_lvalue (fold_convert (type, arg0));
8045
      if (operand_equal_p (arg0, arg1, 0))
8046
        return non_lvalue (fold_convert (type, arg0));
8047
 
8048
      /* ~X | X is -1.  */
8049
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8050
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8051
        {
8052
          t1 = build_int_cst (type, -1);
8053
          t1 = force_fit_type (t1, 0, false, false);
8054
          return omit_one_operand (type, t1, arg1);
8055
        }
8056
 
8057
      /* X | ~X is -1.  */
8058
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8059
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8060
        {
8061
          t1 = build_int_cst (type, -1);
8062
          t1 = force_fit_type (t1, 0, false, false);
8063
          return omit_one_operand (type, t1, arg0);
8064
        }
8065
 
8066
      t1 = distribute_bit_expr (code, type, arg0, arg1);
8067
      if (t1 != NULL_TREE)
8068
        return t1;
8069
 
8070
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8071
 
8072
         This results in more efficient code for machines without a NAND
8073
         instruction.  Combine will canonicalize to the first form
8074
         which will allow use of NAND instructions provided by the
8075
         backend if they exist.  */
8076
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8077
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
8078
        {
8079
          return fold_build1 (BIT_NOT_EXPR, type,
8080
                              build2 (BIT_AND_EXPR, type,
8081
                                      TREE_OPERAND (arg0, 0),
8082
                                      TREE_OPERAND (arg1, 0)));
8083
        }
8084
 
8085
      /* See if this can be simplified into a rotate first.  If that
8086
         is unsuccessful continue in the association code.  */
8087
      goto bit_rotate;
8088
 
8089
    case BIT_XOR_EXPR:
8090
      if (integer_zerop (arg1))
8091
        return non_lvalue (fold_convert (type, arg0));
8092
      if (integer_all_onesp (arg1))
8093
        return fold_build1 (BIT_NOT_EXPR, type, arg0);
8094
      if (operand_equal_p (arg0, arg1, 0))
8095
        return omit_one_operand (type, integer_zero_node, arg0);
8096
 
8097
      /* ~X ^ X is -1.  */
8098
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8099
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8100
        {
8101
          t1 = build_int_cst (type, -1);
8102
          t1 = force_fit_type (t1, 0, false, false);
8103
          return omit_one_operand (type, t1, arg1);
8104
        }
8105
 
8106
      /* X ^ ~X is -1.  */
8107
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8108
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8109
        {
8110
          t1 = build_int_cst (type, -1);
8111
          t1 = force_fit_type (t1, 0, false, false);
8112
          return omit_one_operand (type, t1, arg0);
8113
        }
8114
 
8115
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8116
         with a constant, and the two constants have no bits in common,
8117
         we should treat this as a BIT_IOR_EXPR since this may produce more
8118
         simplifications.  */
8119
      if (TREE_CODE (arg0) == BIT_AND_EXPR
8120
          && TREE_CODE (arg1) == BIT_AND_EXPR
8121
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8122
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8123
          && integer_zerop (const_binop (BIT_AND_EXPR,
8124
                                         TREE_OPERAND (arg0, 1),
8125
                                         TREE_OPERAND (arg1, 1), 0)))
8126
        {
8127
          code = BIT_IOR_EXPR;
8128
          goto bit_ior;
8129
        }
8130
 
8131
      /* (X | Y) ^ X -> Y & ~ X*/
8132
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
8133
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8134
        {
8135
          tree t2 = TREE_OPERAND (arg0, 1);
8136
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8137
                            arg1);
8138
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8139
                            fold_convert (type, t1));
8140
          return t1;
8141
        }
8142
 
8143
      /* (Y | X) ^ X -> Y & ~ X*/
8144
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
8145
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8146
        {
8147
          tree t2 = TREE_OPERAND (arg0, 0);
8148
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8149
                            arg1);
8150
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8151
                            fold_convert (type, t1));
8152
          return t1;
8153
        }
8154
 
8155
      /* X ^ (X | Y) -> Y & ~ X*/
8156
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
8157
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8158
        {
8159
          tree t2 = TREE_OPERAND (arg1, 1);
8160
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8161
                            arg0);
8162
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8163
                            fold_convert (type, t1));
8164
          return t1;
8165
        }
8166
 
8167
      /* X ^ (Y | X) -> Y & ~ X*/
8168
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
8169
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8170
        {
8171
          tree t2 = TREE_OPERAND (arg1, 0);
8172
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8173
                            arg0);
8174
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8175
                            fold_convert (type, t1));
8176
          return t1;
8177
        }
8178
 
8179
      /* Convert ~X ^ ~Y to X ^ Y.  */
8180
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8181
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
8182
        return fold_build2 (code, type,
8183
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
8184
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
8185
 
8186
      /* See if this can be simplified into a rotate first.  If that
8187
         is unsuccessful continue in the association code.  */
8188
      goto bit_rotate;
8189
 
8190
    case BIT_AND_EXPR:
8191
      if (integer_all_onesp (arg1))
8192
        return non_lvalue (fold_convert (type, arg0));
8193
      if (integer_zerop (arg1))
8194
        return omit_one_operand (type, arg1, arg0);
8195
      if (operand_equal_p (arg0, arg1, 0))
8196
        return non_lvalue (fold_convert (type, arg0));
8197
 
8198
      /* ~X & X is always zero.  */
8199
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8200
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8201
        return omit_one_operand (type, integer_zero_node, arg1);
8202
 
8203
      /* X & ~X is always zero.  */
8204
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8205
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8206
        return omit_one_operand (type, integer_zero_node, arg0);
8207
 
8208
      t1 = distribute_bit_expr (code, type, arg0, arg1);
8209
      if (t1 != NULL_TREE)
8210
        return t1;
8211
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
8212
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8213
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8214
        {
8215
          unsigned int prec
8216
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8217
 
8218
          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8219
              && (~TREE_INT_CST_LOW (arg1)
8220
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8221
            return fold_convert (type, TREE_OPERAND (arg0, 0));
8222
        }
8223
 
8224
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8225
 
8226
         This results in more efficient code for machines without a NOR
8227
         instruction.  Combine will canonicalize to the first form
8228
         which will allow use of NOR instructions provided by the
8229
         backend if they exist.  */
8230
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8231
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
8232
        {
8233
          return fold_build1 (BIT_NOT_EXPR, type,
8234
                              build2 (BIT_IOR_EXPR, type,
8235
                                      TREE_OPERAND (arg0, 0),
8236
                                      TREE_OPERAND (arg1, 0)));
8237
        }
8238
 
8239
      goto associate;
8240
 
8241
    case RDIV_EXPR:
8242
      /* Don't touch a floating-point divide by zero unless the mode
8243
         of the constant can represent infinity.  */
8244
      if (TREE_CODE (arg1) == REAL_CST
8245
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8246
          && real_zerop (arg1))
8247
        return NULL_TREE;
8248
 
8249
      /* (-A) / (-B) -> A / B  */
8250
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8251
        return fold_build2 (RDIV_EXPR, type,
8252
                            TREE_OPERAND (arg0, 0),
8253
                            negate_expr (arg1));
8254
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8255
        return fold_build2 (RDIV_EXPR, type,
8256
                            negate_expr (arg0),
8257
                            TREE_OPERAND (arg1, 0));
8258
 
8259
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
8260
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8261
          && real_onep (arg1))
8262
        return non_lvalue (fold_convert (type, arg0));
8263
 
8264
      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
8265
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8266
          && real_minus_onep (arg1))
8267
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
8268
 
8269
      /* If ARG1 is a constant, we can convert this to a multiply by the
8270
         reciprocal.  This does not have the same rounding properties,
8271
         so only do this if -funsafe-math-optimizations.  We can actually
8272
         always safely do it if ARG1 is a power of two, but it's hard to
8273
         tell if it is or not in a portable manner.  */
8274
      if (TREE_CODE (arg1) == REAL_CST)
8275
        {
8276
          if (flag_unsafe_math_optimizations
8277
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
8278
                                          arg1, 0)))
8279
            return fold_build2 (MULT_EXPR, type, arg0, tem);
8280
          /* Find the reciprocal if optimizing and the result is exact.  */
8281
          if (optimize)
8282
            {
8283
              REAL_VALUE_TYPE r;
8284
              r = TREE_REAL_CST (arg1);
8285
              if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8286
                {
8287
                  tem = build_real (type, r);
8288
                  return fold_build2 (MULT_EXPR, type,
8289
                                      fold_convert (type, arg0), tem);
8290
                }
8291
            }
8292
        }
8293
      /* Convert A/B/C to A/(B*C).  */
8294
      if (flag_unsafe_math_optimizations
8295
          && TREE_CODE (arg0) == RDIV_EXPR)
8296
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8297
                            fold_build2 (MULT_EXPR, type,
8298
                                         TREE_OPERAND (arg0, 1), arg1));
8299
 
8300
      /* Convert A/(B/C) to (A/B)*C.  */
8301
      if (flag_unsafe_math_optimizations
8302
          && TREE_CODE (arg1) == RDIV_EXPR)
8303
        return fold_build2 (MULT_EXPR, type,
8304
                            fold_build2 (RDIV_EXPR, type, arg0,
8305
                                         TREE_OPERAND (arg1, 0)),
8306
                            TREE_OPERAND (arg1, 1));
8307
 
8308
      /* Convert C1/(X*C2) into (C1/C2)/X.  */
8309
      if (flag_unsafe_math_optimizations
8310
          && TREE_CODE (arg1) == MULT_EXPR
8311
          && TREE_CODE (arg0) == REAL_CST
8312
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8313
        {
8314
          tree tem = const_binop (RDIV_EXPR, arg0,
8315
                                  TREE_OPERAND (arg1, 1), 0);
8316
          if (tem)
8317
            return fold_build2 (RDIV_EXPR, type, tem,
8318
                                TREE_OPERAND (arg1, 0));
8319
        }
8320
 
8321
      if (flag_unsafe_math_optimizations)
8322
        {
8323
          enum built_in_function fcode = builtin_mathfn_code (arg1);
8324
          /* Optimize x/expN(y) into x*expN(-y).  */
8325
          if (BUILTIN_EXPONENT_P (fcode))
8326
            {
8327
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8328
              tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8329
              tree arglist = build_tree_list (NULL_TREE,
8330
                                              fold_convert (type, arg));
8331
              arg1 = build_function_call_expr (expfn, arglist);
8332
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
8333
            }
8334
 
8335
          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
8336
          if (fcode == BUILT_IN_POW
8337
              || fcode == BUILT_IN_POWF
8338
              || fcode == BUILT_IN_POWL)
8339
            {
8340
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8341
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8342
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8343
              tree neg11 = fold_convert (type, negate_expr (arg11));
8344
              tree arglist = tree_cons(NULL_TREE, arg10,
8345
                                       build_tree_list (NULL_TREE, neg11));
8346
              arg1 = build_function_call_expr (powfn, arglist);
8347
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
8348
            }
8349
        }
8350
 
8351
      if (flag_unsafe_math_optimizations)
8352
        {
8353
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8354
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8355
 
8356
          /* Optimize sin(x)/cos(x) as tan(x).  */
8357
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8358
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8359
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8360
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8361
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8362
            {
8363
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8364
 
8365
              if (tanfn != NULL_TREE)
8366
                return build_function_call_expr (tanfn,
8367
                                                 TREE_OPERAND (arg0, 1));
8368
            }
8369
 
8370
          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
8371
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8372
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8373
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8374
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8375
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8376
            {
8377
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8378
 
8379
              if (tanfn != NULL_TREE)
8380
                {
8381
                  tree tmp = TREE_OPERAND (arg0, 1);
8382
                  tmp = build_function_call_expr (tanfn, tmp);
8383
                  return fold_build2 (RDIV_EXPR, type,
8384
                                      build_real (type, dconst1), tmp);
8385
                }
8386
            }
8387
 
8388
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
8389
          if (fcode0 == BUILT_IN_POW
8390
              || fcode0 == BUILT_IN_POWF
8391
              || fcode0 == BUILT_IN_POWL)
8392
            {
8393
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8394
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8395
              if (TREE_CODE (arg01) == REAL_CST
8396
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
8397
                  && operand_equal_p (arg1, arg00, 0))
8398
                {
8399
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8400
                  REAL_VALUE_TYPE c;
8401
                  tree arg, arglist;
8402
 
8403
                  c = TREE_REAL_CST (arg01);
8404
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8405
                  arg = build_real (type, c);
8406
                  arglist = build_tree_list (NULL_TREE, arg);
8407
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
8408
                  return build_function_call_expr (powfn, arglist);
8409
                }
8410
            }
8411
        }
8412
      goto binary;
8413
 
8414
    case TRUNC_DIV_EXPR:
8415
    case ROUND_DIV_EXPR:
8416
    case FLOOR_DIV_EXPR:
8417
    case CEIL_DIV_EXPR:
8418
    case EXACT_DIV_EXPR:
8419
      if (integer_onep (arg1))
8420
        return non_lvalue (fold_convert (type, arg0));
8421
      if (integer_zerop (arg1))
8422
        return NULL_TREE;
8423
      /* X / -1 is -X.  */
8424
      if (!TYPE_UNSIGNED (type)
8425
          && TREE_CODE (arg1) == INTEGER_CST
8426
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8427
          && TREE_INT_CST_HIGH (arg1) == -1)
8428
        return fold_convert (type, negate_expr (arg0));
8429
 
8430
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8431
         operation, EXACT_DIV_EXPR.
8432
 
8433
         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8434
         At one time others generated faster code, it's not clear if they do
8435
         after the last round to changes to the DIV code in expmed.c.  */
8436
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8437
          && multiple_of_p (type, arg0, arg1))
8438
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8439
 
8440
      if (TREE_CODE (arg1) == INTEGER_CST
8441
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8442
        return fold_convert (type, tem);
8443
 
8444
      goto binary;
8445
 
8446
    case CEIL_MOD_EXPR:
8447
    case FLOOR_MOD_EXPR:
8448
    case ROUND_MOD_EXPR:
8449
    case TRUNC_MOD_EXPR:
8450
      /* X % 1 is always zero, but be sure to preserve any side
8451
         effects in X.  */
8452
      if (integer_onep (arg1))
8453
        return omit_one_operand (type, integer_zero_node, arg0);
8454
 
8455
      /* X % 0, return X % 0 unchanged so that we can get the
8456
         proper warnings and errors.  */
8457
      if (integer_zerop (arg1))
8458
        return NULL_TREE;
8459
 
8460
      /* 0 % X is always zero, but be sure to preserve any side
8461
         effects in X.  Place this after checking for X == 0.  */
8462
      if (integer_zerop (arg0))
8463
        return omit_one_operand (type, integer_zero_node, arg1);
8464
 
8465
      /* X % -1 is zero.  */
8466
      if (!TYPE_UNSIGNED (type)
8467
          && TREE_CODE (arg1) == INTEGER_CST
8468
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8469
          && TREE_INT_CST_HIGH (arg1) == -1)
8470
        return omit_one_operand (type, integer_zero_node, arg0);
8471
 
8472
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8473
         i.e. "X % C" into "X & C2", if X and C are positive.  */
8474
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8475
          && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8476
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8477
        {
8478
          unsigned HOST_WIDE_INT high, low;
8479
          tree mask;
8480
          int l;
8481
 
8482
          l = tree_log2 (arg1);
8483
          if (l >= HOST_BITS_PER_WIDE_INT)
8484
            {
8485
              high = ((unsigned HOST_WIDE_INT) 1
8486
                      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8487
              low = -1;
8488
            }
8489
          else
8490
            {
8491
              high = 0;
8492
              low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8493
            }
8494
 
8495
          mask = build_int_cst_wide (type, low, high);
8496
          return fold_build2 (BIT_AND_EXPR, type,
8497
                              fold_convert (type, arg0), mask);
8498
        }
8499
 
8500
      /* X % -C is the same as X % C.  */
8501
      if (code == TRUNC_MOD_EXPR
8502
          && !TYPE_UNSIGNED (type)
8503
          && TREE_CODE (arg1) == INTEGER_CST
8504
          && !TREE_CONSTANT_OVERFLOW (arg1)
8505
          && TREE_INT_CST_HIGH (arg1) < 0
8506
          && !flag_trapv
8507
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
8508
          && !sign_bit_p (arg1, arg1))
8509
        return fold_build2 (code, type, fold_convert (type, arg0),
8510
                            fold_convert (type, negate_expr (arg1)));
8511
 
8512
      /* X % -Y is the same as X % Y.  */
8513
      if (code == TRUNC_MOD_EXPR
8514
          && !TYPE_UNSIGNED (type)
8515
          && TREE_CODE (arg1) == NEGATE_EXPR
8516
          && !flag_trapv)
8517
        return fold_build2 (code, type, fold_convert (type, arg0),
8518
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
8519
 
8520
      if (TREE_CODE (arg1) == INTEGER_CST
8521
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8522
        return fold_convert (type, tem);
8523
 
8524
      goto binary;
8525
 
8526
    case LROTATE_EXPR:
8527
    case RROTATE_EXPR:
8528
      if (integer_all_onesp (arg0))
8529
        return omit_one_operand (type, arg0, arg1);
8530
      goto shift;
8531
 
8532
    case RSHIFT_EXPR:
8533
      /* Optimize -1 >> x for arithmetic right shifts.  */
8534
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8535
        return omit_one_operand (type, arg0, arg1);
8536
      /* ... fall through ...  */
8537
 
8538
    case LSHIFT_EXPR:
8539
    shift:
8540
      if (integer_zerop (arg1))
8541
        return non_lvalue (fold_convert (type, arg0));
8542
      if (integer_zerop (arg0))
8543
        return omit_one_operand (type, arg0, arg1);
8544
 
8545
      /* Since negative shift count is not well-defined,
8546
         don't try to compute it in the compiler.  */
8547
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8548
        return NULL_TREE;
8549
 
8550
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
8551
      if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8552
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8553
          && host_integerp (TREE_OPERAND (arg0, 1), false)
8554
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8555
        {
8556
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8557
                               + TREE_INT_CST_LOW (arg1));
8558
 
8559
          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8560
             being well defined.  */
8561
          if (low >= TYPE_PRECISION (type))
8562
            {
8563
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8564
                low = low % TYPE_PRECISION (type);
8565
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8566
                return build_int_cst (type, 0);
8567
              else
8568
                low = TYPE_PRECISION (type) - 1;
8569
            }
8570
 
8571
          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8572
                              build_int_cst (type, low));
8573
        }
8574
 
8575
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8576
         into x & ((unsigned)-1 >> c) for unsigned types.  */
8577
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8578
           || (TYPE_UNSIGNED (type)
8579
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8580
          && host_integerp (arg1, false)
8581
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8582
          && host_integerp (TREE_OPERAND (arg0, 1), false)
8583
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8584
        {
8585
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8586
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8587
          tree lshift;
8588
          tree arg00;
8589
 
8590
          if (low0 == low1)
8591
            {
8592
              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8593
 
8594
              lshift = build_int_cst (type, -1);
8595
              lshift = int_const_binop (code, lshift, arg1, 0);
8596
 
8597
              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8598
            }
8599
        }
8600
 
8601
      /* Rewrite an LROTATE_EXPR by a constant into an
8602
         RROTATE_EXPR by a new constant.  */
8603
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8604
        {
8605
          tree tem = build_int_cst (NULL_TREE,
8606
                                    GET_MODE_BITSIZE (TYPE_MODE (type)));
8607
          tem = fold_convert (TREE_TYPE (arg1), tem);
8608
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8609
          return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8610
        }
8611
 
8612
      /* If we have a rotate of a bit operation with the rotate count and
8613
         the second operand of the bit operation both constant,
8614
         permute the two operations.  */
8615
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8616
          && (TREE_CODE (arg0) == BIT_AND_EXPR
8617
              || TREE_CODE (arg0) == BIT_IOR_EXPR
8618
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
8619
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8620
        return fold_build2 (TREE_CODE (arg0), type,
8621
                            fold_build2 (code, type,
8622
                                         TREE_OPERAND (arg0, 0), arg1),
8623
                            fold_build2 (code, type,
8624
                                         TREE_OPERAND (arg0, 1), arg1));
8625
 
8626
      /* Two consecutive rotates adding up to the width of the mode can
8627
         be ignored.  */
8628
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8629
          && TREE_CODE (arg0) == RROTATE_EXPR
8630
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8631
          && TREE_INT_CST_HIGH (arg1) == 0
8632
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8633
          && ((TREE_INT_CST_LOW (arg1)
8634
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8635
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8636
        return TREE_OPERAND (arg0, 0);
8637
 
8638
      goto binary;
8639
 
8640
    case MIN_EXPR:
8641
      if (operand_equal_p (arg0, arg1, 0))
8642
        return omit_one_operand (type, arg0, arg1);
8643
      if (INTEGRAL_TYPE_P (type)
8644
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8645
        return omit_one_operand (type, arg1, arg0);
8646
      goto associate;
8647
 
8648
    case MAX_EXPR:
8649
      if (operand_equal_p (arg0, arg1, 0))
8650
        return omit_one_operand (type, arg0, arg1);
8651
      if (INTEGRAL_TYPE_P (type)
8652
          && TYPE_MAX_VALUE (type)
8653
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8654
        return omit_one_operand (type, arg1, arg0);
8655
      goto associate;
8656
 
8657
    case TRUTH_ANDIF_EXPR:
8658
      /* Note that the operands of this must be ints
8659
         and their values must be 0 or 1.
8660
         ("true" is a fixed value perhaps depending on the language.)  */
8661
      /* If first arg is constant zero, return it.  */
8662
      if (integer_zerop (arg0))
8663
        return fold_convert (type, arg0);
8664
    case TRUTH_AND_EXPR:
8665
      /* If either arg is constant true, drop it.  */
8666
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8667
        return non_lvalue (fold_convert (type, arg1));
8668
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8669
          /* Preserve sequence points.  */
8670
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8671
        return non_lvalue (fold_convert (type, arg0));
8672
      /* If second arg is constant zero, result is zero, but first arg
8673
         must be evaluated.  */
8674
      if (integer_zerop (arg1))
8675
        return omit_one_operand (type, arg1, arg0);
8676
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8677
         case will be handled here.  */
8678
      if (integer_zerop (arg0))
8679
        return omit_one_operand (type, arg0, arg1);
8680
 
8681
      /* !X && X is always false.  */
8682
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8683
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8684
        return omit_one_operand (type, integer_zero_node, arg1);
8685
      /* X && !X is always false.  */
8686
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8687
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8688
        return omit_one_operand (type, integer_zero_node, arg0);
8689
 
8690
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
8691
         means A >= Y && A != MAX, but in this case we know that
8692
         A < X <= MAX.  */
8693
 
8694
      if (!TREE_SIDE_EFFECTS (arg0)
8695
          && !TREE_SIDE_EFFECTS (arg1))
8696
        {
8697
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8698
          if (tem && !operand_equal_p (tem, arg0, 0))
8699
            return fold_build2 (code, type, tem, arg1);
8700
 
8701
          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8702
          if (tem && !operand_equal_p (tem, arg1, 0))
8703
            return fold_build2 (code, type, arg0, tem);
8704
        }
8705
 
8706
    truth_andor:
8707
      /* We only do these simplifications if we are optimizing.  */
8708
      if (!optimize)
8709
        return NULL_TREE;
8710
 
8711
      /* Check for things like (A || B) && (A || C).  We can convert this
8712
         to A || (B && C).  Note that either operator can be any of the four
8713
         truth and/or operations and the transformation will still be
8714
         valid.   Also note that we only care about order for the
8715
         ANDIF and ORIF operators.  If B contains side effects, this
8716
         might change the truth-value of A.  */
8717
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
8718
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8719
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8720
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
8721
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8722
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8723
        {
8724
          tree a00 = TREE_OPERAND (arg0, 0);
8725
          tree a01 = TREE_OPERAND (arg0, 1);
8726
          tree a10 = TREE_OPERAND (arg1, 0);
8727
          tree a11 = TREE_OPERAND (arg1, 1);
8728
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8729
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8730
                             && (code == TRUTH_AND_EXPR
8731
                                 || code == TRUTH_OR_EXPR));
8732
 
8733
          if (operand_equal_p (a00, a10, 0))
8734
            return fold_build2 (TREE_CODE (arg0), type, a00,
8735
                                fold_build2 (code, type, a01, a11));
8736
          else if (commutative && operand_equal_p (a00, a11, 0))
8737
            return fold_build2 (TREE_CODE (arg0), type, a00,
8738
                                fold_build2 (code, type, a01, a10));
8739
          else if (commutative && operand_equal_p (a01, a10, 0))
8740
            return fold_build2 (TREE_CODE (arg0), type, a01,
8741
                                fold_build2 (code, type, a00, a11));
8742
 
8743
          /* This case if tricky because we must either have commutative
8744
             operators or else A10 must not have side-effects.  */
8745
 
8746
          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8747
                   && operand_equal_p (a01, a11, 0))
8748
            return fold_build2 (TREE_CODE (arg0), type,
8749
                                fold_build2 (code, type, a00, a10),
8750
                                a01);
8751
        }
8752
 
8753
      /* See if we can build a range comparison.  */
8754
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
8755
        return tem;
8756
 
8757
      /* Check for the possibility of merging component references.  If our
8758
         lhs is another similar operation, try to merge its rhs with our
8759
         rhs.  Then try to merge our lhs and rhs.  */
8760
      if (TREE_CODE (arg0) == code
8761
          && 0 != (tem = fold_truthop (code, type,
8762
                                       TREE_OPERAND (arg0, 1), arg1)))
8763
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8764
 
8765
      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8766
        return tem;
8767
 
8768
      return NULL_TREE;
8769
 
8770
    case TRUTH_ORIF_EXPR:
8771
      /* Note that the operands of this must be ints
8772
         and their values must be 0 or true.
8773
         ("true" is a fixed value perhaps depending on the language.)  */
8774
      /* If first arg is constant true, return it.  */
8775
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8776
        return fold_convert (type, arg0);
8777
    case TRUTH_OR_EXPR:
8778
      /* If either arg is constant zero, drop it.  */
8779
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8780
        return non_lvalue (fold_convert (type, arg1));
8781
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8782
          /* Preserve sequence points.  */
8783
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8784
        return non_lvalue (fold_convert (type, arg0));
8785
      /* If second arg is constant true, result is true, but we must
8786
         evaluate first arg.  */
8787
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8788
        return omit_one_operand (type, arg1, arg0);
8789
      /* Likewise for first arg, but note this only occurs here for
8790
         TRUTH_OR_EXPR.  */
8791
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8792
        return omit_one_operand (type, arg0, arg1);
8793
 
8794
      /* !X || X is always true.  */
8795
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8796
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8797
        return omit_one_operand (type, integer_one_node, arg1);
8798
      /* X || !X is always true.  */
8799
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8800
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8801
        return omit_one_operand (type, integer_one_node, arg0);
8802
 
8803
      goto truth_andor;
8804
 
8805
    case TRUTH_XOR_EXPR:
8806
      /* If the second arg is constant zero, drop it.  */
8807
      if (integer_zerop (arg1))
8808
        return non_lvalue (fold_convert (type, arg0));
8809
      /* If the second arg is constant true, this is a logical inversion.  */
8810
      if (integer_onep (arg1))
8811
        {
8812
          /* Only call invert_truthvalue if operand is a truth value.  */
8813
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8814
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8815
          else
8816
            tem = invert_truthvalue (arg0);
8817
          return non_lvalue (fold_convert (type, tem));
8818
        }
8819
      /* Identical arguments cancel to zero.  */
8820
      if (operand_equal_p (arg0, arg1, 0))
8821
        return omit_one_operand (type, integer_zero_node, arg0);
8822
 
8823
      /* !X ^ X is always true.  */
8824
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8825
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8826
        return omit_one_operand (type, integer_one_node, arg1);
8827
 
8828
      /* X ^ !X is always true.  */
8829
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8830
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8831
        return omit_one_operand (type, integer_one_node, arg0);
8832
 
8833
      return NULL_TREE;
8834
 
8835
    case EQ_EXPR:
8836
    case NE_EXPR:
8837
    case LT_EXPR:
8838
    case GT_EXPR:
8839
    case LE_EXPR:
8840
    case GE_EXPR:
8841
      /* If one arg is a real or integer constant, put it last.  */
8842
      if (tree_swap_operands_p (arg0, arg1, true))
8843
        return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8844
 
8845
      /* bool_var != 0 becomes bool_var. */
8846
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8847
          && code == NE_EXPR)
8848
        return non_lvalue (fold_convert (type, arg0));
8849
 
8850
      /* bool_var == 1 becomes bool_var. */
8851
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8852
          && code == EQ_EXPR)
8853
        return non_lvalue (fold_convert (type, arg0));
8854
 
8855
      /* If this is an equality comparison of the address of a non-weak
8856
         object against zero, then we know the result.  */
8857
      if ((code == EQ_EXPR || code == NE_EXPR)
8858
          && TREE_CODE (arg0) == ADDR_EXPR
8859
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8860
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8861
          && integer_zerop (arg1))
8862
        return constant_boolean_node (code != EQ_EXPR, type);
8863
 
8864
      /* If this is an equality comparison of the address of two non-weak,
8865
         unaliased symbols neither of which are extern (since we do not
8866
         have access to attributes for externs), then we know the result.  */
8867
      if ((code == EQ_EXPR || code == NE_EXPR)
8868
          && TREE_CODE (arg0) == ADDR_EXPR
8869
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8870
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8871
          && ! lookup_attribute ("alias",
8872
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8873
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8874
          && TREE_CODE (arg1) == ADDR_EXPR
8875
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8876
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8877
          && ! lookup_attribute ("alias",
8878
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8879
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8880
        {
8881
          /* We know that we're looking at the address of two
8882
             non-weak, unaliased, static _DECL nodes.
8883
 
8884
             It is both wasteful and incorrect to call operand_equal_p
8885
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
8886
             all we need to do is test pointer equality for the arguments
8887
             to the two ADDR_EXPR nodes.  It is incorrect to use
8888
             operand_equal_p as that function is NOT equivalent to a
8889
             C equality test.  It can in fact return false for two
8890
             objects which would test as equal using the C equality
8891
             operator.  */
8892
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8893
          return constant_boolean_node (equal
8894
                                        ? code == EQ_EXPR : code != EQ_EXPR,
8895
                                        type);
8896
        }
8897
 
8898
      /* If this is a comparison of two exprs that look like an
8899
         ARRAY_REF of the same object, then we can fold this to a
8900
         comparison of the two offsets.  This is only safe for
8901
         EQ_EXPR and NE_EXPR because of overflow issues.  */
8902
      if (code == EQ_EXPR || code == NE_EXPR)
8903
        {
8904
          tree base0, offset0, base1, offset1;
8905
 
8906
          if (extract_array_ref (arg0, &base0, &offset0)
8907
              && extract_array_ref (arg1, &base1, &offset1)
8908
              && operand_equal_p (base0, base1, 0))
8909
            {
8910
              /* Handle no offsets on both sides specially.  */
8911
              if (offset0 == NULL_TREE
8912
                  && offset1 == NULL_TREE)
8913
                return fold_build2 (code, type, integer_zero_node,
8914
                                    integer_zero_node);
8915
 
8916
              if (!offset0 || !offset1
8917
                  || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8918
                {
8919
                  if (offset0 == NULL_TREE)
8920
                    offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8921
                  if (offset1 == NULL_TREE)
8922
                    offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8923
                  return fold_build2 (code, type, offset0, offset1);
8924
                }
8925
            }
8926
        }
8927
 
8928
      /* Transform comparisons of the form X +- C CMP X.  */
8929
      if ((code != EQ_EXPR && code != NE_EXPR)
8930
          && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8931
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8932
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8933
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8934
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8935
                  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8936
                  && !(flag_wrapv || flag_trapv))))
8937
        {
8938
          tree arg01 = TREE_OPERAND (arg0, 1);
8939
          enum tree_code code0 = TREE_CODE (arg0);
8940
          int is_positive;
8941
 
8942
          if (TREE_CODE (arg01) == REAL_CST)
8943
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8944
          else
8945
            is_positive = tree_int_cst_sgn (arg01);
8946
 
8947
          /* (X - c) > X becomes false.  */
8948
          if (code == GT_EXPR
8949
              && ((code0 == MINUS_EXPR && is_positive >= 0)
8950
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
8951
            return constant_boolean_node (0, type);
8952
 
8953
          /* Likewise (X + c) < X becomes false.  */
8954
          if (code == LT_EXPR
8955
              && ((code0 == PLUS_EXPR && is_positive >= 0)
8956
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
8957
            return constant_boolean_node (0, type);
8958
 
8959
          /* Convert (X - c) <= X to true.  */
8960
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8961
              && code == LE_EXPR
8962
              && ((code0 == MINUS_EXPR && is_positive >= 0)
8963
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
8964
            return constant_boolean_node (1, type);
8965
 
8966
          /* Convert (X + c) >= X to true.  */
8967
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8968
              && code == GE_EXPR
8969
              && ((code0 == PLUS_EXPR && is_positive >= 0)
8970
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
8971
            return constant_boolean_node (1, type);
8972
 
8973
          if (TREE_CODE (arg01) == INTEGER_CST)
8974
            {
8975
              /* Convert X + c > X and X - c < X to true for integers.  */
8976
              if (code == GT_EXPR
8977
                  && ((code0 == PLUS_EXPR && is_positive > 0)
8978
                      || (code0 == MINUS_EXPR && is_positive < 0)))
8979
                return constant_boolean_node (1, type);
8980
 
8981
              if (code == LT_EXPR
8982
                  && ((code0 == MINUS_EXPR && is_positive > 0)
8983
                      || (code0 == PLUS_EXPR && is_positive < 0)))
8984
                return constant_boolean_node (1, type);
8985
 
8986
              /* Convert X + c <= X and X - c >= X to false for integers.  */
8987
              if (code == LE_EXPR
8988
                  && ((code0 == PLUS_EXPR && is_positive > 0)
8989
                      || (code0 == MINUS_EXPR && is_positive < 0)))
8990
                return constant_boolean_node (0, type);
8991
 
8992
              if (code == GE_EXPR
8993
                  && ((code0 == MINUS_EXPR && is_positive > 0)
8994
                      || (code0 == PLUS_EXPR && is_positive < 0)))
8995
                return constant_boolean_node (0, type);
8996
            }
8997
        }
8998
 
8999
      /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
9000
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9001
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9002
              && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9003
              && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9004
              && !(flag_wrapv || flag_trapv))
9005
          && (TREE_CODE (arg1) == INTEGER_CST
9006
              && !TREE_OVERFLOW (arg1)))
9007
        {
9008
          tree const1 = TREE_OPERAND (arg0, 1);
9009
          tree const2 = arg1;
9010
          tree variable = TREE_OPERAND (arg0, 0);
9011
          tree lhs;
9012
          int lhs_add;
9013
          lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9014
 
9015
          lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9016
                             TREE_TYPE (arg1), const2, const1);
9017
          if (TREE_CODE (lhs) == TREE_CODE (arg1)
9018
              && (TREE_CODE (lhs) != INTEGER_CST
9019
                  || !TREE_OVERFLOW (lhs)))
9020
            return fold_build2 (code, type, variable, lhs);
9021
        }
9022
 
9023
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9024
        {
9025
          tree targ0 = strip_float_extensions (arg0);
9026
          tree targ1 = strip_float_extensions (arg1);
9027
          tree newtype = TREE_TYPE (targ0);
9028
 
9029
          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9030
            newtype = TREE_TYPE (targ1);
9031
 
9032
          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9033
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9034
            return fold_build2 (code, type, fold_convert (newtype, targ0),
9035
                                fold_convert (newtype, targ1));
9036
 
9037
          /* (-a) CMP (-b) -> b CMP a  */
9038
          if (TREE_CODE (arg0) == NEGATE_EXPR
9039
              && TREE_CODE (arg1) == NEGATE_EXPR)
9040
            return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9041
                                TREE_OPERAND (arg0, 0));
9042
 
9043
          if (TREE_CODE (arg1) == REAL_CST)
9044
          {
9045
            REAL_VALUE_TYPE cst;
9046
            cst = TREE_REAL_CST (arg1);
9047
 
9048
            /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9049
            if (TREE_CODE (arg0) == NEGATE_EXPR)
9050
              return
9051
                fold_build2 (swap_tree_comparison (code), type,
9052
                             TREE_OPERAND (arg0, 0),
9053
                             build_real (TREE_TYPE (arg1),
9054
                                         REAL_VALUE_NEGATE (cst)));
9055
 
9056
            /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9057
            /* a CMP (-0) -> a CMP 0  */
9058
            if (REAL_VALUE_MINUS_ZERO (cst))
9059
              return fold_build2 (code, type, arg0,
9060
                                  build_real (TREE_TYPE (arg1), dconst0));
9061
 
9062
            /* x != NaN is always true, other ops are always false.  */
9063
            if (REAL_VALUE_ISNAN (cst)
9064
                && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9065
              {
9066
                tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9067
                return omit_one_operand (type, tem, arg0);
9068
              }
9069
 
9070
            /* Fold comparisons against infinity.  */
9071
            if (REAL_VALUE_ISINF (cst))
9072
              {
9073
                tem = fold_inf_compare (code, type, arg0, arg1);
9074
                if (tem != NULL_TREE)
9075
                  return tem;
9076
              }
9077
          }
9078
 
9079
          /* If this is a comparison of a real constant with a PLUS_EXPR
9080
             or a MINUS_EXPR of a real constant, we can convert it into a
9081
             comparison with a revised real constant as long as no overflow
9082
             occurs when unsafe_math_optimizations are enabled.  */
9083
          if (flag_unsafe_math_optimizations
9084
              && TREE_CODE (arg1) == REAL_CST
9085
              && (TREE_CODE (arg0) == PLUS_EXPR
9086
                  || TREE_CODE (arg0) == MINUS_EXPR)
9087
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9088
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9089
                                          ? MINUS_EXPR : PLUS_EXPR,
9090
                                          arg1, TREE_OPERAND (arg0, 1), 0))
9091
              && ! TREE_CONSTANT_OVERFLOW (tem))
9092
            return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9093
 
9094
          /* Likewise, we can simplify a comparison of a real constant with
9095
             a MINUS_EXPR whose first operand is also a real constant, i.e.
9096
             (c1 - x) < c2 becomes x > c1-c2.  */
9097
          if (flag_unsafe_math_optimizations
9098
              && TREE_CODE (arg1) == REAL_CST
9099
              && TREE_CODE (arg0) == MINUS_EXPR
9100
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9101
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9102
                                          arg1, 0))
9103
              && ! TREE_CONSTANT_OVERFLOW (tem))
9104
            return fold_build2 (swap_tree_comparison (code), type,
9105
                                TREE_OPERAND (arg0, 1), tem);
9106
 
9107
          /* Fold comparisons against built-in math functions.  */
9108
          if (TREE_CODE (arg1) == REAL_CST
9109
              && flag_unsafe_math_optimizations
9110
              && ! flag_errno_math)
9111
            {
9112
              enum built_in_function fcode = builtin_mathfn_code (arg0);
9113
 
9114
              if (fcode != END_BUILTINS)
9115
                {
9116
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9117
                  if (tem != NULL_TREE)
9118
                    return tem;
9119
                }
9120
            }
9121
        }
9122
 
9123
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
9124
      if (TREE_CONSTANT (arg1)
9125
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9126
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9127
          /* This optimization is invalid for ordered comparisons
9128
             if CONST+INCR overflows or if foo+incr might overflow.
9129
             This optimization is invalid for floating point due to rounding.
9130
             For pointer types we assume overflow doesn't happen.  */
9131
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
9132
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9133
                  && (code == EQ_EXPR || code == NE_EXPR))))
9134
        {
9135
          tree varop, newconst;
9136
 
9137
          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9138
            {
9139
              newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9140
                                      arg1, TREE_OPERAND (arg0, 1));
9141
              varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9142
                              TREE_OPERAND (arg0, 0),
9143
                              TREE_OPERAND (arg0, 1));
9144
            }
9145
          else
9146
            {
9147
              newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9148
                                      arg1, TREE_OPERAND (arg0, 1));
9149
              varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9150
                              TREE_OPERAND (arg0, 0),
9151
                              TREE_OPERAND (arg0, 1));
9152
            }
9153
 
9154
 
9155
          /* If VAROP is a reference to a bitfield, we must mask
9156
             the constant by the width of the field.  */
9157
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9158
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9159
              && host_integerp (DECL_SIZE (TREE_OPERAND
9160
                                           (TREE_OPERAND (varop, 0), 1)), 1))
9161
            {
9162
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9163
              HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9164
              tree folded_compare, shift;
9165
 
9166
              /* First check whether the comparison would come out
9167
                 always the same.  If we don't do that we would
9168
                 change the meaning with the masking.  */
9169
              folded_compare = fold_build2 (code, type,
9170
                                            TREE_OPERAND (varop, 0), arg1);
9171
              if (integer_zerop (folded_compare)
9172
                  || integer_onep (folded_compare))
9173
                return omit_one_operand (type, folded_compare, varop);
9174
 
9175
              shift = build_int_cst (NULL_TREE,
9176
                                     TYPE_PRECISION (TREE_TYPE (varop)) - size);
9177
              shift = fold_convert (TREE_TYPE (varop), shift);
9178
              newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9179
                                      newconst, shift);
9180
              newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9181
                                      newconst, shift);
9182
            }
9183
 
9184
          return fold_build2 (code, type, varop, newconst);
9185
        }
9186
 
9187
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9188
         This transformation affects the cases which are handled in later
9189
         optimizations involving comparisons with non-negative constants.  */
9190
      if (TREE_CODE (arg1) == INTEGER_CST
9191
          && TREE_CODE (arg0) != INTEGER_CST
9192
          && tree_int_cst_sgn (arg1) > 0)
9193
        {
9194
          switch (code)
9195
            {
9196
            case GE_EXPR:
9197
              arg1 = const_binop (MINUS_EXPR, arg1,
9198
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
9199
              return fold_build2 (GT_EXPR, type, arg0,
9200
                                  fold_convert (TREE_TYPE (arg0), arg1));
9201
 
9202
            case LT_EXPR:
9203
              arg1 = const_binop (MINUS_EXPR, arg1,
9204
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
9205
              return fold_build2 (LE_EXPR, type, arg0,
9206
                                  fold_convert (TREE_TYPE (arg0), arg1));
9207
 
9208
            default:
9209
              break;
9210
            }
9211
        }
9212
 
9213
      /* Comparisons with the highest or lowest possible integer of
9214
         the specified size will have known values.  */
9215
      {
9216
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9217
 
9218
        if (TREE_CODE (arg1) == INTEGER_CST
9219
            && ! TREE_CONSTANT_OVERFLOW (arg1)
9220
            && width <= 2 * HOST_BITS_PER_WIDE_INT
9221
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9222
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
9223
          {
9224
            HOST_WIDE_INT signed_max_hi;
9225
            unsigned HOST_WIDE_INT signed_max_lo;
9226
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9227
 
9228
            if (width <= HOST_BITS_PER_WIDE_INT)
9229
              {
9230
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9231
                                - 1;
9232
                signed_max_hi = 0;
9233
                max_hi = 0;
9234
 
9235
                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9236
                  {
9237
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9238
                    min_lo = 0;
9239
                    min_hi = 0;
9240
                  }
9241
                else
9242
                  {
9243
                    max_lo = signed_max_lo;
9244
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9245
                    min_hi = -1;
9246
                  }
9247
              }
9248
            else
9249
              {
9250
                width -= HOST_BITS_PER_WIDE_INT;
9251
                signed_max_lo = -1;
9252
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9253
                                - 1;
9254
                max_lo = -1;
9255
                min_lo = 0;
9256
 
9257
                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9258
                  {
9259
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9260
                    min_hi = 0;
9261
                  }
9262
                else
9263
                  {
9264
                    max_hi = signed_max_hi;
9265
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9266
                  }
9267
              }
9268
 
9269
            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9270
                && TREE_INT_CST_LOW (arg1) == max_lo)
9271
              switch (code)
9272
                {
9273
                case GT_EXPR:
9274
                  return omit_one_operand (type, integer_zero_node, arg0);
9275
 
9276
                case GE_EXPR:
9277
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
9278
 
9279
                case LE_EXPR:
9280
                  return omit_one_operand (type, integer_one_node, arg0);
9281
 
9282
                case LT_EXPR:
9283
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
9284
 
9285
                /* The GE_EXPR and LT_EXPR cases above are not normally
9286
                   reached because of previous transformations.  */
9287
 
9288
                default:
9289
                  break;
9290
                }
9291
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9292
                     == max_hi
9293
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9294
              switch (code)
9295
                {
9296
                case GT_EXPR:
9297
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9298
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
9299
                case LE_EXPR:
9300
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9301
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
9302
                default:
9303
                  break;
9304
                }
9305
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9306
                     == min_hi
9307
                     && TREE_INT_CST_LOW (arg1) == min_lo)
9308
              switch (code)
9309
                {
9310
                case LT_EXPR:
9311
                  return omit_one_operand (type, integer_zero_node, arg0);
9312
 
9313
                case LE_EXPR:
9314
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
9315
 
9316
                case GE_EXPR:
9317
                  return omit_one_operand (type, integer_one_node, arg0);
9318
 
9319
                case GT_EXPR:
9320
                  return fold_build2 (NE_EXPR, type, op0, op1);
9321
 
9322
                default:
9323
                  break;
9324
                }
9325
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9326
                     == min_hi
9327
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9328
              switch (code)
9329
                {
9330
                case GE_EXPR:
9331
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9332
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
9333
                case LT_EXPR:
9334
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9335
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
9336
                default:
9337
                  break;
9338
                }
9339
 
9340
            else if (!in_gimple_form
9341
                     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9342
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
9343
                     && TYPE_UNSIGNED (TREE_TYPE (arg1))
9344
                     /* signed_type does not work on pointer types.  */
9345
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9346
              {
9347
                /* The following case also applies to X < signed_max+1
9348
                   and X >= signed_max+1 because previous transformations.  */
9349
                if (code == LE_EXPR || code == GT_EXPR)
9350
                  {
9351
                    tree st0, st1;
9352
                    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9353
                    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9354
                    return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9355
                                        type, fold_convert (st0, arg0),
9356
                                        build_int_cst (st1, 0));
9357
                  }
9358
              }
9359
          }
9360
      }
9361
 
9362
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9363
         a MINUS_EXPR of a constant, we can convert it into a comparison with
9364
         a revised constant as long as no overflow occurs.  */
9365
      if ((code == EQ_EXPR || code == NE_EXPR)
9366
          && TREE_CODE (arg1) == INTEGER_CST
9367
          && (TREE_CODE (arg0) == PLUS_EXPR
9368
              || TREE_CODE (arg0) == MINUS_EXPR)
9369
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9370
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9371
                                      ? MINUS_EXPR : PLUS_EXPR,
9372
                                      arg1, TREE_OPERAND (arg0, 1), 0))
9373
          && ! TREE_CONSTANT_OVERFLOW (tem))
9374
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9375
 
9376
      /* Similarly for a NEGATE_EXPR.  */
9377
      else if ((code == EQ_EXPR || code == NE_EXPR)
9378
               && TREE_CODE (arg0) == NEGATE_EXPR
9379
               && TREE_CODE (arg1) == INTEGER_CST
9380
               && 0 != (tem = negate_expr (arg1))
9381
               && TREE_CODE (tem) == INTEGER_CST
9382
               && ! TREE_CONSTANT_OVERFLOW (tem))
9383
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9384
 
9385
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
9386
         for !=.  Don't do this for ordered comparisons due to overflow.  */
9387
      else if ((code == NE_EXPR || code == EQ_EXPR)
9388
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9389
        return fold_build2 (code, type,
9390
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9391
 
9392
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9393
               && (TREE_CODE (arg0) == NOP_EXPR
9394
                   || TREE_CODE (arg0) == CONVERT_EXPR))
9395
        {
9396
          /* If we are widening one operand of an integer comparison,
9397
             see if the other operand is similarly being widened.  Perhaps we
9398
             can do the comparison in the narrower type.  */
9399
          tem = fold_widened_comparison (code, type, arg0, arg1);
9400
          if (tem)
9401
            return tem;
9402
 
9403
          /* Or if we are changing signedness.  */
9404
          tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9405
          if (tem)
9406
            return tem;
9407
        }
9408
 
9409
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9410
         constant, we can simplify it.  */
9411
      else if (TREE_CODE (arg1) == INTEGER_CST
9412
               && (TREE_CODE (arg0) == MIN_EXPR
9413
                   || TREE_CODE (arg0) == MAX_EXPR)
9414
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9415
        {
9416
          tem = optimize_minmax_comparison (code, type, op0, op1);
9417
          if (tem)
9418
            return tem;
9419
 
9420
          return NULL_TREE;
9421
        }
9422
 
9423
      /* If we are comparing an ABS_EXPR with a constant, we can
9424
         convert all the cases into explicit comparisons, but they may
9425
         well not be faster than doing the ABS and one comparison.
9426
         But ABS (X) <= C is a range comparison, which becomes a subtraction
9427
         and a comparison, and is probably faster.  */
9428
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9429
               && TREE_CODE (arg0) == ABS_EXPR
9430
               && ! TREE_SIDE_EFFECTS (arg0)
9431
               && (0 != (tem = negate_expr (arg1)))
9432
               && TREE_CODE (tem) == INTEGER_CST
9433
               && ! TREE_CONSTANT_OVERFLOW (tem))
9434
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
9435
                            build2 (GE_EXPR, type,
9436
                                    TREE_OPERAND (arg0, 0), tem),
9437
                            build2 (LE_EXPR, type,
9438
                                    TREE_OPERAND (arg0, 0), arg1));
9439
 
9440
      /* Convert ABS_EXPR<x> >= 0 to true.  */
9441
      else if (code == GE_EXPR
9442
               && tree_expr_nonnegative_p (arg0)
9443
               && (integer_zerop (arg1)
9444
                   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9445
                       && real_zerop (arg1))))
9446
        return omit_one_operand (type, integer_one_node, arg0);
9447
 
9448
      /* Convert ABS_EXPR<x> < 0 to false.  */
9449
      else if (code == LT_EXPR
9450
               && tree_expr_nonnegative_p (arg0)
9451
               && (integer_zerop (arg1) || real_zerop (arg1)))
9452
        return omit_one_operand (type, integer_zero_node, arg0);
9453
 
9454
      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
9455
      else if ((code == EQ_EXPR || code == NE_EXPR)
9456
               && TREE_CODE (arg0) == ABS_EXPR
9457
               && (integer_zerop (arg1) || real_zerop (arg1)))
9458
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9459
 
9460
      /* If this is an EQ or NE comparison with zero and ARG0 is
9461
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
9462
         two operations, but the latter can be done in one less insn
9463
         on machines that have only two-operand insns or on which a
9464
         constant cannot be the first operand.  */
9465
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9466
          && TREE_CODE (arg0) == BIT_AND_EXPR)
9467
        {
9468
          tree arg00 = TREE_OPERAND (arg0, 0);
9469
          tree arg01 = TREE_OPERAND (arg0, 1);
9470
          if (TREE_CODE (arg00) == LSHIFT_EXPR
9471
              && integer_onep (TREE_OPERAND (arg00, 0)))
9472
            return
9473
              fold_build2 (code, type,
9474
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9475
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9476
                                           arg01, TREE_OPERAND (arg00, 1)),
9477
                                   fold_convert (TREE_TYPE (arg0),
9478
                                                 integer_one_node)),
9479
                           arg1);
9480
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9481
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9482
            return
9483
              fold_build2 (code, type,
9484
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9485
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9486
                                           arg00, TREE_OPERAND (arg01, 1)),
9487
                                   fold_convert (TREE_TYPE (arg0),
9488
                                                 integer_one_node)),
9489
                           arg1);
9490
        }
9491
 
9492
      /* If this is an NE or EQ comparison of zero against the result of a
9493
         signed MOD operation whose second operand is a power of 2, make
9494
         the MOD operation unsigned since it is simpler and equivalent.  */
9495
      if ((code == NE_EXPR || code == EQ_EXPR)
9496
          && integer_zerop (arg1)
9497
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9498
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9499
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
9500
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9501
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9502
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
9503
        {
9504
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9505
          tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9506
                                     fold_convert (newtype,
9507
                                                   TREE_OPERAND (arg0, 0)),
9508
                                     fold_convert (newtype,
9509
                                                   TREE_OPERAND (arg0, 1)));
9510
 
9511
          return fold_build2 (code, type, newmod,
9512
                              fold_convert (newtype, arg1));
9513
        }
9514
 
9515
      /* If this is an NE comparison of zero with an AND of one, remove the
9516
         comparison since the AND will give the correct value.  */
9517
      if (code == NE_EXPR && integer_zerop (arg1)
9518
          && TREE_CODE (arg0) == BIT_AND_EXPR
9519
          && integer_onep (TREE_OPERAND (arg0, 1)))
9520
        return fold_convert (type, arg0);
9521
 
9522
      /* If we have (A & C) == C where C is a power of 2, convert this into
9523
         (A & C) != 0.  Similarly for NE_EXPR.  */
9524
      if ((code == EQ_EXPR || code == NE_EXPR)
9525
          && TREE_CODE (arg0) == BIT_AND_EXPR
9526
          && integer_pow2p (TREE_OPERAND (arg0, 1))
9527
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9528
        return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9529
                            arg0, fold_convert (TREE_TYPE (arg0),
9530
                                                integer_zero_node));
9531
 
9532
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9533
         bit, then fold the expression into A < 0 or A >= 0.  */
9534
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9535
      if (tem)
9536
        return tem;
9537
 
9538
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9539
         Similarly for NE_EXPR.  */
9540
      if ((code == EQ_EXPR || code == NE_EXPR)
9541
          && TREE_CODE (arg0) == BIT_AND_EXPR
9542
          && TREE_CODE (arg1) == INTEGER_CST
9543
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9544
        {
9545
          tree notc = fold_build1 (BIT_NOT_EXPR,
9546
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
9547
                                   TREE_OPERAND (arg0, 1));
9548
          tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9549
                                       arg1, notc);
9550
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9551
          if (integer_nonzerop (dandnotc))
9552
            return omit_one_operand (type, rslt, arg0);
9553
        }
9554
 
9555
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9556
         Similarly for NE_EXPR.  */
9557
      if ((code == EQ_EXPR || code == NE_EXPR)
9558
          && TREE_CODE (arg0) == BIT_IOR_EXPR
9559
          && TREE_CODE (arg1) == INTEGER_CST
9560
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9561
        {
9562
          tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9563
          tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9564
                                       TREE_OPERAND (arg0, 1), notd);
9565
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9566
          if (integer_nonzerop (candnotd))
9567
            return omit_one_operand (type, rslt, arg0);
9568
        }
9569
 
9570
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9571
         and similarly for >= into !=.  */
9572
      if ((code == LT_EXPR || code == GE_EXPR)
9573
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
9574
          && TREE_CODE (arg1) == LSHIFT_EXPR
9575
          && integer_onep (TREE_OPERAND (arg1, 0)))
9576
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9577
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9578
                               TREE_OPERAND (arg1, 1)),
9579
                       fold_convert (TREE_TYPE (arg0), integer_zero_node));
9580
 
9581
      else if ((code == LT_EXPR || code == GE_EXPR)
9582
               && TYPE_UNSIGNED (TREE_TYPE (arg0))
9583
               && (TREE_CODE (arg1) == NOP_EXPR
9584
                   || TREE_CODE (arg1) == CONVERT_EXPR)
9585
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9586
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9587
        return
9588
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9589
                  fold_convert (TREE_TYPE (arg0),
9590
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9591
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
9592
                                                      1))),
9593
                  fold_convert (TREE_TYPE (arg0), integer_zero_node));
9594
 
9595
      /* Simplify comparison of something with itself.  (For IEEE
9596
         floating-point, we can only do some of these simplifications.)  */
9597
      if (operand_equal_p (arg0, arg1, 0))
9598
        {
9599
          switch (code)
9600
            {
9601
            case EQ_EXPR:
9602
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9603
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9604
                return constant_boolean_node (1, type);
9605
              break;
9606
 
9607
            case GE_EXPR:
9608
            case LE_EXPR:
9609
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9610
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9611
                return constant_boolean_node (1, type);
9612
              return fold_build2 (EQ_EXPR, type, arg0, arg1);
9613
 
9614
            case NE_EXPR:
9615
              /* For NE, we can only do this simplification if integer
9616
                 or we don't honor IEEE floating point NaNs.  */
9617
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9618
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9619
                break;
9620
              /* ... fall through ...  */
9621
            case GT_EXPR:
9622
            case LT_EXPR:
9623
              return constant_boolean_node (0, type);
9624
            default:
9625
              gcc_unreachable ();
9626
            }
9627
        }
9628
 
9629
      /* If we are comparing an expression that just has comparisons
9630
         of two integer values, arithmetic expressions of those comparisons,
9631
         and constants, we can simplify it.  There are only three cases
9632
         to check: the two values can either be equal, the first can be
9633
         greater, or the second can be greater.  Fold the expression for
9634
         those three values.  Since each value must be 0 or 1, we have
9635
         eight possibilities, each of which corresponds to the constant 0
9636
         or 1 or one of the six possible comparisons.
9637
 
9638
         This handles common cases like (a > b) == 0 but also handles
9639
         expressions like  ((x > y) - (y > x)) > 0, which supposedly
9640
         occur in macroized code.  */
9641
 
9642
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9643
        {
9644
          tree cval1 = 0, cval2 = 0;
9645
          int save_p = 0;
9646
 
9647
          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9648
              /* Don't handle degenerate cases here; they should already
9649
                 have been handled anyway.  */
9650
              && cval1 != 0 && cval2 != 0
9651
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9652
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9653
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9654
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9655
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9656
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9657
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9658
            {
9659
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9660
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9661
 
9662
              /* We can't just pass T to eval_subst in case cval1 or cval2
9663
                 was the same as ARG1.  */
9664
 
9665
              tree high_result
9666
                = fold_build2 (code, type,
9667
                               eval_subst (arg0, cval1, maxval,
9668
                                           cval2, minval),
9669
                               arg1);
9670
              tree equal_result
9671
                = fold_build2 (code, type,
9672
                               eval_subst (arg0, cval1, maxval,
9673
                                           cval2, maxval),
9674
                               arg1);
9675
              tree low_result
9676
                = fold_build2 (code, type,
9677
                               eval_subst (arg0, cval1, minval,
9678
                                           cval2, maxval),
9679
                               arg1);
9680
 
9681
              /* All three of these results should be 0 or 1.  Confirm they
9682
                 are.  Then use those values to select the proper code
9683
                 to use.  */
9684
 
9685
              if ((integer_zerop (high_result)
9686
                   || integer_onep (high_result))
9687
                  && (integer_zerop (equal_result)
9688
                      || integer_onep (equal_result))
9689
                  && (integer_zerop (low_result)
9690
                      || integer_onep (low_result)))
9691
                {
9692
                  /* Make a 3-bit mask with the high-order bit being the
9693
                     value for `>', the next for '=', and the low for '<'.  */
9694
                  switch ((integer_onep (high_result) * 4)
9695
                          + (integer_onep (equal_result) * 2)
9696
                          + integer_onep (low_result))
9697
                    {
9698
                    case 0:
9699
                      /* Always false.  */
9700
                      return omit_one_operand (type, integer_zero_node, arg0);
9701
                    case 1:
9702
                      code = LT_EXPR;
9703
                      break;
9704
                    case 2:
9705
                      code = EQ_EXPR;
9706
                      break;
9707
                    case 3:
9708
                      code = LE_EXPR;
9709
                      break;
9710
                    case 4:
9711
                      code = GT_EXPR;
9712
                      break;
9713
                    case 5:
9714
                      code = NE_EXPR;
9715
                      break;
9716
                    case 6:
9717
                      code = GE_EXPR;
9718
                      break;
9719
                    case 7:
9720
                      /* Always true.  */
9721
                      return omit_one_operand (type, integer_one_node, arg0);
9722
                    }
9723
 
9724
                  if (save_p)
9725
                    return save_expr (build2 (code, type, cval1, cval2));
9726
                  else
9727
                    return fold_build2 (code, type, cval1, cval2);
9728
                }
9729
            }
9730
        }
9731
 
9732
      /* If this is a comparison of a field, we may be able to simplify it.  */
9733
      if (((TREE_CODE (arg0) == COMPONENT_REF
9734
            && lang_hooks.can_use_bit_fields_p ())
9735
           || TREE_CODE (arg0) == BIT_FIELD_REF)
9736
          && (code == EQ_EXPR || code == NE_EXPR)
9737
          /* Handle the constant case even without -O
9738
             to make sure the warnings are given.  */
9739
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9740
        {
9741
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9742
          if (t1)
9743
            return t1;
9744
        }
9745
 
9746
      /* Fold a comparison of the address of COMPONENT_REFs with the same
9747
         type and component to a comparison of the address of the base
9748
         object.  In short, &x->a OP &y->a to x OP y and
9749
         &x->a OP &y.a to x OP &y  */
9750
      if (TREE_CODE (arg0) == ADDR_EXPR
9751
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9752
          && TREE_CODE (arg1) == ADDR_EXPR
9753
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9754
        {
9755
          tree cref0 = TREE_OPERAND (arg0, 0);
9756
          tree cref1 = TREE_OPERAND (arg1, 0);
9757
          if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9758
            {
9759
              tree op0 = TREE_OPERAND (cref0, 0);
9760
              tree op1 = TREE_OPERAND (cref1, 0);
9761
              return fold_build2 (code, type,
9762
                                  build_fold_addr_expr (op0),
9763
                                  build_fold_addr_expr (op1));
9764
            }
9765
        }
9766
 
9767
      /* Optimize comparisons of strlen vs zero to a compare of the
9768
         first character of the string vs zero.  To wit,
9769
                strlen(ptr) == 0   =>  *ptr == 0
9770
                strlen(ptr) != 0   =>  *ptr != 0
9771
         Other cases should reduce to one of these two (or a constant)
9772
         due to the return value of strlen being unsigned.  */
9773
      if ((code == EQ_EXPR || code == NE_EXPR)
9774
          && integer_zerop (arg1)
9775
          && TREE_CODE (arg0) == CALL_EXPR)
9776
        {
9777
          tree fndecl = get_callee_fndecl (arg0);
9778
          tree arglist;
9779
 
9780
          if (fndecl
9781
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9782
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9783
              && (arglist = TREE_OPERAND (arg0, 1))
9784
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9785
              && ! TREE_CHAIN (arglist))
9786
            {
9787
              tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9788
              return fold_build2 (code, type, iref,
9789
                                  build_int_cst (TREE_TYPE (iref), 0));
9790
            }
9791
        }
9792
 
9793
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9794
         into a single range test.  */
9795
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9796
           || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9797
          && TREE_CODE (arg1) == INTEGER_CST
9798
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9799
          && !integer_zerop (TREE_OPERAND (arg0, 1))
9800
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9801
          && !TREE_OVERFLOW (arg1))
9802
        {
9803
          t1 = fold_div_compare (code, type, arg0, arg1);
9804
          if (t1 != NULL_TREE)
9805
            return t1;
9806
        }
9807
 
9808
      if ((code == EQ_EXPR || code == NE_EXPR)
9809
          && integer_zerop (arg1)
9810
          && tree_expr_nonzero_p (arg0))
9811
        {
9812
          tree res = constant_boolean_node (code==NE_EXPR, type);
9813
          return omit_one_operand (type, res, arg0);
9814
        }
9815
 
9816
      t1 = fold_relational_const (code, type, arg0, arg1);
9817
      return t1 == NULL_TREE ? NULL_TREE : t1;
9818
 
9819
    case UNORDERED_EXPR:
9820
    case ORDERED_EXPR:
9821
    case UNLT_EXPR:
9822
    case UNLE_EXPR:
9823
    case UNGT_EXPR:
9824
    case UNGE_EXPR:
9825
    case UNEQ_EXPR:
9826
    case LTGT_EXPR:
9827
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9828
        {
9829
          t1 = fold_relational_const (code, type, arg0, arg1);
9830
          if (t1 != NULL_TREE)
9831
            return t1;
9832
        }
9833
 
9834
      /* If the first operand is NaN, the result is constant.  */
9835
      if (TREE_CODE (arg0) == REAL_CST
9836
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9837
          && (code != LTGT_EXPR || ! flag_trapping_math))
9838
        {
9839
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9840
               ? integer_zero_node
9841
               : integer_one_node;
9842
          return omit_one_operand (type, t1, arg1);
9843
        }
9844
 
9845
      /* If the second operand is NaN, the result is constant.  */
9846
      if (TREE_CODE (arg1) == REAL_CST
9847
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9848
          && (code != LTGT_EXPR || ! flag_trapping_math))
9849
        {
9850
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9851
               ? integer_zero_node
9852
               : integer_one_node;
9853
          return omit_one_operand (type, t1, arg0);
9854
        }
9855
 
9856
      /* Simplify unordered comparison of something with itself.  */
9857
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9858
          && operand_equal_p (arg0, arg1, 0))
9859
        return constant_boolean_node (1, type);
9860
 
9861
      if (code == LTGT_EXPR
9862
          && !flag_trapping_math
9863
          && operand_equal_p (arg0, arg1, 0))
9864
        return constant_boolean_node (0, type);
9865
 
9866
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9867
      {
9868
        tree targ0 = strip_float_extensions (arg0);
9869
        tree targ1 = strip_float_extensions (arg1);
9870
        tree newtype = TREE_TYPE (targ0);
9871
 
9872
        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9873
          newtype = TREE_TYPE (targ1);
9874
 
9875
        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9876
          return fold_build2 (code, type, fold_convert (newtype, targ0),
9877
                              fold_convert (newtype, targ1));
9878
      }
9879
 
9880
      return NULL_TREE;
9881
 
9882
    case COMPOUND_EXPR:
9883
      /* When pedantic, a compound expression can be neither an lvalue
9884
         nor an integer constant expression.  */
9885
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9886
        return NULL_TREE;
9887
      /* Don't let (0, 0) be null pointer constant.  */
9888
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9889
                                 : fold_convert (type, arg1);
9890
      return pedantic_non_lvalue (tem);
9891
 
9892
    case COMPLEX_EXPR:
9893
      if (wins)
9894
        return build_complex (type, arg0, arg1);
9895
      return NULL_TREE;
9896
 
9897
    case ASSERT_EXPR:
9898
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
9899
      gcc_unreachable ();
9900
 
9901
    default:
9902
      return NULL_TREE;
9903
    } /* switch (code) */
9904
}
9905
 
9906
/* Callback for walk_tree, looking for LABEL_EXPR.
9907
   Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9908
   Do not check the sub-tree of GOTO_EXPR.  */
9909
 
9910
static tree
9911
contains_label_1 (tree *tp,
9912
                  int *walk_subtrees,
9913
                  void *data ATTRIBUTE_UNUSED)
9914
{
9915
  switch (TREE_CODE (*tp))
9916
    {
9917
    case LABEL_EXPR:
9918
      return *tp;
9919
    case GOTO_EXPR:
9920
      *walk_subtrees = 0;
9921
    /* no break */
9922
    default:
9923
      return NULL_TREE;
9924
    }
9925
}
9926
 
9927
/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9928
   accessible from outside the sub-tree. Returns NULL_TREE if no
9929
   addressable label is found.  */
9930
 
9931
static bool
9932
contains_label_p (tree st)
9933
{
9934
  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9935
}
9936
 
9937
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  /* ARG0/ARG1 are OP0/OP1 with no-op conversions stripped; OP0/OP1 keep
     the original (possibly converted) form for rebuilding.  */
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      /* Fold a field reference into a CONSTRUCTOR to the matching
         initializer value, when one exists.  */
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue (tem);
          return NULL_TREE;
        }
      /* A ? X : X simplifies to X (keeping A for its side effects).  */
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = invert_truthvalue (arg0);
          if (COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold_build3 (code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        return fold_convert (type, fold_build2 (BIT_AND_EXPR,
                                                TREE_TYPE (tem), tem, arg1));

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2 (BIT_AND_EXPR, type,
                                TREE_OPERAND (tem, 0), arg1);
        }

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            fold_convert (type, arg0),
                            arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold_build2 (TRUTH_ORIF_EXPR, type,
                                fold_convert (type, tem),
                                arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                fold_convert (type, tem),
                                op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2 (TRUTH_ORIF_EXPR, type,
                            fold_convert (type, arg0),
                            op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
        return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      /* Fold an aligned, whole-element extraction from a constant
         vector to the corresponding element value.  */
      if (TREE_CODE (arg0) == VECTOR_CST
          && type == TREE_TYPE (TREE_TYPE (arg0))
          && host_integerp (arg1, 1)
          && host_integerp (op2, 1))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              tree elements = TREE_VECTOR_CST_ELTS (arg0);
              while (idx-- > 0 && elements)
                elements = TREE_CHAIN (elements);
              if (elements)
                return TREE_VALUE (elements);
              else
                /* Trailing elements not present in the list are
                   implicitly zero.  */
                return fold_convert (type, integer_zero_node);
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
10192
 
10193
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
/* With checking enabled, the real worker is renamed to fold_1 and a
   checksumming wrapper named fold (defined further below) calls it.  */
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      /* Dispatch on arity to the specialized folders; when they fail
         (return NULL_TREE) hand back the original expression.  */
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary (code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary (code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary (code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
10254
 
10255
#ifdef ENABLE_FOLD_CHECKING
10256
#undef fold
10257
 
10258
static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10259
static void fold_check_failed (tree, tree);
10260
void print_fold_checksum (tree);
10261
 
10262
/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  /* Digest EXPR before folding.  The hash table records already-visited
     nodes so shared subtrees are checksummed only once.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  /* Digest EXPR again; any difference means fold_1 mutated its input
     in place, which is a bug.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
10292
 
10293
void
10294
print_fold_checksum (tree expr)
10295
{
10296
  struct md5_ctx ctx;
10297
  unsigned char checksum[16], cnt;
10298
  htab_t ht;
10299
 
10300
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10301
  md5_init_ctx (&ctx);
10302
  fold_checksum_tree (expr, &ctx, ht);
10303
  md5_finish_ctx (&ctx, checksum);
10304
  htab_delete (ht);
10305
  for (cnt = 0; cnt < 16; ++cnt)
10306
    fprintf (stderr, "%02x", checksum[cnt]);
10307
  putc ('\n', stderr);
10308
}
10309
 
10310
/* Report that fold modified the tree it was given; called when the
   before/after checksums in the checking wrapper differ.  Does not
   return.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
10315
 
10316
/* Accumulate into CTX an MD5 digest of the tree rooted at EXPR.  HT
   records nodes already visited, so shared subtrees contribute only
   once and cycles terminate.  Fields that fold is allowed to modify
   (DECL_ASSEMBLER_NAME, type caches, etc.) are masked out by hashing
   a scrubbed copy of the node.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  /* Scratch copy of a node, used to blank out fields that may change
     legitimately; tree_function_decl is the largest tree struct
     (checked by the assertion below).  */
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_function_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
        {
          TYPE_CACHED_VALUES_P (expr) = 0;
          TYPE_CACHED_VALUES (expr) = NULL;
        }
    }
  /* Hash the raw node bytes, then recurse into referenced trees.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          /* Walk the chain iteratively to avoid deep recursion on
             long lists.  */
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
10456
 
10457
#endif
10458
 
10459
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  /* Digest OP0 before folding so we can verify fold_unary did not
     modify its argument in place.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
10495
 
10496
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  /* Digest both operands before folding so we can verify fold_binary
     did not modify either argument in place.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
10549
 
10550
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
             MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  /* Digest all three operands before folding so we can verify
     fold_ternary did not modify any argument in place.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem =  build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
10618
 
10619
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save the trap/rounding-related flags and clear them, so that folding
   of initializers may assume a non-trapping, default-rounding
   environment.  Must be paired with END_FOLD_INIT in the same scope.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

/* Restore the flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv
10638
 
10639
tree
10640
fold_build1_initializer (enum tree_code code, tree type, tree op)
10641
{
10642
  tree result;
10643
  START_FOLD_INIT;
10644
 
10645
  result = fold_build1 (code, type, op);
10646
 
10647
  END_FOLD_INIT;
10648
  return result;
10649
}
10650
 
10651
tree
10652
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10653
{
10654
  tree result;
10655
  START_FOLD_INIT;
10656
 
10657
  result = fold_build2 (code, type, op0, op1);
10658
 
10659
  END_FOLD_INIT;
10660
  return result;
10661
}
10662
 
10663
tree
10664
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10665
                         tree op2)
10666
{
10667
  tree result;
10668
  START_FOLD_INIT;
10669
 
10670
  result = fold_build3 (code, type, op0, op1, op2);
10671
 
10672
  END_FOLD_INIT;
10673
  return result;
10674
}
10675
 
10676
#undef START_FOLD_INIT
10677
#undef END_FOLD_INIT
10678
 
10679
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Any expression is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  /* This analysis is only meaningful for integer types.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference requires both operands to be multiples.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      /* X << C is a multiple of BOTTOM if 1 << C is, provided the shift
         count is a small constant so 1 << C can be computed exactly.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      /* Look through the wrapper at how the operand was computed.  */
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* For constants, just test TOP % BOTTOM == 0; refuse negative
         constants in unsigned types, where the sign is ambiguous.  */
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
10791
 
10792
/* Return true if `t' is known to be non-negative.  A return of 0 means
   "don't know", not "known negative" -- the analysis is conservative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  /* Values of an unsigned type are non-negative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      /* For floats, x + y is non-negative if both operands are.  */
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              /* The sum needs at most max(prec1, prec2) + 1 bits, so it
                 cannot reach the sign bit of the wider result type.  */
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* One non-negative operand suffices: AND clears bits, and MAX is
         at least as large as the non-negative operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* These require both operands to be non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      /* Sign follows the first (or only) operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value of these nodes is their second operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      /* Both arms of the conditional must be non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      /* Conversions: reason from the inner operand where the conversion
         cannot introduce a sign.  */
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              /* A zero-extension into a strictly wider type cannot set
                 the sign bit of the result.  */
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                      && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
/* CASE_BUILTIN_F expands to the float/double/long double variants of a
   math builtin; CASE_BUILTIN_I to the int/long/long long variants.  */
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_SQRT)
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_LCEIL)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LFLOOR)
            CASE_BUILTIN_F (BUILT_IN_LLCEIL)
            CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
11059
 
11060
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.
   A return of false means "don't know" -- the analysis is conservative.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* |x| is nonzero exactly when x is.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
         if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      /* Only valid when signed overflow is undefined; with wrapping
         semantics a nonzero sum could wrap to zero.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      /* Nonzero * nonzero is nonzero, again only without wrapping.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* A widening (or same-width) conversion preserves nonzero-ness;
         a narrowing conversion could drop all the set bits.  */
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

   case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      /* Both arms must be nonzero.  */
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these nodes is their second operand.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      /* Wrappers: look at the wrapped value.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      /* OR is nonzero if either operand is.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      /* alloca never returns a null pointer.  */
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
11179
 
11180
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11181
   attempt to fold the expression to a constant without modifying TYPE,
11182
   OP0 or OP1.
11183
 
11184
   If the expression could be simplified to a constant, then return
11185
   the constant.  If the expression would not be simplified to a
11186
   constant, then return NULL_TREE.  */
11187
 
11188
tree
11189
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11190
{
11191
  tree tem = fold_binary (code, type, op0, op1);
11192
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11193
}
11194
 
11195
/* Given the components of a unary expression CODE, TYPE and OP0,
11196
   attempt to fold the expression to a constant without modifying
11197
   TYPE or OP0.
11198
 
11199
   If the expression could be simplified to a constant, then return
11200
   the constant.  If the expression would not be simplified to a
11201
   constant, then return NULL_TREE.  */
11202
 
11203
tree
11204
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11205
{
11206
  tree tem = fold_unary (code, type, op0);
11207
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11208
}
11209
 
11210
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        /* *(s + i): let string_constant dig out S and I.  */
        string = string_constant (exp1, &index);
      else
        {
          /* s[i]: the index is operand 1, adjusted for the lower bound.  */
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      /* Fold only when the index is a constant inside the string and the
         element is a single-byte integer, so the char can be read directly
         from the STRING_CST's storage.  */
      if (string
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
11258
 
11259
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        /* Negate the double-int value; neg_double reports whether the
           negation overflowed (e.g. -INT_MIN).  */
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        /* Propagate overflow flags; overflow is only meaningful for
           signed types, unsigned negation wraps by definition.  */
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
11296
 
11297
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          /* neg_double reports overflow, e.g. for abs (INT_MIN).  */
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      /* For reals, just clear the sign if it is set.  */
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t =  arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
11345
 
11346
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
11347
   constant.  TYPE is the type of the result.  */
11348
 
11349
static tree
11350
fold_not_const (tree arg0, tree type)
11351
{
11352
  tree t = NULL_TREE;
11353
 
11354
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11355
 
11356
  t = build_int_cst_wide (type,
11357
                          ~ TREE_INT_CST_LOW (arg0),
11358
                          ~ TREE_INT_CST_HIGH (arg0));
11359
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11360
                      TREE_CONSTANT_OVERFLOW (arg0));
11361
 
11362
  return t;
11363
}
11364
 
11365
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              /* NaN compares unequal and unordered.  */
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              /* The UN* comparisons are true for unordered operands.  */
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              /* These raise an exception on NaN; don't fold them away
                 when trapping math must be preserved.  */
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
11468
 
11469
/* Build an expression for the a clean point containing EXPR with type TYPE.
11470
   Don't build a cleanup point expression for EXPR which don't have side
11471
   effects.  */
11472
 
11473
tree
11474
fold_build_cleanup_point_expr (tree type, tree expr)
11475
{
11476
  /* If the expression does not have side effects then we don't have to wrap
11477
     it with a cleanup point expression.  */
11478
  if (!TREE_SIDE_EFFECTS (expr))
11479
    return expr;
11480
 
11481
  /* If the expression is a return, check to see if the expression inside the
11482
     return has no side effects or the right hand side of the modify expression
11483
     inside the return. If either don't have side effects set we don't need to
11484
     wrap the expression in a cleanup point expression.  Note we don't check the
11485
     left hand side of the modify because it should always be a return decl.  */
11486
  if (TREE_CODE (expr) == RETURN_EXPR)
11487
    {
11488
      tree op = TREE_OPERAND (expr, 0);
11489
      if (!op || !TREE_SIDE_EFFECTS (op))
11490
        return expr;
11491
      op = TREE_OPERAND (op, 1);
11492
      if (!TREE_SIDE_EFFECTS (op))
11493
        return expr;
11494
    }
11495
 
11496
  return build1 (CLEANUP_POINT_EXPR, type, expr);
11497
}
11498
 
11499
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
11500
   avoid confusing the gimplify process.  */
11501
 
11502
tree
11503
build_fold_addr_expr_with_type (tree t, tree ptrtype)
11504
{
11505
  /* The size of the object is not relevant when talking about its address.  */
11506
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
11507
    t = TREE_OPERAND (t, 0);
11508
 
11509
  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11510
  if (TREE_CODE (t) == INDIRECT_REF
11511
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11512
    {
11513
      t = TREE_OPERAND (t, 0);
11514
      if (TREE_TYPE (t) != ptrtype)
11515
        t = build1 (NOP_EXPR, ptrtype, t);
11516
    }
11517
  else
11518
    {
11519
      tree base = t;
11520
 
11521
      while (handled_component_p (base))
11522
        base = TREE_OPERAND (base, 0);
11523
      if (DECL_P (base))
11524
        TREE_ADDRESSABLE (base) = 1;
11525
 
11526
      t = build1 (ADDR_EXPR, ptrtype, t);
11527
    }
11528
 
11529
  return t;
11530
}
11531
 
11532
tree
11533
build_fold_addr_expr (tree t)
11534
{
11535
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11536
}
11537
 
11538
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (type == optype)
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          /* Index with the array's lower bound (default 0) so the
             reference denotes the first element.  */
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
11587
 
11588
/* Builds an expression for an indirection through T, simplifying some
11589
   cases.  */
11590
 
11591
tree
11592
build_fold_indirect_ref (tree t)
11593
{
11594
  tree type = TREE_TYPE (TREE_TYPE (t));
11595
  tree sub = fold_indirect_ref_1 (type, t);
11596
 
11597
  if (sub)
11598
    return sub;
11599
  else
11600
    return build1 (INDIRECT_REF, type, t);
11601
}
11602
 
11603
/* Given an INDIRECT_REF T, return either T or a simplified version.  */
11604
 
11605
tree
11606
fold_indirect_ref (tree t)
11607
{
11608
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11609
 
11610
  if (sub)
11611
    return sub;
11612
  else
11613
    return t;
11614
}
11615
 
11616
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* With no side effects at all, the whole expression can be dropped.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Otherwise repeatedly peel off outer nodes whose own computation is
     not needed, keeping the operands that do have side effects.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        /* Keep whichever operand still has side effects; if both do,
           the node must be preserved as-is.  */
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            /* A conditional can only be reduced to its condition when
               neither arm has side effects.  */
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
11668
 
11669
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11670
   This can only be applied to objects of a sizetype.  */
11671
 
11672
tree
11673
round_up (tree value, int divisor)
11674
{
11675
  tree div = NULL_TREE;
11676
 
11677
  gcc_assert (divisor > 0);
11678
  if (divisor == 1)
11679
    return value;
11680
 
11681
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11682
     have to do anything.  Only do this when we are not given a const,
11683
     because in that case, this check is more expensive than just
11684
     doing it.  */
11685
  if (TREE_CODE (value) != INTEGER_CST)
11686
    {
11687
      div = build_int_cst (TREE_TYPE (value), divisor);
11688
 
11689
      if (multiple_of_p (TREE_TYPE (value), value, div))
11690
        return value;
11691
    }
11692
 
11693
  /* If divisor is a power of two, simplify this to bit manipulation.  */
11694
  if (divisor == (divisor & -divisor))
11695
    {
11696
      tree t;
11697
 
11698
      t = build_int_cst (TREE_TYPE (value), divisor - 1);
11699
      value = size_binop (PLUS_EXPR, value, t);
11700
      t = build_int_cst (TREE_TYPE (value), -divisor);
11701
      value = size_binop (BIT_AND_EXPR, value, t);
11702
    }
11703
  else
11704
    {
11705
      if (!div)
11706
        div = build_int_cst (TREE_TYPE (value), divisor);
11707
      value = size_binop (CEIL_DIV_EXPR, value, div);
11708
      value = size_binop (MULT_EXPR, value, div);
11709
    }
11710
 
11711
  return value;
11712
}
11713
 
11714
/* Likewise, but round down.  */
11715
 
11716
tree
11717
round_down (tree value, int divisor)
11718
{
11719
  tree div = NULL_TREE;
11720
 
11721
  gcc_assert (divisor > 0);
11722
  if (divisor == 1)
11723
    return value;
11724
 
11725
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11726
     have to do anything.  Only do this when we are not given a const,
11727
     because in that case, this check is more expensive than just
11728
     doing it.  */
11729
  if (TREE_CODE (value) != INTEGER_CST)
11730
    {
11731
      div = build_int_cst (TREE_TYPE (value), divisor);
11732
 
11733
      if (multiple_of_p (TREE_TYPE (value), value, div))
11734
        return value;
11735
    }
11736
 
11737
  /* If divisor is a power of two, simplify this to bit manipulation.  */
11738
  if (divisor == (divisor & -divisor))
11739
    {
11740
      tree t;
11741
 
11742
      t = build_int_cst (TREE_TYPE (value), -divisor);
11743
      value = size_binop (BIT_AND_EXPR, value, t);
11744
    }
11745
  else
11746
    {
11747
      if (!div)
11748
        div = build_int_cst (TREE_TYPE (value), divisor);
11749
      value = size_binop (FLOOR_DIV_EXPR, value, div);
11750
      value = size_binop (MULT_EXPR, value, div);
11751
    }
11752
 
11753
  return value;
11754
}
11755
 
11756
/* Returns the pointer to the base of the object addressed by EXP and
11757
   extracts the information about the offset of the access, storing it
11758
   to PBITPOS and POFFSET.  */
11759
 
11760
static tree
11761
split_address_to_core_and_offset (tree exp,
11762
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
11763
{
11764
  tree core;
11765
  enum machine_mode mode;
11766
  int unsignedp, volatilep;
11767
  HOST_WIDE_INT bitsize;
11768
 
11769
  if (TREE_CODE (exp) == ADDR_EXPR)
11770
    {
11771
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11772
                                  poffset, &mode, &unsignedp, &volatilep,
11773
                                  false);
11774
      core = build_fold_addr_expr (core);
11775
    }
11776
  else
11777
    {
11778
      core = exp;
11779
      *pbitpos = 0;
11780
      *poffset = NULL_TREE;
11781
    }
11782
 
11783
  return core;
11784
}
11785
 
11786
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* Both bit offsets must be whole bytes and the base objects must be
     the same, otherwise no constant byte difference exists.  */
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      /* Fold the difference of the two symbolic offsets; it must come
         out as a constant that fits in a HOST_WIDE_INT.  */
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Add in the difference of the constant bit positions, in bytes.  */
  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
11828
 
11829
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* Sign-only operations can be dropped entirely; recurse in case
         the operand contains more of them.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding, changing operand signs could
         change the magnitude of the result, so leave EXP alone.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      /* Rebuild only if at least one operand was simplified.  */
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }
  return NULL_TREE;
}
11862
 

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.