/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
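
/* A standalone sketch of the OVERFLOW_SUM_SIGN test above (illustrative
   only; hypothetical names, with int64_t standing in for HOST_WIDE_INT).
   The sum is computed through uint64_t so the addition itself is well
   defined in C; converting the wrapped result back to int64_t is
   implementation-defined, but is two's complement wraparound on the
   hosts GCC supports.  Compiles as its own program:

   #include <stdint.h>
   #include <stdio.h>

   // Signed addition overflows iff A and B agree in sign while A and
   // the wrapped sum disagree -- the same test OVERFLOW_SUM_SIGN makes.
   static int sum_overflows (int64_t a, int64_t b)
   {
     int64_t sum = (int64_t) ((uint64_t) a + (uint64_t) b);
     return (~(a ^ b) & (a ^ sum)) < 0;
   }

   int main (void)
   {
     printf ("%d\n", sum_overflows (INT64_MAX, 1));   // prints 1: wraps
     printf ("%d\n", sum_overflows (-5, 3));          // prints 0
     return 0;
   }
*/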

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
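
/* A standalone sketch of the encode/decode round trip (illustrative only;
   hypothetical names, with uint32_t words and 16-bit half-words in place
   of HOST_WIDE_INT and its halves):

   #include <stdint.h>
   #include <assert.h>

   enum { HBITS = 16, HBASE = 1u << HBITS, HMASK = HBASE - 1 };

   // Unpack a two-word integer into four positive half-word digits.
   static void encode4 (uint32_t w[4], uint32_t low, uint32_t hi)
   {
     w[0] = low & HMASK;  w[1] = low >> HBITS;
     w[2] = hi & HMASK;   w[3] = hi >> HBITS;
   }

   // Pack the digits back: each word is LOWPART + HIGHPART * BASE.
   static void decode4 (const uint32_t w[4], uint32_t *low, uint32_t *hi)
   {
     *low = w[0] + w[1] * HBASE;
     *hi = w[2] + w[3] * HBASE;
   }

   int main (void)
   {
     uint32_t w[4], lo, hi;
     encode4 (w, 0xDEADBEEFu, 0x12345678u);
     decode4 (w, &lo, &hi);
     assert (lo == 0xDEADBEEFu && hi == 0x12345678u);
     return 0;
   }
*/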

/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
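
/* A standalone sketch of the truncate-then-sign-extend step above
   (illustrative only; hypothetical names, a single int64_t word instead
   of the double-word pair):

   #include <stdint.h>
   #include <stdio.h>

   // Force V to PREC bits with sign extension; return 1 on overflow,
   // i.e. when argument and result differ, as fit_double_type does.
   static int fit_signed (int64_t *v, unsigned prec)
   {
     int64_t orig = *v;
     uint64_t u = (uint64_t) *v;
     if (prec < 64)
       {
         u &= ((uint64_t) 1 << prec) - 1;        // clear bits beyond PREC
         if (u & ((uint64_t) 1 << (prec - 1)))   // sign bit of the field
           u |= ~(((uint64_t) 1 << prec) - 1);   // sign extend
       }
     *v = (int64_t) u;
     return *v != orig;
   }

   int main (void)
   {
     int64_t v = 200;                        // does not fit in 8 signed bits
     int ovf = fit_signed (&v, 8);
     printf ("%lld %d\n", (long long) v, ovf);   // prints -56 1
     return 0;
   }
*/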

/* We force the double-word integer HIGH:LOW to the range of the type TYPE
   by sign- or zero-extending it.
   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when >0 we are only interested in signed overflow, and when <0
   we are interested in any overflow.  OVERFLOWED indicates whether
   overflow has already occurred.  We force the value to be within range
   of TYPE (by setting to 0 or 1 all the bits outside the type's range).
   We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.  The
   node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
                       + (unsigned HOST_WIDE_INT) h2
                       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
            || (h == h1
                && l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
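
/* A standalone sketch of the carry propagation used above (illustrative
   only; hypothetical names, uint64_t words in place of HOST_WIDE_INT):

   #include <stdint.h>
   #include <assert.h>

   // Double-word add: the carry out of the low word is exactly (l < l1),
   // and the unsigned overflow test matches add_double_with_sign.
   static int add128 (uint64_t l1, uint64_t h1, uint64_t l2, uint64_t h2,
                      uint64_t *lv, uint64_t *hv)
   {
     uint64_t l = l1 + l2;
     uint64_t h = h1 + h2 + (l < l1);
     *lv = l;
     *hv = h;
     return h < h1 || (h == h1 && l < l1);
   }

   int main (void)
   {
     uint64_t lo, hi;
     assert (add128 (~0ULL, 0, 1, 0, &lo, &hi) == 0 && lo == 0 && hi == 1);
     assert (add128 (0, ~0ULL, 0, 1, &lo, &hi) == 1);   // high word wraps
     return 0;
   }
*/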

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
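
/* A standalone sketch of the split negation above (illustrative only;
   hypothetical names, uint64_t words in place of HOST_WIDE_INT):

   #include <stdint.h>
   #include <assert.h>

   // When the low word is nonzero the borrow propagates into the high
   // word, so it is simply complemented; otherwise negate the high word.
   static void neg128 (uint64_t l1, uint64_t h1, uint64_t *lv, uint64_t *hv)
   {
     if (l1 == 0)
       {
         *lv = 0;
         *hv = 0 - h1;
       }
     else
       {
         *lv = 0 - l1;
         *hv = ~h1;
       }
   }

   int main (void)
   {
     uint64_t lo, hi;
     neg128 (1, 0, &lo, &hi);            // -(1) is the all-ones pair
     assert (lo == ~0ULL && hi == ~0ULL);
     neg128 (0, 1, &lo, &hi);            // -(2^64) is 2^128 - 2^64
     assert (lo == 0 && hi == ~0ULL);
     return 0;
   }
*/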

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
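
/* A standalone sketch of the same schoolbook loop (illustrative only;
   hypothetical names): a 64x64 -> 128-bit multiply from 32-bit digits,
   with the identical carry discipline -- each partial product plus both
   carries still fits in one 64-bit word.

   #include <stdint.h>
   #include <assert.h>

   static void mul64x64 (uint64_t a, uint64_t b, uint64_t *lo, uint64_t *hi)
   {
     uint64_t ad[2] = { a & 0xFFFFFFFFu, a >> 32 };
     uint64_t bd[2] = { b & 0xFFFFFFFFu, b >> 32 };
     uint64_t prod[4] = { 0, 0, 0, 0 };

     for (int i = 0; i < 2; i++)
       {
         uint64_t carry = 0;
         for (int j = 0; j < 2; j++)
           {
             carry += ad[i] * bd[j] + prod[i + j];   // cannot overflow
             prod[i + j] = carry & 0xFFFFFFFFu;
             carry >>= 32;
           }
         prod[i + 2] = carry;
       }

     *lo = prod[0] | (prod[1] << 32);
     *hi = prod[2] | (prod[3] << 32);
   }

   int main (void)
   {
     uint64_t lo, hi;
     mul64x64 (~0ULL, ~0ULL, &lo, &hi);   // (2^64-1)^2 = 2^128 - 2^65 + 1
     assert (lo == 1 && hi == ~0ULL - 1);
     return 0;
   }
*/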

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
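
/* A standalone sketch of the left shift above (illustrative only;
   hypothetical names, uint64_t words, no precision masking).  Note the
   two-step ">> (64 - count - 1) >> 1": a single ">> (64 - count)" would
   be undefined for count == 0, the same hazard the code above avoids.

   #include <stdint.h>
   #include <assert.h>

   static void lshift128 (uint64_t l1, uint64_t h1, unsigned count,
                          uint64_t *lv, uint64_t *hv)
   {
     if (count >= 128)                      // shifted out entirely
       {
         *hv = 0;
         *lv = 0;
       }
     else if (count >= 64)                  // low word crosses into high
       {
         *hv = l1 << (count - 64);
         *lv = 0;
       }
     else
       {
         *hv = (h1 << count) | (l1 >> (64 - count - 1) >> 1);
         *lv = l1 << count;
       }
   }

   int main (void)
   {
     uint64_t lo, hi;
     lshift128 (1, 0, 0, &lo, &hi);         // count == 0 is well defined
     assert (lo == 1 && hi == 0);
     lshift128 (1, 0, 65, &lo, &hi);
     assert (lo == 0 && hi == 2);
     return 0;
   }
*/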

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
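
/* A standalone sketch of rotation as the OR of two opposite shifts, the
   composition both rotate routines use (illustrative only; hypothetical
   names, a single 64-bit word for brevity):

   #include <stdint.h>
   #include <assert.h>

   static uint64_t rotl64 (uint64_t x, unsigned count)
   {
     count %= 64;
     if (count == 0)
       return x;                 // avoid the undefined shift by 64
     return (x << count) | (x >> (64 - count));
   }

   int main (void)
   {
     assert (rotl64 (0x8000000000000001ULL, 1) == 3);   // top bit wraps
     assert (rotl64 (0x12345678ULL, 0) == 0x12345678ULL);
     return 0;
   }
*/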

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);   /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;         /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
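
/* A standalone sketch of the rounding adjustments above (illustrative
   only; hypothetical names, single-word operands): C's '/' truncates
   toward zero, and the other rounding modes are derived from the trial
   quotient and remainder exactly as in the switch above.

   #include <stdio.h>

   static long floor_div (long a, long b)    // FLOOR_DIV_EXPR
   {
     long q = a / b;
     if (a % b != 0 && ((a < 0) != (b < 0)))
       q--;                                  // negative ratio, nonzero rem
     return q;
   }

   static long ceil_div (long a, long b)     // CEIL_DIV_EXPR
   {
     long q = a / b;
     if (a % b != 0 && ((a < 0) == (b < 0)))
       q++;                                  // positive ratio, nonzero rem
     return q;
   }

   int main (void)
   {
     printf ("%ld %ld\n", floor_div (-7, 2), ceil_div (-7, 2));   // -4 -3
     printf ("%ld %ld\n", floor_div (7, 2), ceil_div (7, 2));     // 3 4
     return 0;
   }
*/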

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The signedness of the division follows operand two, which does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}
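
/* The same shape in miniature (illustrative only; hypothetical names):
   divide, and accept the quotient only when the remainder is zero.

   #include <stdio.h>

   static int div_if_exact (long a, long b, long *quo)
   {
     if (a % b != 0)
       return 0;                 // the NULL_TREE case: not exact
     *quo = a / b;
     return 1;
   }

   int main (void)
   {
     long q;
     printf ("%d\n", div_if_exact (12, 4, &q) ? (int) q : -1);   // 3
     printf ("%d\n", div_if_exact (13, 4, &q) ? (int) q : -1);   // -1
     return 0;
   }
*/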

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
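
/* The deferral pattern in miniature (illustrative only; a hypothetical
   standalone sketch, not GCC's diagnostic machinery): while the depth
   counter is positive warnings are only recorded, and the outermost
   "undefer" decides whether to emit the single saved message.

   #include <stdio.h>

   static int defer_depth;
   static const char *pending_msg;

   static void warn (const char *msg)
   {
     if (defer_depth > 0)
       {
         if (pending_msg == NULL)
           pending_msg = msg;     // keep at most one message
       }
     else
       printf ("warning: %s\n", msg);
   }

   static void defer (void) { ++defer_depth; }

   static void undefer (int issue)
   {
     if (--defer_depth > 0)
       return;
     if (issue && pending_msg)
       printf ("warning: %s\n", pending_msg);
     pending_msg = NULL;
   }

   int main (void)
   {
     defer ();
     warn ("signed overflow assumed not to occur");   // deferred
     undefer (1);                                     // emitted here
     return 0;
   }
*/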

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
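
/* A standalone sketch of the check above (illustrative only; hypothetical
   names): in a PREC-bit signed type the only value whose negation
   overflows is -2^(PREC-1), whose bit pattern is 1 << (PREC - 1).

   #include <stdint.h>
   #include <stdio.h>

   static int may_negate (uint64_t val, unsigned prec)
   {
     if (prec < 64)
       val &= ((uint64_t) 1 << prec) - 1;   // keep only PREC bits
     return val != (uint64_t) 1 << (prec - 1);
   }

   int main (void)
   {
     printf ("%d\n", may_negate (0x80, 8));   // 0: -128 has no 8-bit negation
     printf ("%d\n", may_negate (0x7F, 8));   // 1: 127 negates fine
     return 0;
   }
*/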

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                            fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                            fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                            fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
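
/* A standalone check of the RSHIFT_EXPR identity used above (illustrative
   only): -((int)x >> 31) equals (unsigned)x >> 31, both yielding 1 when
   the sign bit of x is set and 0 otherwise.  (Right-shifting a negative
   int is implementation-defined in C; GCC defines it as arithmetic.)

   #include <stdint.h>
   #include <stdio.h>

   int main (void)
   {
     int32_t x = -12345;
     printf ("%d %d\n", -(x >> 31), (int32_t) ((uint32_t) x >> 31));  // 1 1
     x = 12345;
     printf ("%d %d\n", -(x >> 31), (int32_t) ((uint32_t) x >> 31));  // 0 0
     return 0;
   }
*/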

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE. */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
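/* Worked example (informal): with IN = x + 4 and CODE = PLUS_EXPR,
   the INTEGER_CST 4 ends up in *LITP, *CONP and *MINUS_LITP stay
   null, and x is returned as the variable part.  With IN = x - 4 the
   literal lands in *MINUS_LITP instead, since it was subtracted.  */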

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
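/* For instance, re-associating T1 = x with T2 = -y under
   CODE = PLUS_EXPR takes the NEGATE_EXPR branch above and builds
   x - y, while a null T1 simply returns T2 unchanged.  (Informal
   illustration of the special cases.)  */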

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
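/* Hypothetical caller, shown only to illustrate the contract:

     tree a = build_int_cst (integer_type_node, 5);
     tree b = build_int_cst (integer_type_node, 3);
     tree p = int_const_binop (MULT_EXPR, a, b, 0);

   P is then an INTEGER_CST holding 15; with NOTRUNC == 0 the result
   passes through force_fit_type_double, which would also set
   TREE_OVERFLOW had the product not fit the type.  */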

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
          {
            /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_straight().

               Expand complex division to scalars, straightforward algorithm.
               a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
               t = br*br + bi*bi
            */
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          else
          {
            /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

               Expand complex division to scalars, modified algorithm to
               minimize overflow with wide input ranges.  */
            tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                        fold_abs_const (r2, TREE_TYPE (type)),
                                        fold_abs_const (i2, TREE_TYPE (type)));

            if (integer_nonzerop (compare))
              {
                /* In the TRUE branch, we compute
                   ratio = br/bi;
                   div = (br * ratio) + bi;
                   tr = (ar * ratio) + ai;
                   ti = (ai * ratio) - ar;
                   tr = tr / div;
                   ti = ti / div;  */
                tree ratio = const_binop (code, r2, i2, notrunc);
                tree div = const_binop (PLUS_EXPR, i2,
                                        const_binop (MULT_EXPR, r2, ratio,
                                                     notrunc),
                                        notrunc);
                real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                real = const_binop (PLUS_EXPR, real, i1, notrunc);
                real = const_binop (code, real, div, notrunc);

                imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                imag = const_binop (code, imag, div, notrunc);
              }
            else
              {
                /* In the FALSE branch, we compute
                   ratio = d/c;
                   divisor = (d * ratio) + c;
                   tr = (b * ratio) + a;
                   ti = b - (a * ratio);
                   tr = tr / div;
                   ti = ti / div;  */
                tree ratio = const_binop (code, i2, r2, notrunc);
                tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio,
                                                     notrunc),
                                        notrunc);

                real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                real = const_binop (PLUS_EXPR, real, r1, notrunc);
                real = const_binop (code, real, div, notrunc);

                imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                imag = const_binop (code, imag, div, notrunc);
              }
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated
             as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
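/* Numeric check of the straightforward complex division above
   (informal): (1 + 2i) / (3 + 4i) uses t = 3*3 + 4*4 = 25, giving
   real part (1*3 + 2*4)/25 = 11/25 and imaginary part
   (2*3 - 1*4)/25 = 2/25.  */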

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
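/* Hypothetical use, for illustration only:

     tree sz = size_binop (PLUS_EXPR, size_int (8), size_int (0));

   The PLUS_EXPR fast path above returns the size_int (8) operand
   itself, with no new node allocated.  */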

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
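/* For example, with ARG0 = (sizetype) 3 and ARG1 = (sizetype) 5 the
   constant path above computes 0 - (5 - 3) in ssizetype, i.e. -2,
   keeping the subtraction from being done in the unsigned type where
   3 - 5 would wrap.  (Informal illustration.)  */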

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting from a pointer,  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1))
                             /* or to a sizetype with same signedness
                                and the precision is unchanged.
                                ???  sizetype is always sign-extended,
                                but its signedness depends on the
                                frontend.  Thus we see spurious overflows
                                here if we do not check this.  */
                             && !((TYPE_PRECISION (TREE_TYPE (arg1))
                                   == TYPE_PRECISION (type))
                                  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                                      == TYPE_UNSIGNED (type))
                                  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
                                       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
                                      || (TREE_CODE (type) == INTEGER_TYPE
                                          && TYPE_IS_SIZETYPE (type)))),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
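/* Informal examples of the saturating semantics above: converting
   the REAL_CST 3.75 to a 32-bit int yields 3 via real_trunc; a NaN
   yields 0 and 1.0e30 yields INT_MAX, both with TREE_OVERFLOW set on
   the result.  */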

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
                     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
                     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp_trunc.low, &temp_trunc.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (temp.high < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
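/* Dispatch sketch (hypothetical caller, illustration only):

     tree r = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node,
                                  build_real (double_type_node, dconst2));

   R is the INTEGER_CST 2, produced by the REAL_CST arm of the
   integral case above; an unhandled combination yields NULL_TREE.  */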

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  SET_EXPR_LOCATION (x, loc);
  return x;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);
  protected_set_expr_location (x, loc);
  return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we take an extra
   HONOR_NANS flag as well: if reversing the comparison is unsafe, return
   ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
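/* For example, inverting LT_EXPR with HONOR_NANS true yields
   UNGE_EXPR, since !(x < y) must also hold when either operand is a
   NaN; with HONOR_NANS false the plain GE_EXPR suffices.  */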

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}


/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
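/* The encoding turns logical combination into plain bit operations;
   e.g. COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ, so ANDing
   COMPCODE_LE with COMPCODE_GE leaves exactly COMPCODE_EQ.  */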

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
   else if (flag_trapping_math)
     {
        /* Check that the original operation and the optimized ones will trap
           under the same condition.  */
        bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                     && (lcompcode != COMPCODE_EQ)
                     && (lcompcode != COMPCODE_ORD);
        bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                     && (rcompcode != COMPCODE_EQ)
                     && (rcompcode != COMPCODE_ORD);
        bool trap = (compcode & COMPCODE_UNORD) == 0
                    && (compcode != COMPCODE_EQ)
                    && (compcode != COMPCODE_ORD);

        /* In a short-circuited boolean expression the LHS might be
           such that the RHS, if evaluated, will never trap.  For
           example, in ORD (x, y) && (x < y), we evaluate the RHS only
           if neither x nor y is NaN.  (This is a mixed blessing: for
           example, the expression above will never trap, hence
           optimizing it to x < y would be invalid).  */
        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
          rtrap = false;

        /* If the comparison was short-circuited, and only the RHS
           trapped, we may now generate a spurious trap.  */
        if (rtrap && !ltrap
            && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
          return NULL_TREE;

        /* If we changed the conditions that cause a trap, we lose.  */
        if ((ltrap || rtrap) != trap)
          return NULL_TREE;
      }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
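
/* Worked example (illustrative, not part of the transformation itself):
   for TRUTH_ORIF_EXPR combining LT_EXPR and EQ_EXPR on the same x and y,
   the compcodes combine to LE and the result is the tree for x <= y.
   With flag_trapping_math, combining EQ_EXPR and LT_EXPR in that order
   instead hits the rtrap && !ltrap guard above (only the conditionally
   evaluated right-hand comparison could trap), so NULL_TREE is returned
   and the expression is left alone.  */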

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;


        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                              TREE_STRING_POINTER (arg1),
                              TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),     \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value first if it is constant, as
             the indices may have different types but the same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
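
/* Behavior notes (illustrative): per the REAL_CST case above, -0.0 and
   0.0 compare equal only when !HONOR_SIGNED_ZEROS for their mode, and a
   call such as operand_equal_p (a, b, OEP_ONLY_CONST) answers 0 for any
   non-constant operands, even for two uses of the same VAR_DECL.  */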

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
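
/* For instance (illustrative): for ARG of the form (a < b) && (b == a),
   the walk records *CVAL1 = a and *CVAL2 = b from the first comparison,
   the second comparison matches the same pair in the opposite order, and
   the function returns 1.  Any third variable, as in (a < b) && (b == c),
   makes it return 0.  */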

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                          eval_subst (loc, TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                          eval_subst (loc, TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (loc, TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                   tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
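
/* Illustrative use: when folding an expression like f () * 0 to 0, a
   caller passes the call as OMITTED so its side effects survive, and the
   result is the COMPOUND_EXPR (f (), 0); with two dropped operands,
   omit_two_operands_loc chains two COMPOUND_EXPRs so that OMITTED1 is
   evaluated before OMITTED2, as documented above.  */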


/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        t = build2 (TRUTH_XOR_EXPR, type,
                    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
        loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
        if (loc1 == UNKNOWN_LOCATION)
          loc1 = loc;
        if (loc2 == UNKNOWN_LOCATION)
          loc2 = loc;

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    VOID_TYPE_P (TREE_TYPE (arg1))
                    ? arg1 : invert_truthvalue_loc (loc1, arg1),
                    VOID_TYPE_P (TREE_TYPE (arg2))
                    ? arg2 : invert_truthvalue_loc (loc2, arg2));
        break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
                  TREE_OPERAND (arg, 0),
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        {
          t = build1 (TRUTH_NOT_EXPR, type, arg);
          break;
        }

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      t = NULL_TREE;
      break;
    }

  if (t)
    SET_EXPR_LOCATION (t, loc);

  return t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}
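
/* Example foldings (illustrative): inverting a && b yields !a || !b and
   inverting a || b yields !a && !b, per the cases in fold_truth_not_expr;
   inverting x < y on a floating-point type under flag_trapping_math is
   rejected there, so this wrapper falls back to wrapping the argument in
   a TRUTH_NOT_EXPR.  */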

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                      fold_build2_loc (loc, code, type, left, right));
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                       TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2_loc (loc, code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
                   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
        sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
         || lunsignedp != runsignedp || offset != 0
         || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                        fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (loc, linner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask),
                        fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (loc, rinner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  lhs = build2 (code, compare_type,
                build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}
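
/* Sketch of the constant case (hypothetical field): given
   struct { unsigned a : 3; } s;, the test s.a == 5 becomes roughly
   (<word containing s.a> & mask) == ((5 << lbitpos) & mask), a load, an
   AND and a compare with no shift of the extracted field, while an
   impossible constant such as s.a == 9 is warned about and folded to a
   constant truth value above.  */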

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

4336
   *PUNSIGNEDP is set to the signedness of the field.
4337
 
4338
   *PMASK is set to the mask used.  This is either contained in a
4339
   BIT_AND_EXPR or derived from the width of the field.
4340
 
4341
   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4342
 
4343
   Return 0 if this is not a component reference or is one that we can't
4344
   do anything with.  */
4345
 
4346
static tree
4347
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4348
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4349
                        int *punsignedp, int *pvolatilep,
4350
                        tree *pmask, tree *pand_mask)
4351
{
4352
  tree outer_type = 0;
4353
  tree and_mask = 0;
4354
  tree mask, inner, offset;
4355
  tree unsigned_type;
4356
  unsigned int precision;
4357
 
4358
  /* All the optimizations using this function assume integer fields.
4359
     There are problems with FP fields since the type_for_size call
4360
     below can fail for, e.g., XFmode.  */
4361
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4362
    return 0;
4363
 
4364
  /* We are interested in the bare arrangement of bits, so strip everything
4365
     that doesn't affect the machine mode.  However, record the type of the
4366
     outermost expression if it may matter below.  */
4367
  if (CONVERT_EXPR_P (exp)
4368
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
4369
    outer_type = TREE_TYPE (exp);
4370
  STRIP_NOPS (exp);
4371
 
4372
  if (TREE_CODE (exp) == BIT_AND_EXPR)
4373
    {
4374
      and_mask = TREE_OPERAND (exp, 1);
4375
      exp = TREE_OPERAND (exp, 0);
4376
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4377
      if (TREE_CODE (and_mask) != INTEGER_CST)
4378
        return 0;
4379
    }
4380
 
4381
  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4382
                               punsignedp, pvolatilep, false);
4383
  if ((inner == exp && and_mask == 0)
4384
      || *pbitsize < 0 || offset != 0
4385
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4386
    return 0;
4387
 
4388
  /* If the number of bits in the reference is the same as the bitsize of
4389
     the outer type, then the outer type gives the signedness. Otherwise
4390
     (in case of a small bitfield) the signedness is unchanged.  */
4391
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4392
    *punsignedp = TYPE_UNSIGNED (outer_type);
4393
 
4394
  /* Compute the mask to access the bitfield.  */
4395
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4396
  precision = TYPE_PRECISION (unsigned_type);
4397
 
4398
  mask = build_int_cst_type (unsigned_type, -1);
4399
 
4400
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4401
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4402
 
4403
  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4404
  if (and_mask != 0)
4405
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4406
                        fold_convert_loc (loc, unsigned_type, and_mask), mask);
4407
 
4408
  *pmask = mask;
4409
  *pand_mask = and_mask;
4410
  return inner;
4411
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
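
/* For example (illustrative): in a 16-bit integral type the sign bit is
   1 << 15, so sign_bit_p returns EXP itself when VAL is 0x8000; if EXP is
   instead such a 16-bit value widened by a NOP_EXPR, the recursion on the
   narrower operand returns that subexpression for the same VAL.  */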

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
4544
 
4545
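/* An editorial aside, not part of the original sources: the unsigned
   subtraction works because values below the low bound wrap around to
   very large numbers.  Checking X in [2, 5] via (unsigned) (X - 2) <= 3:
   for X == 5, (unsigned) 3 <= 3 holds; for X == 1, the subtraction
   wraps to the maximal unsigned value, which exceeds 3, so the test
   correctly fails.  */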

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
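/* For instance (an editorial illustration, not from the original
   sources): comparing an omitted lower bound against any finite value
   with LT_EXPR yields true, since the missing bound is treated as
   lying below every representable number.  */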

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real mathematics, we cannot assume open ended ranges
     are the same.  But, this is computer arithmetic, where numbers are
     finite.  We can therefore substitute for any unbounded bound the
     value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */
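/* For example (an editorial illustration, not from the original
   sources): for EXP of the form "X > 10", make_range returns X and
   describes the test as the range "- [-, 10]", i.e. X lies outside
   the range from the type's minimum up to 10.  */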

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will be set in the initial case
             of being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        build_int_cst (exp_type, 1));
          SET_EXPR_LOCATION (exp, loc);
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (!TYPE_UNSIGNED (arg0_type)
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
            *strict_overflow_p = true;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        CASE_CONVERT: case NON_LVALUE_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert_loc (loc, arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert_loc (loc, arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type;
              /* For fixed-point modes, we need to pass the saturating flag
                 as the 2nd parameter.  */
              if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type),
                              TYPE_SATURATING (arg0_type));
              else
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
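/* For example (an editorial illustration, not from the original
   sources): with IN_P == 1, LOW == 2 and HIGH == 5, the check built
   for an integer EXP is equivalent to (unsigned) (EXP - 2) <= 3.  */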

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                        fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                        fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                        fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                              build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_convert_loc (loc, sizetype, low);
          low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
          return build_range_check (loc, type,
                                    fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                 etype, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
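/* For example (an editorial illustration, not from the original
   sources): merging the inclusive ranges + [2, 5] and + [4, 9], both
   of which must hold, gives their intersection + [4, 5]; merging
   + [2, 5] and + [8, 9] fails the overlap test and yields the
   always-false range.  */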

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might also be adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
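/* For example (an editorial illustration, not from the original
   sources): "x > 0 ? x : -x" is folded to ABS_EXPR <x>, and
   "x < y ? x : y" to MIN_EXPR <x, y>, provided signed zeros and
   NaNs do not get in the way.  */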

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                    fold_convert_loc (loc, type,
                                                  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                   comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                          fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                   comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                          fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
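/* For example (an editorial illustration, not from the original
   sources): "ch >= '0' && ch <= '9'" can be merged into a single
   range test of the form (unsigned) (ch - '0') <= 9, replacing two
   conditional branches with one.  */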

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is high, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        {
          tem = build2 (code == TRUTH_ANDIF_EXPR
                        ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                        type, op0, op1);
          SET_EXPR_LOCATION (tem, loc);
          return tem;
        }

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              tem = build2 (code == TRUTH_ANDIF_EXPR
                            ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                            type, lhs, rhs);
              SET_EXPR_LOCATION (tem, loc);
              return tem;
            }
        }
    }

  return 0;
}

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
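/* A worked example (editorial, not from the original sources): with a
   mode of 8 bits and P == 4, the sign-extended form of the 4-bit value
   0xa is 0xfa; unextend turns 0xfa into 0x0a, with zero extra bits,
   whereas a value such as 0x1a, whose upper bits do not match its
   4-bit sign bit, keeps nonzero extra bits.  */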

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask),
                        0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type,
                       const_binop (BIT_XOR_EXPR, c, temp, 0));
}

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
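/* For example (an editorial illustration, not from the original
   sources): if p->a and p->b are adjacent bit-fields within one word,
   "p->a == 2 && p->b == 4" can be rewritten as a single load of the
   containing word, a BIT_AND_EXPR with a mask covering both fields,
   and one equality comparison against the combined constant.  */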
static tree
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
              tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        {
          result = build2 (NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
          goto fold_truthop_exit;
        }

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        {
          result = build2 (EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
          goto fold_truthop_exit;
        }

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
        {
          if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
            {
              result = build2 (code, truth_type, lhs, rhs);
              goto fold_truthop_exit;
            }
          return NULL_TREE;
        }
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                     lntype, ll_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                     lntype, rl_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          result = build2 (wanted_code, truth_type, lhs, rhs);
          goto fold_truthop_exit;
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          result = build2 (wanted_code, truth_type, lhs, rhs);
          goto fold_truthop_exit;
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    {
      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
      SET_EXPR_LOCATION (result, loc);
    }

  result = build2 (wanted_code, truth_type, result,
                   const_binop (BIT_IOR_EXPR, l_const, r_const, 0));

 fold_truthop_exit:
  SET_EXPR_LOCATION (result, loc);
  return result;
}

/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */
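/* For example, MAX (X, 0) != 0 is folded by inverting the comparison:
   !(MAX (X, 0) == 0) becomes !(X <= 0), i.e. X > 0.  */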
 
static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return NULL_TREE.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                     optimize_minmax_comparison
                     (loc, EQ_EXPR, type, arg0, comp_const),
                     optimize_minmax_comparison
                     (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
 
/* T is an integer expression that is being multiplied, divided, or taken
   modulo a constant C (CODE says which operation, and what kind of divide
   or modulus).  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
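/* A further concrete instance: with signed X and signed overflow
   undefined (-fstrict-overflow), (X * 6) / 2 is rewritten as X * 3,
   and *STRICT_OVERFLOW_P is set because the result would differ if
   X * 6 wrapped.  */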
 
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
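      /* For instance, X >> 2 is treated below as the floor division
         X / 4, and an unsigned X << 3 as the multiplication X * 8,
         before recursing on the rewritten expression.  */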
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C is negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;
 
      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;
 
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.
             ???  Until we can properly mark individual operations as
             not overflowing we need to treat sizetype special here as
             stor-layout relies on this optimization to make
             DECL_FIELD_BIT_OFFSET always a constant.  */
          && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
              || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */
 
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = int_const_binop (MULT_EXPR,
                                         fold_convert (ctype, op1),
                                         fold_convert (ctype, c), 1))
          && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
                                               TREE_INT_CST_HIGH (t1),
                                               (TYPE_UNSIGNED (ctype)
                                                && tcode != MULT_EXPR) ? -1 : 1,
                                               TREE_OVERFLOW (t1)))
          && !TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c, 0)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1, 0)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}

/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */
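/* For example, constant_boolean_node (1, boolean_type_node) is
   boolean_true_node, and constant_boolean_node (0, integer_type_node)
   is integer_zero_node.  */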
 
tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}

 
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */
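/* A concrete instance: 2 + (b ? 3 : 4) becomes b ? 5 : 6, since the
   constant 2 is pushed into both arms and each arm then folds.  */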
 
static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
  return fold_convert_loc (loc, type, test);
}


/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
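/* For instance, when signed zeros are honored, X + 0.0 must not be
   folded to X, because for X == -0.0 the sum is +0.0; X - 0.0, by
   contrast, may be folded to X so long as sign-dependent rounding
   is not in effect.  */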
 
bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}

/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
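/* For example, sqrt (x) > 2.0 is simplified to x > 4.0, and, when
   NaNs must be honored, sqrt (x) < 2.0 becomes the guarded form
   x >= 0.0 && x < 4.0 (with x wrapped in a SAVE_EXPR).  */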
 
static tree
fold_mathfn_compare (location_t loc,
                     enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand_loc (loc, type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2_loc (loc, GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2_loc (loc, code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2_loc (loc, NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2_loc (loc, code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
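/* For example, for a double X, X < +Inf is folded to X <= DBL_MAX,
   and X == +Inf to X > DBL_MAX.  */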
 
static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. !isnan (x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
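/* For example, with unsigned X, X / 4 == 2 holds exactly for X in
   [8, 11], so the EQ_EXPR case below builds the equivalent range
   check for 8 <= X && X <= 11 via build_range_check.  */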
 
static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7164
 
7165
    case GT_EXPR:
7166
      if (TREE_OVERFLOW (hi))
7167
        {
7168
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
7169
          return omit_one_operand_loc (loc, type, tmp, arg00);
7170
        }
7171
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7172
 
7173
    case GE_EXPR:
7174
      if (TREE_OVERFLOW (lo))
7175
        {
7176
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
7177
          return omit_one_operand_loc (loc, type, tmp, arg00);
7178
        }
7179
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7180
 
7181
    default:
7182
      break;
7183
    }
7184
 
7185
  return NULL_TREE;
7186
}
7187
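
/* Editorial illustration (hypothetical values, not from the GCC
   sources): for unsigned X, the comparison X / 4 == 3 gives
   prod = 12, tmp = 3, lo = 12 and hi = 15, so the EQ_EXPR case
   above folds it to the range check 12 <= X && X <= 15 built by
   build_range_check.  */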
 

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
   desired result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type,
                              fold_convert_loc (loc, stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
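
/* Editorial illustration (hypothetical types, not from the GCC
   sources): for an 8-bit unsigned char A whose precision equals its
   mode bitsize, (A & 0x80) != 0 masks exactly the sign bit of the
   corresponding signed type, so it folds to (signed char) A < 0;
   likewise (A & 0x80) == 0 folds to (signed char) A >= 0.  */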
 
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  RESULT_TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit;
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
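
/* Editorial illustration (hypothetical values, not from the GCC
   sources): with C = 8 we get bitnum = tree_log2 (8) = 3, so
   (A & 8) != 0 becomes (A >> 3) & 1 and (A & 8) == 0 becomes
   ((A >> 3) ^ 1) & 1, all computed in the intermediate type chosen
   above.  */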
 
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
 
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
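
/* Editorial illustration (not from the GCC sources): given the
   ranking above, callers such as fold_binary_loc canonicalize
   5 + x into x + 5, and for comparisons swap the operator as well,
   e.g. 5 < x becomes x > 5, so later folders need only recognize
   the constant-second form.  */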
 
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
                         tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
                        fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}
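
/* Editorial illustration (hypothetical types, not from the GCC
   sources): for unsigned char c, the comparison (int) c == 10
   satisfies the first branch above (the constant 10 fits unsigned
   char), so the widening cast is dropped and the comparison is
   rewritten in the shorter mode as c == 10.  */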
 
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
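
/* Editorial illustration (hypothetical types, not from the GCC
   sources): for int i, the comparison (unsigned int) i == 5u only
   changes the signedness of the operand; since equality at equal
   precision is sign-insensitive, it folds to i == 5 with the
   constant refitted into the inner signed type.  */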
 
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Pretend op1 is delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Check whether delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TREE_OPERAND (ref, 1)),
                                     fold_convert_loc (loc, itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found a suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
                                       fold_convert_loc (loc, itype,
                                                         TREE_OPERAND (pos, 1)),
                                       fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
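
/* Editorial illustration (hypothetical declarations, not from the
   GCC sources): for int a[10] on a target with 4-byte int, the
   address &a[1] p+ 4 * i has s = 4 equal to the array step and
   delta = i, so it is rewritten as &a[1 + i].  For multi-dimensional
   arrays the mdim check above refuses the rewrite unless the new
   index provably stays within the dimension's bounds.  */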
 


/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointers into integers before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
 
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
         the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                        fold_build2_loc (loc, code, type,
                                     fold_convert_loc (loc, type, alt0),
                                     fold_convert_loc (loc, type, alt1)),
                        fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
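
/* Editorial illustrations (hypothetical values, not from the GCC
   sources): with identical multiplicands, x*3 + y*3 folds to
   (x + y)*3.  With none, the power-of-two path applies: x*12 + y*4
   has int01 = 12 and int11 = 4, giving alt0 = x*3, alt1 = y and
   same = 4, so the sum becomes (x*3 + y)*4, exposing the common
   factor 4 to address-computation folding.  */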
 
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
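
/* Editorial illustration (hypothetical target, not from the GCC
   sources): encoding the 32-bit INTEGER_CST 0x04030201 on a
   little-endian target with 4-byte words stores the bytes
   01 02 03 04 at offsets 0..3; on a big-endian target the same
   value is stored as 04 03 02 01.  */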
 

/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
 
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
 

/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
 

/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
              total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
 

/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR
   into the buffer PTR of length LEN bytes.  Return the number of
   bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
 

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
 

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
 

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
 

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
 

/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
 

/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
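
/* Editorial illustration (assumes IEEE single-precision float, not
   from the GCC sources): folding VIEW_CONVERT_EXPR<int>(1.0f)
   encodes the REAL_CST as the bytes of 0x3f800000 and reinterprets
   them via native_interpret_int, yielding the INTEGER_CST
   1065353216.  */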
 
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        {
          t = build1 (NOP_EXPR, ptrtype, t);
          SET_EXPR_LOCATION (t, loc);
        }
    }
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
 
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
 
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                    fold_convert_loc (loc, TREE_TYPE (op0),
                                                      TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                 fold_convert_loc (loc,
                                                   TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                 fold_convert_loc (loc,
                                                   TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            {
              tem = build1 (code, type,
                            build3 (COND_EXPR,
                                    TREE_TYPE (TREE_OPERAND
                                               (TREE_OPERAND (tem, 1), 0)),
                                    TREE_OPERAND (tem, 0),
                                    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
              SET_EXPR_LOCATION (tem, loc);
            }
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3_loc (loc, COND_EXPR, type, arg0,
                                fold_build1_loc (loc, code, type,
                                             integer_one_node),
                                fold_build1_loc (loc, code, type,
                                             integer_zero_node));
        }
    }
 
  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         a new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and initial types are either both
             float or both integer, we don't need the middle conversion if the
             former is wider than the latter and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer since
             then we sometimes need the middle conversion.  Likewise if the
             final type has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
        }
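
      /* Editorial illustration (hypothetical types, not from the GCC
         sources): the sign-extension-of-zero-extension rule above
         turns (long)(int)(unsigned short) x into
         (long)(unsigned short) x, because the inner widening already
         guarantees a zero-extended, non-negative value.  */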
 
8543
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
               (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          SET_EXPR_LOCATION (tem, loc);
          return tem;
        }

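      /* As an illustration, (long) (i = 3) is rewritten here as
         ((i = 3), (long) 3): the assignment is kept as the first
         operand of a COMPOUND_EXPR and the converted constant
         becomes its value.  */
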
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, and0), tem);
            }
        }

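      /* For example (illustrative): for a signed char c,
         (int) (c & 0x7f) becomes (int) c & 0x7f; the fold is safe
         because the constant 0x7f leaves the sign bit of c clear.  */
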
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2_loc (loc,
                              TREE_CODE (arg0), type,
                              fold_convert_loc (loc, type, arg00),
                              fold_convert_loc (loc, sizetype, arg01));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type, tem));
        }

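      /* For example (illustrative): with unsigned x,
         (unsigned) ~ (int) x satisfies these checks (T1 and T2 have
         equal precision and x is no narrower), so it folds to ~x.  */
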
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                 fold_convert_loc (loc, mult_type,
                                                   TREE_OPERAND (op0, 0)),
                                 fold_convert_loc (loc, mult_type,
                                                   TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

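      /* An illustrative case of the narrowing fold above: for int
         operands i and j, (unsigned short) (i * j) can be rewritten
         as (unsigned short) i * (unsigned short) j, since both forms
         reduce the product modulo 2**16 and the unsigned mult_type
         introduces no new signed overflow.  */
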
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                            type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
         conversions, use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                            type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                  TREE_TYPE (targ0),
                                                  targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                              negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));
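      /* For instance (illustrative), for a signed int i both
         ~ (i - 1) and ~ (i + -1) fold to -i here, mirroring the
         ~ (-A) -> A - 1 rule above.  */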
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                             fold_build1_loc (loc, REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1_loc (loc, REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                             TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                             fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion, do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation-defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */
tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}

/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}

/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}

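/* For example (illustrative): "3 <= x" is reduced to "2 < x" and then
   swapped to the more canonical "x > 2"; when signed overflow is
   undefined, "a + 2 > b" similarly becomes "a + 1 >= b".  */
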
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE, taking
   advantage of undefined overflow.  Try to decrease the magnitude of
   constants involved by changing LE_EXPR and GE_EXPR to LT_EXPR and
   GT_EXPR or vice versa, and put sole constants at the second argument
   position.  Returns the canonicalized tree if changed, otherwise
   NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
                            bitpos / BITS_PER_UNIT, 0,
                            &total_low, &total_high,
                            true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}

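/* For instance (illustrative): with BASE of type "char (*)[16]", a
   constant OFFSET of 20 bytes reaches past the 16-byte object, so the
   function conservatively answers true, while an OFFSET of 8 yields
   false.  */
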
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary instead of this function
   directly.  Fold a comparison with tree code CODE and type TYPE
   with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                         TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed, this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We can now look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C1 +- C2"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }

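  /* For example (illustrative): "x + 10 > 20" becomes "x > 10", while
     for "x - 1 > INT_MAX" the revised constant overflows and the
     comparison is folded to constant false.  */
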
  /* For comparisons of pointers we can decompose them into a compile-time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert_loc (loc, signed_size_type_node,
                                            offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert_loc (loc, signed_size_type_node,
                                            offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                      arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                      arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }

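  /* Two illustrative cases of the pointer folds above: given
     "int a[4];", "&a[1] < &a[2]" folds to true by comparing the
     constant bit positions, and "&x == &y" for distinct local
     variables folds to false because the bases cannot be equal.  */
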
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                              variable1,
                              fold_build2_loc (loc,
                                           TREE_CODE (arg1), TREE_TYPE (arg1),
                                           variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                              fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
                                           variable1, cst),
                              variable2);
        }
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

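  /* For example (illustrative): "x * 4 > 0" becomes "x > 0", and
     "x * -2 > 0" becomes "x < 0" because the negative factor swaps
     the sense of the comparison.  */
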
  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9596
  if (tem)
9597
    return tem;
9598
 
9599
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9600
    {
9601
      tree targ0 = strip_float_extensions (arg0);
9602
      tree targ1 = strip_float_extensions (arg1);
9603
      tree newtype = TREE_TYPE (targ0);
9604
 
9605
      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9606
        newtype = TREE_TYPE (targ1);
9607
 
9608
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9609
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9610
        return fold_build2_loc (loc, code, type,
9611
                            fold_convert_loc (loc, newtype, targ0),
9612
                            fold_convert_loc (loc, newtype, targ1));
9613
 
9614
      /* (-a) CMP (-b) -> b CMP a  */
9615
      if (TREE_CODE (arg0) == NEGATE_EXPR
9616
          && TREE_CODE (arg1) == NEGATE_EXPR)
9617
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9618
                            TREE_OPERAND (arg0, 0));
9619
 
9620
      if (TREE_CODE (arg1) == REAL_CST)
9621
        {
9622
          REAL_VALUE_TYPE cst;
9623
          cst = TREE_REAL_CST (arg1);
9624
 
9625
          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9626
          if (TREE_CODE (arg0) == NEGATE_EXPR)
9627
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
9628
                                TREE_OPERAND (arg0, 0),
9629
                                build_real (TREE_TYPE (arg1),
9630
                                            REAL_VALUE_NEGATE (cst)));
9631
 
9632
          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9633
          /* a CMP (-0) -> a CMP 0  */
9634
          if (REAL_VALUE_MINUS_ZERO (cst))
9635
            return fold_build2_loc (loc, code, type, arg0,
9636
                                build_real (TREE_TYPE (arg1), dconst0));
9637
 
9638
          /* x != NaN is always true, other ops are always false.  */
9639
          if (REAL_VALUE_ISNAN (cst)
9640
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9641
            {
9642
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9643
              return omit_one_operand_loc (loc, type, tem, arg0);
9644
            }
9645
 
9646
          /* Fold comparisons against infinity.  */
9647
          if (REAL_VALUE_ISINF (cst)
9648
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9649
            {
9650
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
9651
              if (tem != NULL_TREE)
9652
                return tem;
9653
            }
9654
        }
9655
 
9656
      /* If this is a comparison of a real constant with a PLUS_EXPR
9657
         or a MINUS_EXPR of a real constant, we can convert it into a
9658
         comparison with a revised real constant as long as no overflow
9659
         occurs when unsafe_math_optimizations are enabled.  */
9660
      if (flag_unsafe_math_optimizations
9661
          && TREE_CODE (arg1) == REAL_CST
9662
          && (TREE_CODE (arg0) == PLUS_EXPR
9663
              || TREE_CODE (arg0) == MINUS_EXPR)
9664
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9665
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9666
                                      ? MINUS_EXPR : PLUS_EXPR,
9667
                                      arg1, TREE_OPERAND (arg0, 1), 0))
9668
          && !TREE_OVERFLOW (tem))
9669
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9670
 
9671
      /* Likewise, we can simplify a comparison of a real constant with
9672
         a MINUS_EXPR whose first operand is also a real constant, i.e.
9673
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9674
         floating-point types only if -fassociative-math is set.  */
9675
      if (flag_associative_math
9676
          && TREE_CODE (arg1) == REAL_CST
9677
          && TREE_CODE (arg0) == MINUS_EXPR
9678
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9679
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9680
                                      arg1, 0))
9681
          && !TREE_OVERFLOW (tem))
9682
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
9683
                            TREE_OPERAND (arg0, 1), tem);
9684
 
9685
      /* Fold comparisons against built-in math functions.  */
9686
      if (TREE_CODE (arg1) == REAL_CST
9687
          && flag_unsafe_math_optimizations
9688
          && ! flag_errno_math)
9689
        {
9690
          enum built_in_function fcode = builtin_mathfn_code (arg0);
9691
 
9692
          if (fcode != END_BUILTINS)
9693
            {
9694
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9695
              if (tem != NULL_TREE)
9696
                return tem;
9697
            }
9698
        }
9699
    }
9700
 
9701
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9702
      && CONVERT_EXPR_P (arg0))
9703
    {
9704
      /* If we are widening one operand of an integer comparison,
9705
         see if the other operand is similarly being widened.  Perhaps we
9706
         can do the comparison in the narrower type.  */
9707
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9708
      if (tem)
9709
        return tem;
9710
 
9711
      /* Or if we are changing signedness.  */
9712
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9713
      if (tem)
9714
        return tem;
9715
    }
9716
 
9717
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9718
     constant, we can simplify it.  */
9719
  if (TREE_CODE (arg1) == INTEGER_CST
9720
      && (TREE_CODE (arg0) == MIN_EXPR
9721
          || TREE_CODE (arg0) == MAX_EXPR)
9722
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9723
    {
9724
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9725
      if (tem)
9726
        return tem;
9727
    }
9728
 
9729
  /* Simplify comparison of something with itself.  (For IEEE
9730
     floating-point, we can only do some of these simplifications.)  */
9731
  if (operand_equal_p (arg0, arg1, 0))
9732
    {
9733
      switch (code)
9734
        {
9735
        case EQ_EXPR:
9736
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9737
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9738
            return constant_boolean_node (1, type);
9739
          break;
9740
 
9741
        case GE_EXPR:
9742
        case LE_EXPR:
9743
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9744
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9745
            return constant_boolean_node (1, type);
9746
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9747
 
9748
        case NE_EXPR:
9749
          /* For NE, we can only do this simplification if integer
9750
             or we don't honor IEEE floating point NaNs.  */
9751
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9752
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9753
            break;
9754
          /* ... fall through ...  */
9755
        case GT_EXPR:
9756
        case LT_EXPR:
9757
          return constant_boolean_node (0, type);
9758
        default:
9759
          gcc_unreachable ();
9760
        }
9761
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
                = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
                = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
                = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
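
  /* As a worked example, consider (x > y) == 0: substituting {max,min},
     {max,max} and {min,max} for {x,y} gives high_result = 0,
     equal_result = 1 and low_result = 1, i.e. mask 3, so the whole
     expression folds to x <= y.  */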

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
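
  /* E.g. for signed x, x / 4 == 2 becomes the range test
     8 <= x && x <= 11.  */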

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                          fold_convert_loc (loc, cmp_type,
                                            TREE_OPERAND (arg1, 0)),
                          TREE_OPERAND (arg0, 0));
    }
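
  /* E.g. ~a < ~b folds to b < a, since bitwise complement reverses
     the ordering of integers.  */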

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                          TREE_OPERAND (arg0, 0),
                          fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                       fold_convert_loc (loc, cmp_type, arg1)));
    }
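
  /* E.g. ~x < 5 folds to x > ~5, i.e. x > -6 in a signed type.  */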

  return NULL_TREE;
}

 
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                      fold_convert_loc (loc, itype, integer_zero_node));
}
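
/* E.g. for z = a + b*i, z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b + 0*i; the save_exprs above ensure the real and
   imaginary parts are evaluated only once.  */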

 
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.
 */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr)
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
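
/* E.g. for the address &buf[3] of a declaration BUF with
   DECL_ALIGN_UNIT 16 this returns modulus 16 with *RESIDUE 3, i.e. the
   pointer value is known to be congruent to 3 mod 16.  For p p+ 4 * i
   the modulus is further capped at 4, the greatest power of 2 dividing
   the multiplier.  */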

 
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
             enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one operand is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case,
     the code below would make the expression more complex.  Change it
     to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert_loc (loc, boolean_type_node, arg0),
                         fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                             fold_convert_loc (loc, TREE_TYPE (op0),
                                               TREE_OPERAND (arg0, 1)), op1);
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
          goto fold_binary_exit;
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                             fold_convert_loc (loc, TREE_TYPE (op1),
                                               TREE_OPERAND (arg1, 1)));
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
          goto fold_binary_exit;
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
           && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                              fold_convert_loc (loc, sizetype,
                                                                arg1),
                                              fold_convert_loc (loc, sizetype,
                                                                arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
                            fold_convert_loc (loc, type, arg1),
                            fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                               arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                TREE_TYPE (arg00),
                                                arg00, inner));
        }
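
      /* E.g. (p p+ 4) p+ 4 reassociates to p p+ (4 + 4) and then folds
         to p p+ 8.  */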

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                            fold_convert_loc (loc, type, arg1));

     /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
        of the array.  Loop optimizers sometimes produce this type of
        expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc, sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            fold_convert_loc (loc, type, arg1),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                      cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                      TREE_TYPE (arg0), arg0,
                                                      cst0));
            }
        }
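
      /* The integral folds above turn e.g. ~x + 1 into -x, ~x + x into
         -1, and x + (x / 8) * -8 into x % 8.  */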

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                    fold_build2_loc (loc, PLUS_EXPR, type,
                                                 fold_convert_loc (loc, type,
                                                                   parg0),
                                                 fold_convert_loc (loc, type,
                                                                   marg)),
                                    fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                               fold_convert_loc (loc, type, parg0),
                               fold_build2_loc (loc, pcode, type,
                                            fold_convert_loc (loc, type, marg),
                                            fold_convert_loc (loc, type,
                                                              parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                    fold_convert_loc (loc, type, arg0),
                                    fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2 (LROTATE_EXPR,
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                              TREE_OPERAND (arg0, 0),
                              code0 == LSHIFT_EXPR
                              ? tree01 : tree11);
                SET_EXPR_LOCATION (tem, loc);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                    return fold_convert_loc
                      (loc, type,
                       build2 ((code0 != LSHIFT_EXPR
                                ? LROTATE_EXPR
                                : RROTATE_EXPR),
                               TREE_TYPE (TREE_OPERAND (arg0, 0)),
                               TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
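
      /* E.g. for a 32-bit unsigned x, both (x << 3) + (x >> 29) and
         (x << b) + (x >> (32 - b)) are recognized here as left-rotates
         of x, by 3 and by b bits respectively.  */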

    associate:
      /* In most languages, we can't associate operations on floats
         through parentheses.  Rather than remember where the parentheses
         were, we don't associate floats at all, unless the user has
         specified -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }
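
      /* E.g. for unsigned x and y, (x + 1) + (y + 2) splits into
         variables x, y and literals 1, 2, and recombines as
         (x + y) + 3.  */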

      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations. */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                  fold_build2_loc (loc, MINUS_EXPR, type,
                                               arg00, arg10),
                                  fold_build2_loc (loc, MINUS_EXPR, type,
                                               arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                      fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);


      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                         arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, arg10),
                                      fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, arg11),
                                      fold_convert_loc (loc, type, arg0));
                }
            }
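
          /* E.g. a - (a & 0xff) becomes ~0xff & a, i.e. a with its low
             byte cleared.  */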

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it gives the wrong result for
         NaNs.  Also note that operand_equal_p is always false if an
         operand is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert_loc (loc, type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]).  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                  fold_convert_loc (loc, type, esz));
            }
        }
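
      /* E.g. with 4-byte int elements, &a[10] - &a[4] folds to
         (10 - 4) * 4 = 24, the byte offset between the two elements.  */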
10933
 
10934
      if (FLOAT_TYPE_P (type)
10935
          && flag_unsafe_math_optimizations
10936
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10937
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10938
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10939
        return tem;
10940
 
10941
      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10942
         same or one.  Make sure type is not saturating.
10943
         fold_plusminus_mult_expr will re-associate.  */
10944
      if ((TREE_CODE (arg0) == MULT_EXPR
10945
           || TREE_CODE (arg1) == MULT_EXPR)
10946
          && !TYPE_SATURATING (type)
10947
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
10948
        {
10949
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10950
          if (tem)
10951
            return tem;
10952
        }
10953
 
      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              negate_expr (arg0)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                tem);

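          /* Editorial note (illustrative): the !TREE_OVERFLOW check above
             is what keeps e.g. x * INT_MIN from being rewritten as
             -x * -INT_MIN, since INT_MIN has no representable negation.  */
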
          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                omit_one_operand_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0),
                                                  TREE_OPERAND (arg0, 1)),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                             build_int_cst (type, 2), arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

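          /* Editorial note (illustrative): extract_muldiv can, for
             instance, combine nested scalings such as (x * 4) * 2 into
             x * 8; when a rewrite is only valid because signed overflow
             is undefined, the warning above keys it to -Wstrict-overflow.  */
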
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
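      /* Editorial note (illustrative): for integer complex z = a + b*i,
         fold_mult_zconjz produces a*a + b*b with a zero imaginary part,
         i.e. |z|^2 expressed as a complex value.  */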
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
             change the result for floating point types due to rounding,
             so it is applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

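          /* Editorial note (illustrative): the sign-stripping fold above
             turns (-y) * (-y) into y * y even in the default FP mode,
             since sign operations cannot affect the product of a value
             with itself.  */
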
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                               negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                             rtype, arg0)),
                               fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                               fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                               negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                             rtype, arg0)));
            }

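          /* Editorial note (illustrative): multiplying z = a + b*i by I
             yields -b + a*i, so the fold above merely swaps and negates
             the component parts instead of emitting a full complex
             multiplication.  */
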
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

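              /* Editorial note (illustrative): this turns, e.g.,
                 sqrt (x) * sqrt (y) into sqrt (x * y).  The forms are not
                 IEEE-equivalent -- for x and y both negative the original
                 yields NaN while the fused call does not -- hence the
                 -funsafe-math-optimizations guard.  */
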
              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                          CALL_EXPR_ARG (arg0, 0),
                                          CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                              arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

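              /* Editorial note (illustrative): e.g. pow (x, 2.0) * pow (x, 3.0)
                 becomes pow (x, 5.0), and pow (x, y) * pow (z, y) becomes
                 pow (x * z, y).  */
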
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                     TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo3, hi3)),
                                arg1);
        }

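      /* Editorial note (illustrative): the canonicalization above shrinks
         C1 to C1 & ~C2, e.g. (x & 0xff) | 0x0f becomes (x & 0xf0) | 0x0f,
         while the mode-mask exception leaves (x & 0xffff) | 0xff alone
         because 0xffff is itself a HImode mask.  */
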
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                         build2 (BIT_AND_EXPR, type,
                                 fold_convert_loc (loc, type,
                                                   TREE_OPERAND (arg0, 0)),
                                 fold_convert_loc (loc, type,
                                                   TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful, continue in the association code.  */
      goto bit_rotate;

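      /* Editorial note (illustrative): the bit_rotate code reached from
         here recognizes the classic rotate idiom, e.g. for a 32-bit
         unsigned x,

             (x << n) | (x >> (32 - n))

         is folded into a single rotate when the two shift counts sum to
         the operand width.  */
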
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));

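      /* Editorial note (illustrative): (x & 1) ^ 1 tests the inverted low
         bit, so rewriting it as (x & 1) == 0 exposes it to the comparison
         folds elsewhere in this file.  */
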
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_convert_loc (loc, type, arg0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful, continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                           type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_convert_loc (loc, type, arg0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg0));
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg0, 0)),
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }

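      /* Editorial note (illustrative): if arg0 is known to be the address
         of a 16-byte-aligned object plus 4, then modulus is 16 and residue
         is 4, so an expression like ((uintptr_t) p & 7) folds to the
         constant 4.  */
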
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For an arithmetic shift, if the sign bit could be set,
                 zerobits can actually contain sign bits, so no
                 transformation is possible unless MASK masks them all
                 away.  In that case the shift needs to be converted into
                 a logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                     build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                         fold_convert_loc (loc, shift_type,
                                                           TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }

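      /* Editorial note (illustrative): ((x << 2) & 0xfc) becomes
         ((x << 2) & 0xff) here, because the low two bits are already known
         zero and 0xff is a QImode mask; a mask consisting solely of
         known-zero bits, as in ((x << 16) & 0xff00), folds directly to 0.  */
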
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                  negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), tem);
                }
            }
        }
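      /* Editorial note (illustrative): under -freciprocal-math, x / 3.0
         becomes x * (1.0 / 3.0) even though the product may round
         differently; x / 4.0 becomes x * 0.25 under plain optimization,
         since the reciprocal of a power of two is exact.  */
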
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

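          /* Editorial note (illustrative): sin (x) / cos (x) and
             cos (x) / sin (x) equal tan (x) and 1.0 / tan (x) only in
             exact arithmetic; the rewritten forms can round differently,
             which is why they sit behind -funsafe-math-optimizations.  */
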
11974
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11975
             NaNs or Infinities.  */
11976
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11977
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11978
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11979
            {
11980
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
11981
              tree arg01 = CALL_EXPR_ARG (arg1, 0);
11982
 
11983
              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11984
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11985
                  && operand_equal_p (arg00, arg01, 0))
11986
                {
11987
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11988
 
11989
                  if (cosfn != NULL_TREE)
11990
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
11991
                }
11992
            }
11993
 
11994
          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11995
             NaNs or Infinities.  */
11996
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11997
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11998
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11999
            {
12000
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
12001
              tree arg01 = CALL_EXPR_ARG (arg1, 0);
12002
 
12003
              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12004
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12005
                  && operand_equal_p (arg00, arg01, 0))
12006
                {
12007
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12008
 
12009
                  if (cosfn != NULL_TREE)
12010
                    {
12011
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12012
                      return fold_build2_loc (loc, RDIV_EXPR, type,
12013
                                          build_real (type, dconst1),
12014
                                          tmp);
12015
                    }
12016
                }
12017
            }
12018
 
12019
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
12020
          if (fcode0 == BUILT_IN_POW
12021
              || fcode0 == BUILT_IN_POWF
12022
              || fcode0 == BUILT_IN_POWL)
12023
            {
12024
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
12025
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
12026
              if (TREE_CODE (arg01) == REAL_CST
12027
                  && !TREE_OVERFLOW (arg01)
12028
                  && operand_equal_p (arg1, arg00, 0))
12029
                {
12030
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12031
                  REAL_VALUE_TYPE c;
12032
                  tree arg;
12033
 
12034
                  c = TREE_REAL_CST (arg01);
12035
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12036
                  arg = build_real (type, c);
12037
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12038
                }
12039
            }
12040
 
12041
          /* Optimize a/root(b/c) into a*root(c/b).  */
12042
          if (BUILTIN_ROOT_P (fcode1))
12043
            {
12044
              tree rootarg = CALL_EXPR_ARG (arg1, 0);
12045
 
12046
              if (TREE_CODE (rootarg) == RDIV_EXPR)
12047
                {
12048
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12049
                  tree b = TREE_OPERAND (rootarg, 0);
12050
                  tree c = TREE_OPERAND (rootarg, 1);
12051
 
12052
                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12053
 
12054
                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12055
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12056
                }
12057
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                      expfn, 1,
                                      fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
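          /* For example, x/exp(y) becomes x*exp(-y), trading the
             division for a multiplication since exp(-y) == 1/exp(y).  */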

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                  fold_convert_loc (loc, type, arg0), sh_cnt);
            }
        }
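      /* A worked example of the fold above: for unsigned A,
         A / (4 << n) becomes A >> (n + 2), since B == 4 gives
         log2(B) == 2 and dividing by 4 * 2**n is a right shift
         by n + 2.  */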

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, type,
                                                TREE_OPERAND (arg0, 0)),
                              fold_convert_loc (loc, type,
                                                negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, type,
                                                negate_expr (arg0)),
                              fold_convert_loc (loc, type,
                                                TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time the others generated faster code, but it's not clear
         whether they still do after the last round of changes to the DIV
         code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* For X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                       build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_convert_loc (loc, type, mask));
            }
        }
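      /* Worked examples of the two folds above: for unsigned x,
         x % 16 becomes x & 15, and x % (2 << n) becomes
         x & ((2 << n) - 1); the mask keeps exactly the low-order
         remainder bits.  */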

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
                                         TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
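      /* For example, (x << 3) << 5 becomes x << 8.  When the combined
         count reaches the type's precision, rotate counts wrap (they
         are reduced modulo the precision), unsigned shifts and left
         shifts degenerate to 0, and signed right shifts are clamped
         to a count of precision - 1, matching the cases above.  */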

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
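      /* For example, with 32-bit unsigned x, (x >> 4) << 4 becomes
         x & 0xfffffff0 and (x << 4) >> 4 becomes x & 0x0fffffff;
         a single mask replaces the shift pair.  */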

      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
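      /* For example, rotating a 32-bit value left by 3 is rewritten
         as rotating it right by 29 (32 - 3), so later passes only
         have to recognize one rotate direction.  */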

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                            fold_build2_loc (loc, code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2_loc (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
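      /* For example, in a 32-bit type, rotating x right by 10 and
         then right by 22 yields x unchanged, since the counts sum
         to the precision.  */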

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                   fold_convert_loc (loc, type,
                                                     TREE_OPERAND (arg0, 1)),
                                   arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)),
                                    arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (loc, code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                            fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                            fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
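      /* For example, x + 5 == 7 becomes x == 2 and x - 5 == 7 becomes
         x == 12; the !TREE_OVERFLOW check rejects revised constants
         that would wrap.  */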

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (arg1))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
                                         fold_convert_loc (loc,
                                                           TREE_TYPE (arg0),
                                                           arg1),
                                         TREE_OPERAND (arg0, 1)));
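      /* For example, (x ^ 3) == 5 becomes x == 6, since the constants
         fold as 3 ^ 5 == 6.  */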

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                    fold_build2_loc (loc, code, type,
                                                 val,
                                                 build_int_cst (TREE_TYPE (val),
                                                                0)),
                                    TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                    code == NE_EXPR
                                    ? boolean_true_node : boolean_false_node,
                                    TREE_OPERAND (arg0, 1), arg1);
        }

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                      arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                  arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                      arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                  arg1);
            }
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                     fold_convert_loc (loc, newtype,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, newtype,
                                                       TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                              fold_convert_loc (loc, newtype, arg1));
        }
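      /* For example, with signed x, x % 4 == 0 becomes
         (unsigned) x % 4 == 0: the two remainders agree on being
         zero modulo a power of 2, and the unsigned form is cheaper
         to expand.  */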

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem, arg1);
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    integer_zero_node));
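      /* For example, (a & 8) == 8 becomes (a & 8) != 0: the masked
         value can only be 0 or 8, so equality with the mask is the
         same as being nonzero.  */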

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                   CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                  build_int_cst (TREE_TYPE (iref), 0));
            }
        }

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  type, arg00, build_int_cst (itype, 0));
            }
        }

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                            build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                             TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                              type, tem, arg1);
        }
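      /* For example, (~x & 4) == 0 becomes (x & 4) != 0: bit 2 is
         clear in ~x exactly when it is set in x.  */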

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                             arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                            TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg1, 0));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg10),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg11),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg10),
                                             arg00),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg11),
                                             arg00),
                                build_int_cst (itype, 0));
        }

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00, arg10);
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00, arg11);
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01, arg10);
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01, arg11);

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg11)),
                                arg10);
        }

      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                              imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                              imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                              real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                              real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }
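      /* For instance, when folding (a + bi) == (c + di) and the real
         parts are known unequal at compile time, the whole equality
         folds to false; omit_two_operands_loc keeps any side effects
         of the imaginary parts alive.  */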

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
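      /* Worked example: for signed int x with overflow treated as
         undefined, x + 1 > x folds to true and x + 1 <= x folds to
         false; fold_overflow_warning records the assumption so that
         -Wstrict-overflow can report it.  */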

      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                      fold_convert_loc (loc,
                                                        TREE_TYPE (arg1), arg0),
                                      arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        arg0),
                                      arg1);
                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                      fold_convert_loc (loc,
                                                        TREE_TYPE (arg1), arg0),
                                      arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        arg0),
                                      arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                        code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                        type, fold_convert_loc (loc, st, arg0),
                                        build_int_cst (st, 0));
                  }
              }
          }
      }
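      /* Worked examples for unsigned char X (precision 8, maximum 255):
         X > 255 folds to false, X <= 255 folds to true, X > 254 folds
         to X == 255, and X > 127 (the signed maximum) folds to
         (signed char) X < 0.  */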

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                            build2 (GE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), tem),
                            build2 (LE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1));
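      /* For instance, ABS (X) <= 7 becomes X >= -7 && X <= 7, trading
         the absolute value for a single range check.  */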

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        {
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                        build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                TREE_OPERAND (arg1, 1)),
                        build_int_cst (TREE_TYPE (arg0), 0));
          goto fold_binary_exit;
        }

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                        fold_convert_loc (loc, TREE_TYPE (arg0),
                                          build2 (RSHIFT_EXPR,
                                                  TREE_TYPE (arg0), arg0,
                                                  TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                                1))),
                        build_int_cst (TREE_TYPE (arg0), 0));
          goto fold_binary_exit;
        }
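      /* Worked example: for unsigned X, X < (1 << Y) becomes
         (X >> Y) == 0 and X >= (1 << Y) becomes (X >> Y) != 0; with
         X = 9 and Y = 3, 9 < 8 is false, matching (9 >> 3) == 0 being
         false since 9 >> 3 == 1.  */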

      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, newtype, targ0),
                              fold_convert_loc (loc, newtype, targ1));
      }
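      /* For instance, with float operands f1 and f2, the comparison
         (double) f1 UNLT (double) f2 is folded back to f1 UNLT f2;
         widening both operands cannot change the outcome.  */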

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
 fold_binary_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
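/* Illustration of why this check matters: in a conditional such as
   "c ? x : ({ lab: y; })" the unused arm contains a label that a goto
   elsewhere may still target, so fold_ternary_loc below must not
   discard an arm for which contains_label_p returns true.  */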

/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
              tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away an operand which contains a label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = fold_truth_not_expr (loc, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                    fold_convert_loc (loc, type,
                                              invert_truthvalue_loc (loc,
                                                                     arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                             TREE_TYPE (tem), tem,
                                             fold_convert_loc (loc,
                                                               TREE_TYPE (tem),
                                                               arg1)));
        }
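      /* Worked example: for signed int A, A < 0 ? INT_MIN : 0 folds to
         A & INT_MIN, replacing the branch with a mask that isolates the
         sign bit.  */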

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                TREE_OPERAND (tem, 0), arg1);
        }
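      /* Worked example: (A >> 3) & 1 ? 8 : 0 folds to A & 8, since bit 3
         of A already carries the value 8 whenever it is set.  */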

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                            fold_convert_loc (loc, type, arg0),
                            arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                fold_convert_loc (loc, type, tem),
                                arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                fold_convert_loc (loc, type, tem),
                                op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                            fold_convert_loc (loc, type, arg0),
                            op2);
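      /* Taken together, the four conversions above implement the usual
         short-circuit identities: a ? b : 0 is a && b, a ? b : 1 is
         !a || b, a ? 0 : b is !a && b, and a ? 1 : b is a || b.  */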

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
          && type == TREE_TYPE (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              tree elements = NULL_TREE;

              if (TREE_CODE (arg0) == VECTOR_CST)
                elements = TREE_VECTOR_CST_ELTS (arg0);
              else
                {
                  unsigned HOST_WIDE_INT idx;
                  tree value;

                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
                    elements = tree_cons (NULL_TREE, value, elements);
                }
              while (idx-- > 0 && elements)
                elements = TREE_CHAIN (elements);
              if (elements)
                return TREE_VALUE (elements);
              else
                return fold_convert_loc (loc, type, integer_zero_node);
            }
        }
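      /* Worked example (assuming the element list is in index order):
         a BIT_FIELD_REF that extracts element 2 of the constant vector
         {1, 2, 3, 4}, i.e. arg1 equal to the element width and op2
         equal to twice that width, folds to the constant 3.  */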

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = VEC_index (constructor_elt, elts, middle)->index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return VEC_index (constructor_elt, elts, middle)->value;
              }
          }

        return t;
      }
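      /* For instance, folding a[3] where a is bound to a CONSTRUCTOR
         with constant indexes walks the binary search above, so the
         lookup is logarithmic rather than linear in the number of
         initializers.  */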

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}

#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of EXPR before and
   after the actual fold call, to verify that fold did not
   accidentally change the original expression.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
14212
 
14213
void
14214
print_fold_checksum (const_tree expr)
14215
{
14216
  struct md5_ctx ctx;
14217
  unsigned char checksum[16], cnt;
14218
  htab_t ht;
14219
 
14220
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14221
  md5_init_ctx (&ctx);
14222
  fold_checksum_tree (expr, &ctx, ht);
14223
  md5_finish_ctx (&ctx, checksum);
14224
  htab_delete (ht);
14225
  for (cnt = 0; cnt < 16; ++cnt)
14226
    fprintf (stderr, "%02x", checksum[cnt]);
14227
  putc ('\n', stderr);
14228
}
14229
 
14230
static void
14231
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14232
{
14233
  internal_error ("fold check: original tree changed by fold");
14234
}
14235
 
14236
static void
14237
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14238
{
14239
  const void **slot;
14240
  enum tree_code code;
14241
  union tree_node buf;
14242
  int i, len;
14243
 
14244
recursive_label:
14245
 
14246
  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14247
               <= sizeof (struct tree_function_decl))
14248
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14249
  if (expr == NULL)
14250
    return;
14251
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
14252
  if (*slot != NULL)
14253
    return;
14254
  *slot = expr;
14255
  code = TREE_CODE (expr);
14256
  if (TREE_CODE_CLASS (code) == tcc_declaration
14257
      && DECL_ASSEMBLER_NAME_SET_P (expr))
14258
    {
14259
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14260
      memcpy ((char *) &buf, expr, tree_size (expr));
14261
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14262
      expr = (tree) &buf;
14263
    }
14264
  else if (TREE_CODE_CLASS (code) == tcc_type
14265
           && (TYPE_POINTER_TO (expr)
14266
               || TYPE_REFERENCE_TO (expr)
14267
               || TYPE_CACHED_VALUES_P (expr)
14268
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14269
               || TYPE_NEXT_VARIANT (expr)))
14270
    {
14271
      /* Allow these fields to be modified.  */
14272
      tree tmp;
14273
      memcpy ((char *) &buf, expr, tree_size (expr));
14274
      expr = tmp = (tree) &buf;
14275
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14276
      TYPE_POINTER_TO (tmp) = NULL;
14277
      TYPE_REFERENCE_TO (tmp) = NULL;
14278
      TYPE_NEXT_VARIANT (tmp) = NULL;
14279
      if (TYPE_CACHED_VALUES_P (tmp))
14280
        {
14281
          TYPE_CACHED_VALUES_P (tmp) = 0;
14282
          TYPE_CACHED_VALUES (tmp) = NULL;
14283
        }
14284
    }
14285
  md5_process_bytes (expr, tree_size (expr), ctx);
14286
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14287
  if (TREE_CODE_CLASS (code) != tcc_type
14288
      && TREE_CODE_CLASS (code) != tcc_declaration
14289
      && code != TREE_LIST
14290
      && code != SSA_NAME)
14291
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14292
  switch (TREE_CODE_CLASS (code))
14293
    {
14294
    case tcc_constant:
14295
      switch (code)
14296
        {
14297
        case STRING_CST:
14298
          md5_process_bytes (TREE_STRING_POINTER (expr),
14299
                             TREE_STRING_LENGTH (expr), ctx);
14300
          break;
14301
        case COMPLEX_CST:
14302
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14303
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14304
          break;
14305
        case VECTOR_CST:
14306
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14307
          break;
14308
        default:
14309
          break;
14310
        }
14311
      break;
14312
    case tcc_exceptional:
14313
      switch (code)
14314
        {
14315
        case TREE_LIST:
14316
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14317
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14318
          expr = TREE_CHAIN (expr);
14319
          goto recursive_label;
14320
          break;
14321
        case TREE_VEC:
14322
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14323
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14324
          break;
14325
        default:
14326
          break;
14327
        }
14328
      break;
14329
    case tcc_expression:
14330
    case tcc_reference:
14331
    case tcc_comparison:
14332
    case tcc_unary:
14333
    case tcc_binary:
14334
    case tcc_statement:
14335
    case tcc_vl_exp:
14336
      len = TREE_OPERAND_LENGTH (expr);
14337
      for (i = 0; i < len; ++i)
14338
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14339
      break;
14340
    case tcc_declaration:
14341
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14342
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14343
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14344
        {
14345
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14346
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14347
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14348
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14349
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14350
        }
14351
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14352
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14353
 
14354
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14355
        {
14356
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14357
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14358
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14359
        }
14360
      break;
14361
    case tcc_type:
14362
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
14363
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14364
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14365
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14366
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14367
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14368
      if (INTEGRAL_TYPE_P (expr)
14369
          || SCALAR_FLOAT_TYPE_P (expr))
14370
        {
14371
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14372
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14373
        }
14374
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14375
      if (TREE_CODE (expr) == RECORD_TYPE
14376
          || TREE_CODE (expr) == UNION_TYPE
14377
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
14378
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14379
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14380
      break;
14381
    default:
14382
      break;
14383
    }
14384
}
14385
 
14386
/* Helper function for outputting the checksum of a tree T.  When
14387
   debugging with gdb, you can "define mynext" to be "next" followed
14388
   by "call debug_fold_checksum (op0)", then just trace down till the
14389
   outputs differ.  */
14390
 
14391
void
14392
debug_fold_checksum (const_tree t)
14393
{
14394
  int i;
14395
  unsigned char checksum[16];
14396
  struct md5_ctx ctx;
14397
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14398
 
14399
  md5_init_ctx (&ctx);
14400
  fold_checksum_tree (t, &ctx, ht);
14401
  md5_finish_ctx (&ctx, checksum);
14402
  htab_empty (ht);
14403
 
14404
  for (i = 0; i < 16; i++)
14405
    fprintf (stderr, "%d ", checksum[i]);
14406
 
14407
  fprintf (stderr, "\n");
14408
}
14409
 
14410
#endif

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    {
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
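
/* Callers normally reach the function above through the fold_build1_loc
   macro; e.g. fold_build1_loc (loc, NEGATE_EXPR, type, op0) yields a
   folded negation of OP0, or a plain NEGATE_EXPR node when no
   simplification applies.  */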

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    {
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}

/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
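
/* Each *_initializer_loc wrapper below simply brackets the corresponding
   fold_buildN_loc call between these two macros, e.g.

     START_FOLD_INIT;
     result = fold_build2_loc (loc, code, type, op0, op1);
     END_FOLD_INIT;

   so that the trapping, rounding and signaling-NaN flags are cleared
   while an initializer is folded and restored afterwards.  */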

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom, 0));

    default:
      return 0;
    }
}
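
/* For example, multiple_of_p (type, (i * 16) + (j << 4), 8) returns 1:
   the MULT_EXPR case succeeds because 16 is a multiple of 8, the
   LSHIFT_EXPR case rewrites j << 4 as a multiplication by 1 << 4 = 16
   and retries, and the PLUS_EXPR case requires both to hold.  */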

/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
    return true;
  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                      && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is less than that
         of the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
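
/* As an example of the zero-extension rules above, adding two values
   widened from unsigned char to a 32-bit int is known to be
   non-negative: each summand is below 2**8, so prec = 8 + 1 = 9 < 32.
   A product of the same operands needs at most 8 + 8 = 16 bits, which
   likewise leaves the sign bit clear.  */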

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));
    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if a call to the built-in function FNDECL of type TYPE
   with arguments ARG0 and ARG1 is known to be non-negative.  If the
   return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
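
/* E.g. the BUILT_IN_POW case above lets fold treat pow (x, 2.0) as
   non-negative for any x, because 2.0 is an even integer-valued real,
   whereas pow (x, 3.0) falls back to asking whether x itself is known
   to be non-negative.  */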

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
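
/* tree_expr_nonnegative_p is the usual entry point: it seeds
   STRICT_OVERFLOW_P with false and, when the positive answer relied on
   signed overflow being undefined, issues the -Wstrict-overflow note
   through fold_overflow_warning before returning.  */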

/* Return true when (CODE OP0) is known to be nonzero.  For floating
   point we further ensure that the value is not denormal.  Similar
   logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is known to be nonzero.  For floating
   point we further ensure that the value is not denormal.  Similar
   logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
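
/* For instance, a PLUS_EXPR of two non-negative operands, at least one
   of them nonzero, is known to be nonzero: as noted above, even with
   wrap-around the sum of two non-negative values on a two's-complement
   machine can only become zero when both operands are zero.  */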

/* Return true when T is known to be nonzero.  For floating point we
   further ensure that T is not denormal.  Similar logic is present in
   nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
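
/* For addresses this means that, e.g., the address of a stack variable
   or of a string literal is known to be non-null, while the address of
   a weak symbol is not, since a weak declaration may resolve to a null
   address at link time.  */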

/* Return true when T is known to be nonzero.  Similar logic is present
   in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
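
/* The CALL_EXPR case above is deliberately narrow: only calls
   recognized by alloca_call_p are treated as nonzero, since alloca
   returns a stack address that is assumed never to be null.  */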

/* Return true when T is known to be nonzero.  Handle warnings about
   undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                 fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
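
/* This is what allows, e.g., "abc"[1] to fold to the character constant
   'b' at compile time, provided the index is a constant lying within
   the string and the element type is a single-byte integer mode.  */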

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
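
/* For a signed INTEGER_CST the interesting corner case is negating the
   most negative value: neg_double reports the overflow and
   force_fit_type_double then marks the result with TREE_OVERFLOW.  */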

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}

/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
15997
 
15998
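/* Illustrative example (editor's sketch, not part of the original
   source) of the swap/invert reduction above: to fold 3 > 2 the
   operands are swapped and the code becomes LT_EXPR, so the result is
   INT_CST_LT (2, 3) == 1; to fold 3 >= 2 the code becomes LT_EXPR
   with INVERT set, giving !(3 < 2) == 1.  Note also that with
   -ftrapping-math, LT/LE/GT/GE/LTGT comparisons against a NaN are
   deliberately left unfolded, since the runtime comparison may raise
   an invalid-operand exception.  */
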
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the
     return and the right-hand side of the MODIFY_EXPR it contains; if
     either has no side effects, we don't need to wrap the expression
     in a cleanup point expression.  Note we don't check the left-hand
     side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc, BIT_FIELD_REF, type,
                                    TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}

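/* Illustrative examples (editor's sketch, not part of the original
   source) of the simplifications above, written as C expressions:

     int a[4];              *(int *)&a        => a[0]
     __complex__ double c;  *(double *)&c     => __real__ c
                            ((double *)&c)[1] => __imag__ c

   The imaginary-part case corresponds to the POINTER_PLUS_EXPR
   pattern with a byte offset equal to TYPE_SIZE_UNIT (type); every
   transformation requires the requested TYPE to match the element
   type exactly.  */
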
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

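/* Illustrative example (editor's sketch, not part of the original
   source): for an ignored expression such as `x++ + y', only operand
   0 has side effects, so the loop above strips the PLUS_EXPR and
   returns `x++'; an entirely side-effect-free expression such as
   `x + y' is replaced by integer_zero_node at the top.  */
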
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because in that case the check is more expensive than just doing
     the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

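/* Worked example (editor's sketch, not part of the original source)
   of the power-of-two branch above, for VALUE == 13, DIVISOR == 8.
   The non-constant path computes (13 + 7) & -8 == 20 & ~7 == 16; the
   INTEGER_CST path masks down and then adds the divisor,
   (13 & ~7) + 8 == 16, which is equivalent because exact multiples
   have already been returned unchanged.  A carry out of LOW
   increments HIGH, and a carry out of HIGH sets the overflow flag.  */
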
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because in that case the check is more expensive than just doing
     the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

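/* Editor's sketch, not part of the original source: rounding down
   needs no preceding addition, so for VALUE == 13 and DIVISOR == 8
   the power-of-two branch is simply 13 & -8 == 8, and constant
   operands need no special case.  */
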
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

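/* Illustrative example (editor's sketch, not part of the original
   source): given `int a[10];', the addresses &a[7] and &a[2] share
   the core &a and carry constant byte offsets, so

     HOST_WIDE_INT d;
     bool ok = ptr_difference_const (e1, e2, &d);

   with e1 == &a[7] and e2 == &a[2] succeeds with d == 5 * sizeof (int).
   If either offset involves a variable index, the function returns
   false.  */
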
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_COPYSIGN):
          /* Strip the copysign function call and return the first
             argument.  */
          arg0 = CALL_EXPR_ARG (exp, 0);
          arg1 = CALL_EXPR_ARG (exp, 1);
          return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

        default:
          /* Strip sign ops from the argument of "odd" math functions.  */
          if (negate_mathfn_p (fcode))
            {
              arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
              if (arg0)
                return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                            1, arg0);
            }
          break;
        }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
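
/* Illustrative examples (editor's sketch, not part of the original
   source), valid only because the caller has declared the sign of the
   result insignificant:

     -x * y          => x * y      (when sign-dependent rounding is
                                    not honored)
     sin (-x)        => sin (x)    (sin is "odd": only the sign of the
                                    result changes)
     copysign (x, y) => x          (y is kept only for its side
                                    effects, via omit_one_operand_loc)

   A typical caller is the cos folding in builtins.c, where the sign
   of the argument cannot affect the result at all.  */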
