OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-stable/] [gcc-4.5.1/] [gcc/] [fold-const.c] - Blame information for rev 826

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* Fold a constant sub-tree into a single node for C-compiler
2
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/*@@ This file should be rewritten to use an arbitrary precision
23
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25
  @@ The routines that translate from the ap rep should
26
  @@ warn if precision et. al. is lost.
27
  @@ This would also make life easier when this technology is used
28
  @@ for cross-compilers.  */
29
 
30
/* The entry points in this file are fold, size_int_wide, size_binop
31
   and force_fit_type_double.
32
 
33
   fold takes a tree as argument and returns a simplified tree.
34
 
35
   size_binop takes a tree code for an arithmetic operation
36
   and two operands that are trees, and produces a tree for the
37
   result, assuming the type comes from `sizetype'.
38
 
39
   size_int takes an integer value, and creates a tree constant
40
   with type from `sizetype'.
41
 
42
   force_fit_type_double takes a constant, an overflowable flag and a
43
   prior overflow indicator.  It forces the value to fit the type and
44
   sets TREE_OVERFLOW.
45
 
46
   Note: Since the folders get called on non-gimple code as well as
47
   gimple code, we need to handle GIMPLE tuples as well as their
48
   corresponding tree equivalents.  */
49
 
50
#include "config.h"
51
#include "system.h"
52
#include "coretypes.h"
53
#include "tm.h"
54
#include "flags.h"
55
#include "tree.h"
56
#include "real.h"
57
#include "fixed-value.h"
58
#include "rtl.h"
59
#include "expr.h"
60
#include "tm_p.h"
61
#include "target.h"
62
#include "toplev.h"
63
#include "intl.h"
64
#include "ggc.h"
65
#include "hashtab.h"
66
#include "langhooks.h"
67
#include "md5.h"
68
#include "gimple.h"
69
 
70
/* Nonzero if we are folding constants inside an initializer; zero
71
   otherwise.  */
72
int folding_initializer = 0;
73
 
74
/* The following constants represent a bit based encoding of GCC's
75
   comparison operators.  This encoding simplifies transformations
76
   on relational comparison operators, such as AND and OR.  */
77
enum comparison_code {
78
  COMPCODE_FALSE = 0,
79
  COMPCODE_LT = 1,
80
  COMPCODE_EQ = 2,
81
  COMPCODE_LE = 3,
82
  COMPCODE_GT = 4,
83
  COMPCODE_LTGT = 5,
84
  COMPCODE_GE = 6,
85
  COMPCODE_ORD = 7,
86
  COMPCODE_UNORD = 8,
87
  COMPCODE_UNLT = 9,
88
  COMPCODE_UNEQ = 10,
89
  COMPCODE_UNLE = 11,
90
  COMPCODE_UNGT = 12,
91
  COMPCODE_NE = 13,
92
  COMPCODE_UNGE = 14,
93
  COMPCODE_TRUE = 15
94
};
95
 
96
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98
static bool negate_mathfn_p (enum built_in_function);
99
static bool negate_expr_p (tree);
100
static tree negate_expr (tree);
101
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103
static tree const_binop (enum tree_code, tree, tree, int);
104
static enum comparison_code comparison_to_compcode (enum tree_code);
105
static enum tree_code compcode_to_comparison (enum comparison_code);
106
static int operand_equal_for_comparison_p (tree, tree, tree);
107
static int twoval_comparison_p (tree, tree *, tree *, int *);
108
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111
static tree make_bit_field_ref (location_t, tree, tree,
112
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
113
static tree optimize_bit_field_compare (location_t, enum tree_code,
114
                                        tree, tree, tree);
115
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116
                                    HOST_WIDE_INT *,
117
                                    enum machine_mode *, int *, int *,
118
                                    tree *, tree *);
119
static int all_ones_mask_p (const_tree, int);
120
static tree sign_bit_p (tree, const_tree);
121
static int simple_operand_p (const_tree);
122
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123
static tree range_predecessor (tree);
124
static tree range_successor (tree);
125
extern tree make_range (tree, int *, tree *, tree *, bool *);
126
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
127
                          tree, tree);
128
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
129
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
130
static tree unextend (tree, int, int, tree);
131
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
132
static tree optimize_minmax_comparison (location_t, enum tree_code,
133
                                        tree, tree, tree);
134
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
135
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
136
static tree fold_binary_op_with_conditional_arg (location_t,
137
                                                 enum tree_code, tree,
138
                                                 tree, tree,
139
                                                 tree, tree, int);
140
static tree fold_mathfn_compare (location_t,
141
                                 enum built_in_function, enum tree_code,
142
                                 tree, tree, tree);
143
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
144
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145
static bool reorder_operands_p (const_tree, const_tree);
146
static tree fold_negate_const (tree, tree);
147
static tree fold_not_const (tree, tree);
148
static tree fold_relational_const (enum tree_code, tree, tree, tree);
149
static tree fold_convert_const (enum tree_code, tree, tree);
150
 
151
 
152
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
153
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
154
   and SUM1.  Then this yields nonzero if overflow occurred during the
155
   addition.
156
 
157
   Overflow occurs if A and B have the same sign, but A and SUM differ in
158
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
159
   sign.  */
160
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
161
 
162
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
163
   We do that by representing the two-word integer in 4 words, with only
164
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
165
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */
166
 
167
#define LOWPART(x) \
168
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
169
#define HIGHPART(x) \
170
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
171
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
172
 
173
/* Unpack a two-word integer into 4 words.
174
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175
   WORDS points to the array of HOST_WIDE_INTs.  */
176
 
177
static void
178
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
179
{
180
  words[0] = LOWPART (low);
181
  words[1] = HIGHPART (low);
182
  words[2] = LOWPART (hi);
183
  words[3] = HIGHPART (hi);
184
}
185
 
186
/* Pack an array of 4 words into a two-word integer.
187
   WORDS points to the array of words.
188
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
189
 
190
static void
191
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192
        HOST_WIDE_INT *hi)
193
{
194
  *low = words[0] + words[1] * BASE;
195
  *hi = words[2] + words[3] * BASE;
196
}
197
 
198
/* Force the double-word integer L1, H1 to be within the range of the
199
   integer type TYPE.  Stores the properly truncated and sign-extended
200
   double-word integer in *LV, *HV.  Returns true if the operation
201
   overflows, that is, argument and result are different.  */
202
 
203
int
204
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
205
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
206
{
207
  unsigned HOST_WIDE_INT low0 = l1;
208
  HOST_WIDE_INT high0 = h1;
209
  unsigned int prec = TYPE_PRECISION (type);
210
  int sign_extended_type;
211
 
212
  /* Size types *are* sign extended.  */
213
  sign_extended_type = (!TYPE_UNSIGNED (type)
214
                        || (TREE_CODE (type) == INTEGER_TYPE
215
                            && TYPE_IS_SIZETYPE (type)));
216
 
217
  /* First clear all bits that are beyond the type's precision.  */
218
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219
    ;
220
  else if (prec > HOST_BITS_PER_WIDE_INT)
221
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
222
  else
223
    {
224
      h1 = 0;
225
      if (prec < HOST_BITS_PER_WIDE_INT)
226
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
227
    }
228
 
229
  /* Then do sign extension if necessary.  */
230
  if (!sign_extended_type)
231
    /* No sign extension */;
232
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233
    /* Correct width already.  */;
234
  else if (prec > HOST_BITS_PER_WIDE_INT)
235
    {
236
      /* Sign extend top half? */
237
      if (h1 & ((unsigned HOST_WIDE_INT)1
238
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240
    }
241
  else if (prec == HOST_BITS_PER_WIDE_INT)
242
    {
243
      if ((HOST_WIDE_INT)l1 < 0)
244
        h1 = -1;
245
    }
246
  else
247
    {
248
      /* Sign extend bottom half? */
249
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
250
        {
251
          h1 = -1;
252
          l1 |= (HOST_WIDE_INT)(-1) << prec;
253
        }
254
    }
255
 
256
  *lv = l1;
257
  *hv = h1;
258
 
259
  /* If the value didn't fit, signal overflow.  */
260
  return l1 != low0 || h1 != high0;
261
}
262
 
263
/* We force the double-int HIGH:LOW to the range of the type TYPE by
264
   sign or zero extending it.
265
   OVERFLOWABLE indicates if we are interested
266
   in overflow of the value, when >0 we are only interested in signed
267
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
268
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
269
   indicates whether constant overflow has already occurred.  We force
270
   T's value to be within range of T's type (by setting to 0 or 1 all
271
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
272
        OVERFLOWED is nonzero,
273
        or OVERFLOWABLE is >0 and signed overflow occurs
274
        or OVERFLOWABLE is <0 and any overflow occurs
275
   We return a new tree node for the extended double-int.  The node
276
   is shared if no overflow flags are set.  */
277
 
278
tree
279
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280
                       HOST_WIDE_INT high, int overflowable,
281
                       bool overflowed)
282
{
283
  int sign_extended_type;
284
  bool overflow;
285
 
286
  /* Size types *are* sign extended.  */
287
  sign_extended_type = (!TYPE_UNSIGNED (type)
288
                        || (TREE_CODE (type) == INTEGER_TYPE
289
                            && TYPE_IS_SIZETYPE (type)));
290
 
291
  overflow = fit_double_type (low, high, &low, &high, type);
292
 
293
  /* If we need to set overflow flags, return a new unshared node.  */
294
  if (overflowed || overflow)
295
    {
296
      if (overflowed
297
          || overflowable < 0
298
          || (overflowable > 0 && sign_extended_type))
299
        {
300
          tree t = make_node (INTEGER_CST);
301
          TREE_INT_CST_LOW (t) = low;
302
          TREE_INT_CST_HIGH (t) = high;
303
          TREE_TYPE (t) = type;
304
          TREE_OVERFLOW (t) = 1;
305
          return t;
306
        }
307
    }
308
 
309
  /* Else build a shared node.  */
310
  return build_int_cst_wide (type, low, high);
311
}
312
 
313
/* Add two doubleword integers with doubleword result.
314
   Return nonzero if the operation overflows according to UNSIGNED_P.
315
   Each argument is given as two `HOST_WIDE_INT' pieces.
316
   One argument is L1 and H1; the other, L2 and H2.
317
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
318
 
319
int
320
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
323
                      bool unsigned_p)
324
{
325
  unsigned HOST_WIDE_INT l;
326
  HOST_WIDE_INT h;
327
 
328
  l = l1 + l2;
329
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
330
                       + (unsigned HOST_WIDE_INT) h2
331
                       + (l < l1));
332
 
333
  *lv = l;
334
  *hv = h;
335
 
336
  if (unsigned_p)
337
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
338
            || (h == h1
339
                && l < l1));
340
  else
341
    return OVERFLOW_SUM_SIGN (h1, h2, h);
342
}
343
 
344
/* Negate a doubleword integer with doubleword result.
345
   Return nonzero if the operation overflows, assuming it's signed.
346
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
348
 
349
int
350
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
352
{
353
  if (l1 == 0)
354
    {
355
      *lv = 0;
356
      *hv = - h1;
357
      return (*hv & h1) < 0;
358
    }
359
  else
360
    {
361
      *lv = -l1;
362
      *hv = ~h1;
363
      return 0;
364
    }
365
}
366
 
367
/* Multiply two doubleword integers with doubleword result.
368
   Return nonzero if the operation overflows according to UNSIGNED_P.
369
   Each argument is given as two `HOST_WIDE_INT' pieces.
370
   One argument is L1 and H1; the other, L2 and H2.
371
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
372
 
373
int
374
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377
                      bool unsigned_p)
378
{
379
  HOST_WIDE_INT arg1[4];
380
  HOST_WIDE_INT arg2[4];
381
  HOST_WIDE_INT prod[4 * 2];
382
  unsigned HOST_WIDE_INT carry;
383
  int i, j, k;
384
  unsigned HOST_WIDE_INT toplow, neglow;
385
  HOST_WIDE_INT tophigh, neghigh;
386
 
387
  encode (arg1, l1, h1);
388
  encode (arg2, l2, h2);
389
 
390
  memset (prod, 0, sizeof prod);
391
 
392
  for (i = 0; i < 4; i++)
393
    {
394
      carry = 0;
395
      for (j = 0; j < 4; j++)
396
        {
397
          k = i + j;
398
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
399
          carry += arg1[i] * arg2[j];
400
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
401
          carry += prod[k];
402
          prod[k] = LOWPART (carry);
403
          carry = HIGHPART (carry);
404
        }
405
      prod[i + 4] = carry;
406
    }
407
 
408
  decode (prod, lv, hv);
409
  decode (prod + 4, &toplow, &tophigh);
410
 
411
  /* Unsigned overflow is immediate.  */
412
  if (unsigned_p)
413
    return (toplow | tophigh) != 0;
414
 
415
  /* Check for signed overflow by calculating the signed representation of the
416
     top half of the result; it should agree with the low half's sign bit.  */
417
  if (h1 < 0)
418
    {
419
      neg_double (l2, h2, &neglow, &neghigh);
420
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421
    }
422
  if (h2 < 0)
423
    {
424
      neg_double (l1, h1, &neglow, &neghigh);
425
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
426
    }
427
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428
}
429
 
430
/* Shift the doubleword integer in L1, H1 left by COUNT places
431
   keeping only PREC bits of result.
432
   Shift right if COUNT is negative.
433
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
435
 
436
void
437
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438
               HOST_WIDE_INT count, unsigned int prec,
439
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
440
{
441
  unsigned HOST_WIDE_INT signmask;
442
 
443
  if (count < 0)
444
    {
445
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
446
      return;
447
    }
448
 
449
  if (SHIFT_COUNT_TRUNCATED)
450
    count %= prec;
451
 
452
  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
453
    {
454
      /* Shifting by the host word size is undefined according to the
455
         ANSI standard, so we must handle this as a special case.  */
456
      *hv = 0;
457
      *lv = 0;
458
    }
459
  else if (count >= HOST_BITS_PER_WIDE_INT)
460
    {
461
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462
      *lv = 0;
463
    }
464
  else
465
    {
466
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468
      *lv = l1 << count;
469
    }
470
 
471
  /* Sign extend all bits that are beyond the precision.  */
472
 
473
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
474
                ? ((unsigned HOST_WIDE_INT) *hv
475
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476
                : (*lv >> (prec - 1))) & 1);
477
 
478
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
479
    ;
480
  else if (prec >= HOST_BITS_PER_WIDE_INT)
481
    {
482
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
484
    }
485
  else
486
    {
487
      *hv = signmask;
488
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489
      *lv |= signmask << prec;
490
    }
491
}
492
 
493
/* Shift the doubleword integer in L1, H1 right by COUNT places
494
   keeping only PREC bits of result.  COUNT must be positive.
495
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
497
 
498
void
499
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500
               HOST_WIDE_INT count, unsigned int prec,
501
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502
               int arith)
503
{
504
  unsigned HOST_WIDE_INT signmask;
505
 
506
  signmask = (arith
507
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508
              : 0);
509
 
510
  if (SHIFT_COUNT_TRUNCATED)
511
    count %= prec;
512
 
513
  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
514
    {
515
      /* Shifting by the host word size is undefined according to the
516
         ANSI standard, so we must handle this as a special case.  */
517
      *hv = 0;
518
      *lv = 0;
519
    }
520
  else if (count >= HOST_BITS_PER_WIDE_INT)
521
    {
522
      *hv = 0;
523
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524
    }
525
  else
526
    {
527
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
528
      *lv = ((l1 >> count)
529
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530
    }
531
 
532
  /* Zero / sign extend all bits that are beyond the precision.  */
533
 
534
  if (count >= (HOST_WIDE_INT)prec)
535
    {
536
      *hv = signmask;
537
      *lv = signmask;
538
    }
539
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
540
    ;
541
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
542
    {
543
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
545
    }
546
  else
547
    {
548
      *hv = signmask;
549
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550
      *lv |= signmask << (prec - count);
551
    }
552
}
553
 
554
/* Rotate the doubleword integer in L1, H1 left by COUNT places
555
   keeping only PREC bits of result.
556
   Rotate right if COUNT is negative.
557
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
558
 
559
void
560
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561
                HOST_WIDE_INT count, unsigned int prec,
562
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
563
{
564
  unsigned HOST_WIDE_INT s1l, s2l;
565
  HOST_WIDE_INT s1h, s2h;
566
 
567
  count %= prec;
568
  if (count < 0)
569
    count += prec;
570
 
571
  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573
  *lv = s1l | s2l;
574
  *hv = s1h | s2h;
575
}
576
 
577
/* Rotate the doubleword integer in L1, H1 left by COUNT places
578
   keeping only PREC bits of result.  COUNT must be positive.
579
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
580
 
581
void
582
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583
                HOST_WIDE_INT count, unsigned int prec,
584
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
585
{
586
  unsigned HOST_WIDE_INT s1l, s2l;
587
  HOST_WIDE_INT s1h, s2h;
588
 
589
  count %= prec;
590
  if (count < 0)
591
    count += prec;
592
 
593
  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595
  *lv = s1l | s2l;
596
  *hv = s1h | s2h;
597
}
598
 
599
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601
   CODE is a tree code for a kind of division, one of
602
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
603
   or EXACT_DIV_EXPR
604
   It controls how the quotient is rounded to an integer.
605
   Return nonzero if the operation overflows.
606
   UNS nonzero says do unsigned division.  */
607
 
608
int
609
div_and_round_double (enum tree_code code, int uns,
610
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611
                      HOST_WIDE_INT hnum_orig,
612
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613
                      HOST_WIDE_INT hden_orig,
614
                      unsigned HOST_WIDE_INT *lquo,
615
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616
                      HOST_WIDE_INT *hrem)
617
{
618
  int quo_neg = 0;
619
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
620
  HOST_WIDE_INT den[4], quo[4];
621
  int i, j;
622
  unsigned HOST_WIDE_INT work;
623
  unsigned HOST_WIDE_INT carry = 0;
624
  unsigned HOST_WIDE_INT lnum = lnum_orig;
625
  HOST_WIDE_INT hnum = hnum_orig;
626
  unsigned HOST_WIDE_INT lden = lden_orig;
627
  HOST_WIDE_INT hden = hden_orig;
628
  int overflow = 0;
629
 
630
  if (hden == 0 && lden == 0)
631
    overflow = 1, lden = 1;
632
 
633
  /* Calculate quotient sign and convert operands to unsigned.  */
634
  if (!uns)
635
    {
636
      if (hnum < 0)
637
        {
638
          quo_neg = ~ quo_neg;
639
          /* (minimum integer) / (-1) is the only overflow case.  */
640
          if (neg_double (lnum, hnum, &lnum, &hnum)
641
              && ((HOST_WIDE_INT) lden & hden) == -1)
642
            overflow = 1;
643
        }
644
      if (hden < 0)
645
        {
646
          quo_neg = ~ quo_neg;
647
          neg_double (lden, hden, &lden, &hden);
648
        }
649
    }
650
 
651
  if (hnum == 0 && hden == 0)
652
    {                           /* single precision */
653
      *hquo = *hrem = 0;
654
      /* This unsigned division rounds toward zero.  */
655
      *lquo = lnum / lden;
656
      goto finish_up;
657
    }
658
 
659
  if (hnum == 0)
660
    {                           /* trivial case: dividend < divisor */
661
      /* hden != 0 already checked.  */
662
      *hquo = *lquo = 0;
663
      *hrem = hnum;
664
      *lrem = lnum;
665
      goto finish_up;
666
    }
667
 
668
  memset (quo, 0, sizeof quo);
669
 
670
  memset (num, 0, sizeof num);   /* to zero 9th element */
671
  memset (den, 0, sizeof den);
672
 
673
  encode (num, lnum, hnum);
674
  encode (den, lden, hden);
675
 
676
  /* Special code for when the divisor < BASE.  */
677
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
678
    {
679
      /* hnum != 0 already checked.  */
680
      for (i = 4 - 1; i >= 0; i--)
681
        {
682
          work = num[i] + carry * BASE;
683
          quo[i] = work / lden;
684
          carry = work % lden;
685
        }
686
    }
687
  else
688
    {
689
      /* Full double precision division,
690
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
691
      int num_hi_sig, den_hi_sig;
692
      unsigned HOST_WIDE_INT quo_est, scale;
693
 
694
      /* Find the highest nonzero divisor digit.  */
695
      for (i = 4 - 1;; i--)
696
        if (den[i] != 0)
697
          {
698
            den_hi_sig = i;
699
            break;
700
          }
701
 
702
      /* Insure that the first digit of the divisor is at least BASE/2.
703
         This is required by the quotient digit estimation algorithm.  */
704
 
705
      scale = BASE / (den[den_hi_sig] + 1);
706
      if (scale > 1)
707
        {               /* scale divisor and dividend */
708
          carry = 0;
709
          for (i = 0; i <= 4 - 1; i++)
710
            {
711
              work = (num[i] * scale) + carry;
712
              num[i] = LOWPART (work);
713
              carry = HIGHPART (work);
714
            }
715
 
716
          num[4] = carry;
717
          carry = 0;
718
          for (i = 0; i <= 4 - 1; i++)
719
            {
720
              work = (den[i] * scale) + carry;
721
              den[i] = LOWPART (work);
722
              carry = HIGHPART (work);
723
              if (den[i] != 0) den_hi_sig = i;
724
            }
725
        }
726
 
727
      num_hi_sig = 4;
728
 
729
      /* Main loop */
730
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
731
        {
732
          /* Guess the next quotient digit, quo_est, by dividing the first
733
             two remaining dividend digits by the high order quotient digit.
734
             quo_est is never low and is at most 2 high.  */
735
          unsigned HOST_WIDE_INT tmp;
736
 
737
          num_hi_sig = i + den_hi_sig + 1;
738
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739
          if (num[num_hi_sig] != den[den_hi_sig])
740
            quo_est = work / den[den_hi_sig];
741
          else
742
            quo_est = BASE - 1;
743
 
744
          /* Refine quo_est so it's usually correct, and at most one high.  */
745
          tmp = work - quo_est * den[den_hi_sig];
746
          if (tmp < BASE
747
              && (den[den_hi_sig - 1] * quo_est
748
                  > (tmp * BASE + num[num_hi_sig - 2])))
749
            quo_est--;
750
 
751
          /* Try QUO_EST as the quotient digit, by multiplying the
752
             divisor by QUO_EST and subtracting from the remaining dividend.
753
             Keep in mind that QUO_EST is the I - 1st digit.  */
754
 
755
          carry = 0;
756
          for (j = 0; j <= den_hi_sig; j++)
757
            {
758
              work = quo_est * den[j] + carry;
759
              carry = HIGHPART (work);
760
              work = num[i + j] - LOWPART (work);
761
              num[i + j] = LOWPART (work);
762
              carry += HIGHPART (work) != 0;
763
            }
764
 
765
          /* If quo_est was high by one, then num[i] went negative and
766
             we need to correct things.  */
767
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
768
            {
769
              quo_est--;
770
              carry = 0;         /* add divisor back in */
771
              for (j = 0; j <= den_hi_sig; j++)
772
                {
773
                  work = num[i + j] + den[j] + carry;
774
                  carry = HIGHPART (work);
775
                  num[i + j] = LOWPART (work);
776
                }
777
 
778
              num [num_hi_sig] += carry;
779
            }
780
 
781
          /* Store the quotient digit.  */
782
          quo[i] = quo_est;
783
        }
784
    }
785
 
786
  decode (quo, lquo, hquo);
787
 
788
 finish_up:
789
  /* If result is negative, make it so.  */
790
  if (quo_neg)
791
    neg_double (*lquo, *hquo, lquo, hquo);
792
 
793
  /* Compute trial remainder:  rem = num - (quo * den)  */
794
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795
  neg_double (*lrem, *hrem, lrem, hrem);
796
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
797
 
798
  switch (code)
799
    {
800
    case TRUNC_DIV_EXPR:
801
    case TRUNC_MOD_EXPR:        /* round toward zero */
802
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
803
      return overflow;
804
 
805
    case FLOOR_DIV_EXPR:
806
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
807
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
808
        {
809
          /* quo = quo - 1;  */
810
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
811
                      lquo, hquo);
812
        }
813
      else
814
        return overflow;
815
      break;
816
 
817
    case CEIL_DIV_EXPR:
818
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
819
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
820
        {
821
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822
                      lquo, hquo);
823
        }
824
      else
825
        return overflow;
826
      break;
827
 
828
    case ROUND_DIV_EXPR:
829
    case ROUND_MOD_EXPR:        /* round to closest integer */
830
      {
831
        unsigned HOST_WIDE_INT labs_rem = *lrem;
832
        HOST_WIDE_INT habs_rem = *hrem;
833
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834
        HOST_WIDE_INT habs_den = hden, htwice;
835
 
836
        /* Get absolute values.  */
837
        if (*hrem < 0)
838
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
839
        if (hden < 0)
840
          neg_double (lden, hden, &labs_den, &habs_den);
841
 
842
        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
843
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844
                    labs_rem, habs_rem, &ltwice, &htwice);
845
 
846
        if (((unsigned HOST_WIDE_INT) habs_den
847
             < (unsigned HOST_WIDE_INT) htwice)
848
            || (((unsigned HOST_WIDE_INT) habs_den
849
                 == (unsigned HOST_WIDE_INT) htwice)
850
                && (labs_den <= ltwice)))
851
          {
852
            if (*hquo < 0)
853
              /* quo = quo - 1;  */
854
              add_double (*lquo, *hquo,
855
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856
            else
857
              /* quo = quo + 1; */
858
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
859
                          lquo, hquo);
860
          }
861
        else
862
          return overflow;
863
      }
864
      break;
865
 
866
    default:
867
      gcc_unreachable ();
868
    }
869
 
870
  /* Compute true remainder:  rem = num - (quo * den)  */
871
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872
  neg_double (*lrem, *hrem, lrem, hrem);
873
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874
  return overflow;
875
}
876
 
877
/* If ARG2 divides ARG1 with zero remainder, carries out the division
878
   of type CODE and returns the quotient.
879
   Otherwise returns NULL_TREE.  */
880
 
881
tree
882
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
883
{
884
  unsigned HOST_WIDE_INT int1l, int2l;
885
  HOST_WIDE_INT int1h, int2h;
886
  unsigned HOST_WIDE_INT quol, reml;
887
  HOST_WIDE_INT quoh, remh;
888
  int uns;
889
 
890
  /* The sign of the division is according to operand two, that
891
     does the correct thing for POINTER_PLUS_EXPR where we want
892
     a signed division.  */
893
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
894
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
895
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
896
    uns = false;
897
 
898
  int1l = TREE_INT_CST_LOW (arg1);
899
  int1h = TREE_INT_CST_HIGH (arg1);
900
  int2l = TREE_INT_CST_LOW (arg2);
901
  int2h = TREE_INT_CST_HIGH (arg2);
902
 
903
  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904
                        &quol, &quoh, &reml, &remh);
905
  if (remh != 0 || reml != 0)
906
    return NULL_TREE;
907
 
908
  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
909
}
910
 
911
/* This is nonzero if we should defer warnings about undefined
912
   overflow.  This facility exists because these warnings are a
913
   special case.  The code to estimate loop iterations does not want
914
   to issue any warnings, since it works with expressions which do not
915
   occur in user code.  Various bits of cleanup code call fold(), but
916
   only use the result if it has certain characteristics (e.g., is a
917
   constant); that code only wants to issue a warning if the result is
918
   used.  */
919
 
920
static int fold_deferring_overflow_warnings;
921
 
922
/* If a warning about undefined overflow is deferred, this is the
923
   warning.  Note that this may cause us to turn two warnings into
924
   one, but that is fine since it is sufficient to only give one
925
   warning per expression.  */
926
 
927
static const char* fold_deferred_overflow_warning;
928
 
929
/* If a warning about undefined overflow is deferred, this is the
930
   level at which the warning should be emitted.  */
931
 
932
static enum warn_strict_overflow_code fold_deferred_overflow_code;
933
 
934
/* Start deferring overflow warnings.  We could use a stack here to
935
   permit nested calls, but at present it is not necessary.  */
936
 
937
void
938
fold_defer_overflow_warnings (void)
939
{
940
  ++fold_deferring_overflow_warnings;
941
}
942
 
943
/* Stop deferring overflow warnings.  If there is a pending warning,
944
   and ISSUE is true, then issue the warning if appropriate.  STMT is
945
   the statement with which the warning should be associated (used for
946
   location information); STMT may be NULL.  CODE is the level of the
947
   warning--a warn_strict_overflow_code value.  This function will use
948
   the smaller of CODE and the deferred code when deciding whether to
949
   issue the warning.  CODE may be zero to mean to always use the
950
   deferred code.  */
951
 
952
void
953
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
954
{
955
  const char *warnmsg;
956
  location_t locus;
957
 
958
  gcc_assert (fold_deferring_overflow_warnings > 0);
959
  --fold_deferring_overflow_warnings;
960
  if (fold_deferring_overflow_warnings > 0)
961
    {
962
      if (fold_deferred_overflow_warning != NULL
963
          && code != 0
964
          && code < (int) fold_deferred_overflow_code)
965
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
966
      return;
967
    }
968
 
969
  warnmsg = fold_deferred_overflow_warning;
970
  fold_deferred_overflow_warning = NULL;
971
 
972
  if (!issue || warnmsg == NULL)
973
    return;
974
 
975
  if (gimple_no_warning_p (stmt))
976
    return;
977
 
978
  /* Use the smallest code level when deciding to issue the
979
     warning.  */
980
  if (code == 0 || code > (int) fold_deferred_overflow_code)
981
    code = fold_deferred_overflow_code;
982
 
983
  if (!issue_strict_overflow_warning (code))
984
    return;
985
 
986
  if (stmt == NULL)
987
    locus = input_location;
988
  else
989
    locus = gimple_location (stmt);
990
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
991
}
992
 
993
/* Stop deferring overflow warnings, ignoring any deferred
994
   warnings.  */
995
 
996
void
997
fold_undefer_and_ignore_overflow_warnings (void)
998
{
999
  fold_undefer_overflow_warnings (false, NULL, 0);
1000
}
1001
 
1002
/* Whether we are deferring overflow warnings.  */
1003
 
1004
bool
1005
fold_deferring_overflow_warnings_p (void)
1006
{
1007
  return fold_deferring_overflow_warnings > 0;
1008
}
1009
 
1010
/* This is called when we fold something based on the fact that signed
1011
   overflow is undefined.  */
1012
 
1013
static void
1014
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015
{
1016
  if (fold_deferring_overflow_warnings > 0)
1017
    {
1018
      if (fold_deferred_overflow_warning == NULL
1019
          || wc < fold_deferred_overflow_code)
1020
        {
1021
          fold_deferred_overflow_warning = gmsgid;
1022
          fold_deferred_overflow_code = wc;
1023
        }
1024
    }
1025
  else if (issue_strict_overflow_warning (wc))
1026
    warning (OPT_Wstrict_overflow, gmsgid);
1027
}
1028
 
1029
/* Return true if the built-in mathematical function specified by CODE
1030
   is odd, i.e. -f(x) == f(-x).  */
1031
 
1032
static bool
1033
negate_mathfn_p (enum built_in_function code)
1034
{
1035
  switch (code)
1036
    {
1037
    CASE_FLT_FN (BUILT_IN_ASIN):
1038
    CASE_FLT_FN (BUILT_IN_ASINH):
1039
    CASE_FLT_FN (BUILT_IN_ATAN):
1040
    CASE_FLT_FN (BUILT_IN_ATANH):
1041
    CASE_FLT_FN (BUILT_IN_CASIN):
1042
    CASE_FLT_FN (BUILT_IN_CASINH):
1043
    CASE_FLT_FN (BUILT_IN_CATAN):
1044
    CASE_FLT_FN (BUILT_IN_CATANH):
1045
    CASE_FLT_FN (BUILT_IN_CBRT):
1046
    CASE_FLT_FN (BUILT_IN_CPROJ):
1047
    CASE_FLT_FN (BUILT_IN_CSIN):
1048
    CASE_FLT_FN (BUILT_IN_CSINH):
1049
    CASE_FLT_FN (BUILT_IN_CTAN):
1050
    CASE_FLT_FN (BUILT_IN_CTANH):
1051
    CASE_FLT_FN (BUILT_IN_ERF):
1052
    CASE_FLT_FN (BUILT_IN_LLROUND):
1053
    CASE_FLT_FN (BUILT_IN_LROUND):
1054
    CASE_FLT_FN (BUILT_IN_ROUND):
1055
    CASE_FLT_FN (BUILT_IN_SIN):
1056
    CASE_FLT_FN (BUILT_IN_SINH):
1057
    CASE_FLT_FN (BUILT_IN_TAN):
1058
    CASE_FLT_FN (BUILT_IN_TANH):
1059
    CASE_FLT_FN (BUILT_IN_TRUNC):
1060
      return true;
1061
 
1062
    CASE_FLT_FN (BUILT_IN_LLRINT):
1063
    CASE_FLT_FN (BUILT_IN_LRINT):
1064
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065
    CASE_FLT_FN (BUILT_IN_RINT):
1066
      return !flag_rounding_math;
1067
 
1068
    default:
1069
      break;
1070
    }
1071
  return false;
1072
}
1073
 
1074
/* Check whether we may negate an integer constant T without causing
1075
   overflow.  */
1076
 
1077
bool
1078
may_negate_without_overflow_p (const_tree t)
1079
{
1080
  unsigned HOST_WIDE_INT val;
1081
  unsigned int prec;
1082
  tree type;
1083
 
1084
  gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085
 
1086
  type = TREE_TYPE (t);
1087
  if (TYPE_UNSIGNED (type))
1088
    return false;
1089
 
1090
  prec = TYPE_PRECISION (type);
1091
  if (prec > HOST_BITS_PER_WIDE_INT)
1092
    {
1093
      if (TREE_INT_CST_LOW (t) != 0)
1094
        return true;
1095
      prec -= HOST_BITS_PER_WIDE_INT;
1096
      val = TREE_INT_CST_HIGH (t);
1097
    }
1098
  else
1099
    val = TREE_INT_CST_LOW (t);
1100
  if (prec < HOST_BITS_PER_WIDE_INT)
1101
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1103
}
1104
 
1105
/* Determine whether an expression T can be cheaply negated using
1106
   the function negate_expr without introducing undefined overflow.  */
1107
 
1108
static bool
1109
negate_expr_p (tree t)
1110
{
1111
  tree type;
1112
 
1113
  if (t == 0)
1114
    return false;
1115
 
1116
  type = TREE_TYPE (t);
1117
 
1118
  STRIP_SIGN_NOPS (t);
1119
  switch (TREE_CODE (t))
1120
    {
1121
    case INTEGER_CST:
1122
      if (TYPE_OVERFLOW_WRAPS (type))
1123
        return true;
1124
 
1125
      /* Check that -CST will not overflow type.  */
1126
      return may_negate_without_overflow_p (t);
1127
    case BIT_NOT_EXPR:
1128
      return (INTEGRAL_TYPE_P (type)
1129
              && TYPE_OVERFLOW_WRAPS (type));
1130
 
1131
    case FIXED_CST:
1132
    case NEGATE_EXPR:
1133
      return true;
1134
 
1135
    case REAL_CST:
1136
      /* We want to canonicalize to positive real constants.  Pretend
1137
         that only negative ones can be easily negated.  */
1138
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
1139
 
1140
    case COMPLEX_CST:
1141
      return negate_expr_p (TREE_REALPART (t))
1142
             && negate_expr_p (TREE_IMAGPART (t));
1143
 
1144
    case COMPLEX_EXPR:
1145
      return negate_expr_p (TREE_OPERAND (t, 0))
1146
             && negate_expr_p (TREE_OPERAND (t, 1));
1147
 
1148
    case CONJ_EXPR:
1149
      return negate_expr_p (TREE_OPERAND (t, 0));
1150
 
1151
    case PLUS_EXPR:
1152
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1153
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1154
        return false;
1155
      /* -(A + B) -> (-B) - A.  */
1156
      if (negate_expr_p (TREE_OPERAND (t, 1))
1157
          && reorder_operands_p (TREE_OPERAND (t, 0),
1158
                                 TREE_OPERAND (t, 1)))
1159
        return true;
1160
      /* -(A + B) -> (-A) - B.  */
1161
      return negate_expr_p (TREE_OPERAND (t, 0));
1162
 
1163
    case MINUS_EXPR:
1164
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
1165
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1166
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1167
             && reorder_operands_p (TREE_OPERAND (t, 0),
1168
                                    TREE_OPERAND (t, 1));
1169
 
1170
    case MULT_EXPR:
1171
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
1172
        break;
1173
 
1174
      /* Fall through.  */
1175
 
1176
    case RDIV_EXPR:
1177
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1178
        return negate_expr_p (TREE_OPERAND (t, 1))
1179
               || negate_expr_p (TREE_OPERAND (t, 0));
1180
      break;
1181
 
1182
    case TRUNC_DIV_EXPR:
1183
    case ROUND_DIV_EXPR:
1184
    case FLOOR_DIV_EXPR:
1185
    case CEIL_DIV_EXPR:
1186
    case EXACT_DIV_EXPR:
1187
      /* In general we can't negate A / B, because if A is INT_MIN and
1188
         B is 1, we may turn this into INT_MIN / -1 which is undefined
1189
         and actually traps on some architectures.  But if overflow is
1190
         undefined, we can negate, because - (INT_MIN / 1) is an
1191
         overflow.  */
1192
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1193
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1194
        break;
1195
      return negate_expr_p (TREE_OPERAND (t, 1))
1196
             || negate_expr_p (TREE_OPERAND (t, 0));
1197
 
1198
    case NOP_EXPR:
1199
      /* Negate -((double)float) as (double)(-float).  */
1200
      if (TREE_CODE (type) == REAL_TYPE)
1201
        {
1202
          tree tem = strip_float_extensions (t);
1203
          if (tem != t)
1204
            return negate_expr_p (tem);
1205
        }
1206
      break;
1207
 
1208
    case CALL_EXPR:
1209
      /* Negate -f(x) as f(-x).  */
1210
      if (negate_mathfn_p (builtin_mathfn_code (t)))
1211
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
1212
      break;
1213
 
1214
    case RSHIFT_EXPR:
1215
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1216
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1217
        {
1218
          tree op1 = TREE_OPERAND (t, 1);
1219
          if (TREE_INT_CST_HIGH (op1) == 0
1220
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1221
                 == TREE_INT_CST_LOW (op1))
1222
            return true;
1223
        }
1224
      break;
1225
 
1226
    default:
1227
      break;
1228
    }
1229
  return false;
1230
}
1231
 
1232
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1233
   simplification is possible.
1234
   If negate_expr_p would return true for T, NULL_TREE will never be
1235
   returned.  */
1236
 
1237
static tree
1238
fold_negate_expr (location_t loc, tree t)
1239
{
1240
  tree type = TREE_TYPE (t);
1241
  tree tem;
1242
 
1243
  switch (TREE_CODE (t))
1244
    {
1245
    /* Convert - (~A) to A + 1.  */
1246
    case BIT_NOT_EXPR:
1247
      if (INTEGRAL_TYPE_P (type))
1248
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
1249
                            build_int_cst (type, 1));
1250
      break;
1251
 
1252
    case INTEGER_CST:
1253
      tem = fold_negate_const (t, type);
1254
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1255
          || !TYPE_OVERFLOW_TRAPS (type))
1256
        return tem;
1257
      break;
1258
 
1259
    case REAL_CST:
1260
      tem = fold_negate_const (t, type);
1261
      /* Two's complement FP formats, such as c4x, may overflow.  */
1262
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1263
        return tem;
1264
      break;
1265
 
1266
    case FIXED_CST:
1267
      tem = fold_negate_const (t, type);
1268
      return tem;
1269
 
1270
    case COMPLEX_CST:
1271
      {
1272
        tree rpart = negate_expr (TREE_REALPART (t));
1273
        tree ipart = negate_expr (TREE_IMAGPART (t));
1274
 
1275
        if ((TREE_CODE (rpart) == REAL_CST
1276
             && TREE_CODE (ipart) == REAL_CST)
1277
            || (TREE_CODE (rpart) == INTEGER_CST
1278
                && TREE_CODE (ipart) == INTEGER_CST))
1279
          return build_complex (type, rpart, ipart);
1280
      }
1281
      break;
1282
 
1283
    case COMPLEX_EXPR:
1284
      if (negate_expr_p (t))
1285
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
1286
                            fold_negate_expr (loc, TREE_OPERAND (t, 0)),
1287
                            fold_negate_expr (loc, TREE_OPERAND (t, 1)));
1288
      break;
1289
 
1290
    case CONJ_EXPR:
1291
      if (negate_expr_p (t))
1292
        return fold_build1_loc (loc, CONJ_EXPR, type,
1293
                            fold_negate_expr (loc, TREE_OPERAND (t, 0)));
1294
      break;
1295
 
1296
    case NEGATE_EXPR:
1297
      return TREE_OPERAND (t, 0);
1298
 
1299
    case PLUS_EXPR:
1300
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1301
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1302
        {
1303
          /* -(A + B) -> (-B) - A.  */
1304
          if (negate_expr_p (TREE_OPERAND (t, 1))
1305
              && reorder_operands_p (TREE_OPERAND (t, 0),
1306
                                     TREE_OPERAND (t, 1)))
1307
            {
1308
              tem = negate_expr (TREE_OPERAND (t, 1));
1309
              return fold_build2_loc (loc, MINUS_EXPR, type,
1310
                                  tem, TREE_OPERAND (t, 0));
1311
            }
1312
 
1313
          /* -(A + B) -> (-A) - B.  */
1314
          if (negate_expr_p (TREE_OPERAND (t, 0)))
1315
            {
1316
              tem = negate_expr (TREE_OPERAND (t, 0));
1317
              return fold_build2_loc (loc, MINUS_EXPR, type,
1318
                                  tem, TREE_OPERAND (t, 1));
1319
            }
1320
        }
1321
      break;
1322
 
1323
    case MINUS_EXPR:
1324
      /* - (A - B) -> B - A  */
1325
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1326
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1327
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1328
        return fold_build2_loc (loc, MINUS_EXPR, type,
1329
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1330
      break;
1331
 
1332
    case MULT_EXPR:
1333
      if (TYPE_UNSIGNED (type))
1334
        break;
1335
 
1336
      /* Fall through.  */
1337
 
1338
    case RDIV_EXPR:
1339
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1340
        {
1341
          tem = TREE_OPERAND (t, 1);
1342
          if (negate_expr_p (tem))
1343
            return fold_build2_loc (loc, TREE_CODE (t), type,
1344
                                TREE_OPERAND (t, 0), negate_expr (tem));
1345
          tem = TREE_OPERAND (t, 0);
1346
          if (negate_expr_p (tem))
1347
            return fold_build2_loc (loc, TREE_CODE (t), type,
1348
                                negate_expr (tem), TREE_OPERAND (t, 1));
1349
        }
1350
      break;
1351
 
1352
    case TRUNC_DIV_EXPR:
1353
    case ROUND_DIV_EXPR:
1354
    case FLOOR_DIV_EXPR:
1355
    case CEIL_DIV_EXPR:
1356
    case EXACT_DIV_EXPR:
1357
      /* In general we can't negate A / B, because if A is INT_MIN and
1358
         B is 1, we may turn this into INT_MIN / -1 which is undefined
1359
         and actually traps on some architectures.  But if overflow is
1360
         undefined, we can negate, because - (INT_MIN / 1) is an
1361
         overflow.  */
1362
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1363
        {
1364
          const char * const warnmsg = G_("assuming signed overflow does not "
1365
                                          "occur when negating a division");
1366
          tem = TREE_OPERAND (t, 1);
1367
          if (negate_expr_p (tem))
1368
            {
1369
              if (INTEGRAL_TYPE_P (type)
1370
                  && (TREE_CODE (tem) != INTEGER_CST
1371
                      || integer_onep (tem)))
1372
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1373
              return fold_build2_loc (loc, TREE_CODE (t), type,
1374
                                  TREE_OPERAND (t, 0), negate_expr (tem));
1375
            }
1376
          tem = TREE_OPERAND (t, 0);
1377
          if (negate_expr_p (tem))
1378
            {
1379
              if (INTEGRAL_TYPE_P (type)
1380
                  && (TREE_CODE (tem) != INTEGER_CST
1381
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1382
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1383
              return fold_build2_loc (loc, TREE_CODE (t), type,
1384
                                  negate_expr (tem), TREE_OPERAND (t, 1));
1385
            }
1386
        }
1387
      break;
1388
 
1389
    case NOP_EXPR:
1390
      /* Convert -((double)float) into (double)(-float).  */
1391
      if (TREE_CODE (type) == REAL_TYPE)
1392
        {
1393
          tem = strip_float_extensions (t);
1394
          if (tem != t && negate_expr_p (tem))
1395
            return fold_convert_loc (loc, type, negate_expr (tem));
1396
        }
1397
      break;
1398
 
1399
    case CALL_EXPR:
1400
      /* Negate -f(x) as f(-x).  */
1401
      if (negate_mathfn_p (builtin_mathfn_code (t))
1402
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1403
        {
1404
          tree fndecl, arg;
1405
 
1406
          fndecl = get_callee_fndecl (t);
1407
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
1408
          return build_call_expr_loc (loc, fndecl, 1, arg);
1409
        }
1410
      break;
1411
 
1412
    case RSHIFT_EXPR:
1413
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1414
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1415
        {
1416
          tree op1 = TREE_OPERAND (t, 1);
1417
          if (TREE_INT_CST_HIGH (op1) == 0
1418
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1419
                 == TREE_INT_CST_LOW (op1))
1420
            {
1421
              tree ntype = TYPE_UNSIGNED (type)
1422
                           ? signed_type_for (type)
1423
                           : unsigned_type_for (type);
1424
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
1425
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
1426
              return fold_convert_loc (loc, type, temp);
1427
            }
1428
        }
1429
      break;
1430
 
1431
    default:
1432
      break;
1433
    }
1434
 
1435
  return NULL_TREE;
1436
}
1437
 
1438
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1439
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
1440
   return NULL_TREE. */
1441
 
1442
static tree
1443
negate_expr (tree t)
1444
{
1445
  tree type, tem;
1446
  location_t loc;
1447
 
1448
  if (t == NULL_TREE)
1449
    return NULL_TREE;
1450
 
1451
  loc = EXPR_LOCATION (t);
1452
  type = TREE_TYPE (t);
1453
  STRIP_SIGN_NOPS (t);
1454
 
1455
  tem = fold_negate_expr (loc, t);
1456
  if (!tem)
1457
    {
1458
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1459
      SET_EXPR_LOCATION (tem, loc);
1460
    }
1461
  return fold_convert_loc (loc, type, tem);
1462
}
1463
 
1464
/* Split a tree IN into a constant, literal and variable parts that could be
1465
   combined with CODE to make IN.  "constant" means an expression with
1466
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
1467
   commutative arithmetic operation.  Store the constant part into *CONP,
1468
   the literal in *LITP and return the variable part.  If a part isn't
1469
   present, set it to null.  If the tree does not decompose in this way,
1470
   return the entire tree as the variable part and the other parts as null.
1471
 
1472
   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
1473
   case, we negate an operand that was subtracted.  Except if it is a
1474
   literal for which we use *MINUS_LITP instead.
1475
 
1476
   If NEGATE_P is true, we are negating all of IN, again except a literal
1477
   for which we use *MINUS_LITP instead.
1478
 
1479
   If IN is itself a literal or constant, return it as appropriate.
1480
 
1481
   Note that we do not guarantee that any of the three values will be the
1482
   same type as IN, but they will have the same signedness and mode.  */
1483
 
1484
static tree
1485
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1486
            tree *minus_litp, int negate_p)
1487
{
1488
  tree var = 0;
1489
 
1490
  *conp = 0;
1491
  *litp = 0;
1492
  *minus_litp = 0;
1493
 
1494
  /* Strip any conversions that don't change the machine mode or signedness.  */
1495
  STRIP_SIGN_NOPS (in);
1496
 
1497
  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1498
      || TREE_CODE (in) == FIXED_CST)
1499
    *litp = in;
1500
  else if (TREE_CODE (in) == code
1501
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1502
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1503
               /* We can associate addition and subtraction together (even
1504
                  though the C standard doesn't say so) for integers because
1505
                  the value is not affected.  For reals, the value might be
1506
                  affected, so we can't.  */
1507
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1508
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1509
    {
1510
      tree op0 = TREE_OPERAND (in, 0);
1511
      tree op1 = TREE_OPERAND (in, 1);
1512
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1513
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1514
 
1515
      /* First see if either of the operands is a literal, then a constant.  */
1516
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1517
          || TREE_CODE (op0) == FIXED_CST)
1518
        *litp = op0, op0 = 0;
1519
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1520
               || TREE_CODE (op1) == FIXED_CST)
1521
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1522
 
1523
      if (op0 != 0 && TREE_CONSTANT (op0))
1524
        *conp = op0, op0 = 0;
1525
      else if (op1 != 0 && TREE_CONSTANT (op1))
1526
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1527
 
1528
      /* If we haven't dealt with either operand, this is not a case we can
1529
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1530
      if (op0 != 0 && op1 != 0)
1531
        var = in;
1532
      else if (op0 != 0)
1533
        var = op0;
1534
      else
1535
        var = op1, neg_var_p = neg1_p;
1536
 
1537
      /* Now do any needed negations.  */
1538
      if (neg_litp_p)
1539
        *minus_litp = *litp, *litp = 0;
1540
      if (neg_conp_p)
1541
        *conp = negate_expr (*conp);
1542
      if (neg_var_p)
1543
        var = negate_expr (var);
1544
    }
1545
  else if (TREE_CONSTANT (in))
1546
    *conp = in;
1547
  else
1548
    var = in;
1549
 
1550
  if (negate_p)
1551
    {
1552
      if (*litp)
1553
        *minus_litp = *litp, *litp = 0;
1554
      else if (*minus_litp)
1555
        *litp = *minus_litp, *minus_litp = 0;
1556
      *conp = negate_expr (*conp);
1557
      var = negate_expr (var);
1558
    }
1559
 
1560
  return var;
1561
}
1562
 
1563
/* Re-associate trees split by the above function.  T1 and T2 are
1564
   either expressions to associate or null.  Return the new
1565
   expression, if any.  LOC is the location of the new expression.  If
1566
   we build an operation, do it in TYPE and with CODE.  */
1567
 
1568
static tree
1569
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1570
{
1571
  tree tem;
1572
 
1573
  if (t1 == 0)
1574
    return t2;
1575
  else if (t2 == 0)
1576
    return t1;
1577
 
1578
  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1579
     try to fold this since we will have infinite recursion.  But do
1580
     deal with any NEGATE_EXPRs.  */
1581
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1582
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1583
    {
1584
      if (code == PLUS_EXPR)
1585
        {
1586
          if (TREE_CODE (t1) == NEGATE_EXPR)
1587
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
1588
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
1589
          else if (TREE_CODE (t2) == NEGATE_EXPR)
1590
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
1591
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
1592
          else if (integer_zerop (t2))
1593
            return fold_convert_loc (loc, type, t1);
1594
        }
1595
      else if (code == MINUS_EXPR)
1596
        {
1597
          if (integer_zerop (t2))
1598
            return fold_convert_loc (loc, type, t1);
1599
        }
1600
 
1601
      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
1602
                    fold_convert_loc (loc, type, t2));
1603
      goto associate_trees_exit;
1604
    }
1605
 
1606
  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1607
                      fold_convert_loc (loc, type, t2));
1608
 associate_trees_exit:
1609
  protected_set_expr_location (tem, loc);
1610
  return tem;
1611
}
1612
 
1613
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1614
   for use in int_const_binop, size_binop and size_diffop.  */
1615
 
1616
static bool
1617
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1618
{
1619
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1620
    return false;
1621
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1622
    return false;
1623
 
1624
  switch (code)
1625
    {
1626
    case LSHIFT_EXPR:
1627
    case RSHIFT_EXPR:
1628
    case LROTATE_EXPR:
1629
    case RROTATE_EXPR:
1630
      return true;
1631
 
1632
    default:
1633
      break;
1634
    }
1635
 
1636
  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1637
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1638
         && TYPE_MODE (type1) == TYPE_MODE (type2);
1639
}
1640
 
1641
 
1642
/* Combine two integer constants ARG1 and ARG2 under operation CODE
1643
   to produce a new constant.  Return NULL_TREE if we don't know how
1644
   to evaluate CODE at compile-time.
1645
 
1646
   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1647
 
1648
tree
1649
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1650
{
1651
  unsigned HOST_WIDE_INT int1l, int2l;
1652
  HOST_WIDE_INT int1h, int2h;
1653
  unsigned HOST_WIDE_INT low;
1654
  HOST_WIDE_INT hi;
1655
  unsigned HOST_WIDE_INT garbagel;
1656
  HOST_WIDE_INT garbageh;
1657
  tree t;
1658
  tree type = TREE_TYPE (arg1);
1659
  int uns = TYPE_UNSIGNED (type);
1660
  int is_sizetype
1661
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1662
  int overflow = 0;
1663
 
1664
  int1l = TREE_INT_CST_LOW (arg1);
1665
  int1h = TREE_INT_CST_HIGH (arg1);
1666
  int2l = TREE_INT_CST_LOW (arg2);
1667
  int2h = TREE_INT_CST_HIGH (arg2);
1668
 
1669
  switch (code)
1670
    {
1671
    case BIT_IOR_EXPR:
1672
      low = int1l | int2l, hi = int1h | int2h;
1673
      break;
1674
 
1675
    case BIT_XOR_EXPR:
1676
      low = int1l ^ int2l, hi = int1h ^ int2h;
1677
      break;
1678
 
1679
    case BIT_AND_EXPR:
1680
      low = int1l & int2l, hi = int1h & int2h;
1681
      break;
1682
 
1683
    case RSHIFT_EXPR:
1684
      int2l = -int2l;
1685
    case LSHIFT_EXPR:
1686
      /* It's unclear from the C standard whether shifts can overflow.
1687
         The following code ignores overflow; perhaps a C standard
1688
         interpretation ruling is needed.  */
1689
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1690
                     &low, &hi, !uns);
1691
      break;
1692
 
1693
    case RROTATE_EXPR:
1694
      int2l = - int2l;
1695
    case LROTATE_EXPR:
1696
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1697
                      &low, &hi);
1698
      break;
1699
 
1700
    case PLUS_EXPR:
1701
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1702
      break;
1703
 
1704
    case MINUS_EXPR:
1705
      neg_double (int2l, int2h, &low, &hi);
1706
      add_double (int1l, int1h, low, hi, &low, &hi);
1707
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1708
      break;
1709
 
1710
    case MULT_EXPR:
1711
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1712
      break;
1713
 
1714
    case TRUNC_DIV_EXPR:
1715
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1716
    case EXACT_DIV_EXPR:
1717
      /* This is a shortcut for a common special case.  */
1718
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1719
          && !TREE_OVERFLOW (arg1)
1720
          && !TREE_OVERFLOW (arg2)
1721
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1722
        {
1723
          if (code == CEIL_DIV_EXPR)
1724
            int1l += int2l - 1;
1725
 
1726
          low = int1l / int2l, hi = 0;
1727
          break;
1728
        }
1729
 
1730
      /* ... fall through ...  */
1731
 
1732
    case ROUND_DIV_EXPR:
1733
      if (int2h == 0 && int2l == 0)
1734
        return NULL_TREE;
1735
      if (int2h == 0 && int2l == 1)
1736
        {
1737
          low = int1l, hi = int1h;
1738
          break;
1739
        }
1740
      if (int1l == int2l && int1h == int2h
1741
          && ! (int1l == 0 && int1h == 0))
1742
        {
1743
          low = 1, hi = 0;
1744
          break;
1745
        }
1746
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1747
                                       &low, &hi, &garbagel, &garbageh);
1748
      break;
1749
 
1750
    case TRUNC_MOD_EXPR:
1751
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1752
      /* This is a shortcut for a common special case.  */
1753
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1754
          && !TREE_OVERFLOW (arg1)
1755
          && !TREE_OVERFLOW (arg2)
1756
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1757
        {
1758
          if (code == CEIL_MOD_EXPR)
1759
            int1l += int2l - 1;
1760
          low = int1l % int2l, hi = 0;
1761
          break;
1762
        }
1763
 
1764
      /* ... fall through ...  */
1765
 
1766
    case ROUND_MOD_EXPR:
1767
      if (int2h == 0 && int2l == 0)
1768
        return NULL_TREE;
1769
      overflow = div_and_round_double (code, uns,
1770
                                       int1l, int1h, int2l, int2h,
1771
                                       &garbagel, &garbageh, &low, &hi);
1772
      break;
1773
 
1774
    case MIN_EXPR:
1775
    case MAX_EXPR:
1776
      if (uns)
1777
        low = (((unsigned HOST_WIDE_INT) int1h
1778
                < (unsigned HOST_WIDE_INT) int2h)
1779
               || (((unsigned HOST_WIDE_INT) int1h
1780
                    == (unsigned HOST_WIDE_INT) int2h)
1781
                   && int1l < int2l));
1782
      else
1783
        low = (int1h < int2h
1784
               || (int1h == int2h && int1l < int2l));
1785
 
1786
      if (low == (code == MIN_EXPR))
1787
        low = int1l, hi = int1h;
1788
      else
1789
        low = int2l, hi = int2h;
1790
      break;
1791
 
1792
    default:
1793
      return NULL_TREE;
1794
    }
1795
 
1796
  if (notrunc)
1797
    {
1798
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1799
 
1800
      /* Propagate overflow flags ourselves.  */
1801
      if (((!uns || is_sizetype) && overflow)
1802
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1803
        {
1804
          t = copy_node (t);
1805
          TREE_OVERFLOW (t) = 1;
1806
        }
1807
    }
1808
  else
1809
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1810
                               ((!uns || is_sizetype) && overflow)
1811
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1812
 
1813
  return t;
1814
}
1815
 
1816
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1817
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1818
   are the same kind of constant and the same machine mode.  Return zero if
1819
   combining the constants is not allowed in the current operating mode.
1820
 
1821
   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1822
 
1823
static tree
1824
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1825
{
1826
  /* Sanity check for the recursive cases.  */
1827
  if (!arg1 || !arg2)
1828
    return NULL_TREE;
1829
 
1830
  STRIP_NOPS (arg1);
1831
  STRIP_NOPS (arg2);
1832
 
1833
  if (TREE_CODE (arg1) == INTEGER_CST)
1834
    return int_const_binop (code, arg1, arg2, notrunc);
1835
 
1836
  if (TREE_CODE (arg1) == REAL_CST)
1837
    {
1838
      enum machine_mode mode;
1839
      REAL_VALUE_TYPE d1;
1840
      REAL_VALUE_TYPE d2;
1841
      REAL_VALUE_TYPE value;
1842
      REAL_VALUE_TYPE result;
1843
      bool inexact;
1844
      tree t, type;
1845
 
1846
      /* The following codes are handled by real_arithmetic.  */
1847
      switch (code)
1848
        {
1849
        case PLUS_EXPR:
1850
        case MINUS_EXPR:
1851
        case MULT_EXPR:
1852
        case RDIV_EXPR:
1853
        case MIN_EXPR:
1854
        case MAX_EXPR:
1855
          break;
1856
 
1857
        default:
1858
          return NULL_TREE;
1859
        }
1860
 
1861
      d1 = TREE_REAL_CST (arg1);
1862
      d2 = TREE_REAL_CST (arg2);
1863
 
1864
      type = TREE_TYPE (arg1);
1865
      mode = TYPE_MODE (type);
1866
 
1867
      /* Don't perform operation if we honor signaling NaNs and
1868
         either operand is a NaN.  */
1869
      if (HONOR_SNANS (mode)
1870
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1871
        return NULL_TREE;
1872
 
1873
      /* Don't perform operation if it would raise a division
1874
         by zero exception.  */
1875
      if (code == RDIV_EXPR
1876
          && REAL_VALUES_EQUAL (d2, dconst0)
1877
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1878
        return NULL_TREE;
1879
 
1880
      /* If either operand is a NaN, just return it.  Otherwise, set up
1881
         for floating-point trap; we return an overflow.  */
1882
      if (REAL_VALUE_ISNAN (d1))
1883
        return arg1;
1884
      else if (REAL_VALUE_ISNAN (d2))
1885
        return arg2;
1886
 
1887
      inexact = real_arithmetic (&value, code, &d1, &d2);
1888
      real_convert (&result, mode, &value);
1889
 
1890
      /* Don't constant fold this floating point operation if
1891
         the result has overflowed and flag_trapping_math.  */
1892
      if (flag_trapping_math
1893
          && MODE_HAS_INFINITIES (mode)
1894
          && REAL_VALUE_ISINF (result)
1895
          && !REAL_VALUE_ISINF (d1)
1896
          && !REAL_VALUE_ISINF (d2))
1897
        return NULL_TREE;
1898
 
1899
      /* Don't constant fold this floating point operation if the
1900
         result may dependent upon the run-time rounding mode and
1901
         flag_rounding_math is set, or if GCC's software emulation
1902
         is unable to accurately represent the result.  */
1903
      if ((flag_rounding_math
1904
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1905
          && (inexact || !real_identical (&result, &value)))
1906
        return NULL_TREE;
1907
 
1908
      t = build_real (type, result);
1909
 
1910
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1911
      return t;
1912
    }
1913
 
1914
  if (TREE_CODE (arg1) == FIXED_CST)
1915
    {
1916
      FIXED_VALUE_TYPE f1;
1917
      FIXED_VALUE_TYPE f2;
1918
      FIXED_VALUE_TYPE result;
1919
      tree t, type;
1920
      int sat_p;
1921
      bool overflow_p;
1922
 
1923
      /* The following codes are handled by fixed_arithmetic.  */
1924
      switch (code)
1925
        {
1926
        case PLUS_EXPR:
1927
        case MINUS_EXPR:
1928
        case MULT_EXPR:
1929
        case TRUNC_DIV_EXPR:
1930
          f2 = TREE_FIXED_CST (arg2);
1931
          break;
1932
 
1933
        case LSHIFT_EXPR:
1934
        case RSHIFT_EXPR:
1935
          f2.data.high = TREE_INT_CST_HIGH (arg2);
1936
          f2.data.low = TREE_INT_CST_LOW (arg2);
1937
          f2.mode = SImode;
1938
          break;
1939
 
1940
        default:
1941
          return NULL_TREE;
1942
        }
1943
 
1944
      f1 = TREE_FIXED_CST (arg1);
1945
      type = TREE_TYPE (arg1);
1946
      sat_p = TYPE_SATURATING (type);
1947
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1948
      t = build_fixed (type, result);
1949
      /* Propagate overflow flags.  */
1950
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1951
        TREE_OVERFLOW (t) = 1;
1952
      return t;
1953
    }
1954
 
1955
  if (TREE_CODE (arg1) == COMPLEX_CST)
1956
    {
1957
      tree type = TREE_TYPE (arg1);
1958
      tree r1 = TREE_REALPART (arg1);
1959
      tree i1 = TREE_IMAGPART (arg1);
1960
      tree r2 = TREE_REALPART (arg2);
1961
      tree i2 = TREE_IMAGPART (arg2);
1962
      tree real, imag;
1963
 
1964
      switch (code)
1965
        {
1966
        case PLUS_EXPR:
1967
        case MINUS_EXPR:
1968
          real = const_binop (code, r1, r2, notrunc);
1969
          imag = const_binop (code, i1, i2, notrunc);
1970
          break;
1971
 
1972
        case MULT_EXPR:
1973
          if (COMPLEX_FLOAT_TYPE_P (type))
1974
            return do_mpc_arg2 (arg1, arg2, type,
1975
                                /* do_nonfinite= */ folding_initializer,
1976
                                mpc_mul);
1977
 
1978
          real = const_binop (MINUS_EXPR,
1979
                              const_binop (MULT_EXPR, r1, r2, notrunc),
1980
                              const_binop (MULT_EXPR, i1, i2, notrunc),
1981
                              notrunc);
1982
          imag = const_binop (PLUS_EXPR,
1983
                              const_binop (MULT_EXPR, r1, i2, notrunc),
1984
                              const_binop (MULT_EXPR, i1, r2, notrunc),
1985
                              notrunc);
1986
          break;
1987
 
1988
        case RDIV_EXPR:
1989
          if (COMPLEX_FLOAT_TYPE_P (type))
1990
            return do_mpc_arg2 (arg1, arg2, type,
1991
                                /* do_nonfinite= */ folding_initializer,
1992
                                mpc_div);
1993
          /* Fallthru ... */
1994
        case TRUNC_DIV_EXPR:
1995
        case CEIL_DIV_EXPR:
1996
        case FLOOR_DIV_EXPR:
1997
        case ROUND_DIV_EXPR:
1998
          if (flag_complex_method == 0)
1999
          {
2000
            /* Keep this algorithm in sync with
2001
               tree-complex.c:expand_complex_div_straight().
2002
 
2003
               Expand complex division to scalars, straightforward algorithm.
2004
               a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2005
               t = br*br + bi*bi
2006
            */
2007
            tree magsquared
2008
              = const_binop (PLUS_EXPR,
2009
                             const_binop (MULT_EXPR, r2, r2, notrunc),
2010
                             const_binop (MULT_EXPR, i2, i2, notrunc),
2011
                             notrunc);
2012
            tree t1
2013
              = const_binop (PLUS_EXPR,
2014
                             const_binop (MULT_EXPR, r1, r2, notrunc),
2015
                             const_binop (MULT_EXPR, i1, i2, notrunc),
2016
                             notrunc);
2017
            tree t2
2018
              = const_binop (MINUS_EXPR,
2019
                             const_binop (MULT_EXPR, i1, r2, notrunc),
2020
                             const_binop (MULT_EXPR, r1, i2, notrunc),
2021
                             notrunc);
2022
 
2023
            real = const_binop (code, t1, magsquared, notrunc);
2024
            imag = const_binop (code, t2, magsquared, notrunc);
2025
          }
2026
          else
2027
          {
2028
            /* Keep this algorithm in sync with
2029
               tree-complex.c:expand_complex_div_wide().
2030
 
2031
               Expand complex division to scalars, modified algorithm to minimize
2032
               overflow with wide input ranges.  */
2033
            tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2034
                                        fold_abs_const (r2, TREE_TYPE (type)),
2035
                                        fold_abs_const (i2, TREE_TYPE (type)));
2036
 
2037
            if (integer_nonzerop (compare))
2038
              {
2039
                /* In the TRUE branch, we compute
2040
                   ratio = br/bi;
2041
                   div = (br * ratio) + bi;
2042
                   tr = (ar * ratio) + ai;
2043
                   ti = (ai * ratio) - ar;
2044
                   tr = tr / div;
2045
                   ti = ti / div;  */
2046
                tree ratio = const_binop (code, r2, i2, notrunc);
2047
                tree div = const_binop (PLUS_EXPR, i2,
2048
                                        const_binop (MULT_EXPR, r2, ratio,
2049
                                                     notrunc),
2050
                                        notrunc);
2051
                real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2052
                real = const_binop (PLUS_EXPR, real, i1, notrunc);
2053
                real = const_binop (code, real, div, notrunc);
2054
 
2055
                imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2056
                imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2057
                imag = const_binop (code, imag, div, notrunc);
2058
              }
2059
            else
2060
              {
2061
                /* In the FALSE branch, we compute
2062
                   ratio = d/c;
2063
                   divisor = (d * ratio) + c;
2064
                   tr = (b * ratio) + a;
2065
                   ti = b - (a * ratio);
2066
                   tr = tr / div;
2067
                   ti = ti / div;  */
2068
                tree ratio = const_binop (code, i2, r2, notrunc);
2069
                tree div = const_binop (PLUS_EXPR, r2,
2070
                                        const_binop (MULT_EXPR, i2, ratio,
2071
                                                     notrunc),
2072
                                        notrunc);
2073
 
2074
                real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2075
                real = const_binop (PLUS_EXPR, real, r1, notrunc);
2076
                real = const_binop (code, real, div, notrunc);
2077
 
2078
                imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2079
                imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2080
                imag = const_binop (code, imag, div, notrunc);
2081
              }
2082
          }
2083
          break;
2084
 
2085
        default:
2086
          return NULL_TREE;
2087
        }
2088
 
2089
      if (real && imag)
2090
        return build_complex (type, real, imag);
2091
    }
2092
 
2093
  if (TREE_CODE (arg1) == VECTOR_CST)
2094
    {
2095
      tree type = TREE_TYPE(arg1);
2096
      int count = TYPE_VECTOR_SUBPARTS (type), i;
2097
      tree elements1, elements2, list = NULL_TREE;
2098
 
2099
      if(TREE_CODE(arg2) != VECTOR_CST)
2100
        return NULL_TREE;
2101
 
2102
      elements1 = TREE_VECTOR_CST_ELTS (arg1);
2103
      elements2 = TREE_VECTOR_CST_ELTS (arg2);
2104
 
2105
      for (i = 0; i < count; i++)
2106
        {
2107
          tree elem1, elem2, elem;
2108
 
2109
          /* The trailing elements can be empty and should be treated as 0 */
2110
          if(!elements1)
2111
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2112
          else
2113
            {
2114
              elem1 = TREE_VALUE(elements1);
2115
              elements1 = TREE_CHAIN (elements1);
2116
            }
2117
 
2118
          if(!elements2)
2119
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2120
          else
2121
            {
2122
              elem2 = TREE_VALUE(elements2);
2123
              elements2 = TREE_CHAIN (elements2);
2124
            }
2125
 
2126
          elem = const_binop (code, elem1, elem2, notrunc);
2127
 
2128
          /* It is possible that const_binop cannot handle the given
2129
            code and return NULL_TREE */
2130
          if(elem == NULL_TREE)
2131
            return NULL_TREE;
2132
 
2133
          list = tree_cons (NULL_TREE, elem, list);
2134
        }
2135
      return build_vector(type, nreverse(list));
2136
    }
2137
  return NULL_TREE;
2138
}
2139
 
2140
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
2141
   indicates which particular sizetype to create.  */
2142
 
2143
tree
2144
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2145
{
2146
  return build_int_cst (sizetype_tab[(int) kind], number);
2147
}
2148
 
2149
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
2150
   is a tree code.  The type of the result is taken from the operands.
2151
   Both must be equivalent integer types, ala int_binop_types_match_p.
2152
   If the operands are constant, so is the result.  */
2153
 
2154
tree
2155
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2156
{
2157
  tree type = TREE_TYPE (arg0);
2158
 
2159
  if (arg0 == error_mark_node || arg1 == error_mark_node)
2160
    return error_mark_node;
2161
 
2162
  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2163
                                       TREE_TYPE (arg1)));
2164
 
2165
  /* Handle the special case of two integer constants faster.  */
2166
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2167
    {
2168
      /* And some specific cases even faster than that.  */
2169
      if (code == PLUS_EXPR)
2170
        {
2171
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2172
            return arg1;
2173
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2174
            return arg0;
2175
        }
2176
      else if (code == MINUS_EXPR)
2177
        {
2178
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2179
            return arg0;
2180
        }
2181
      else if (code == MULT_EXPR)
2182
        {
2183
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2184
            return arg1;
2185
        }
2186
 
2187
      /* Handle general case of two integer constants.  */
2188
      return int_const_binop (code, arg0, arg1, 0);
2189
    }
2190
 
2191
  return fold_build2_loc (loc, code, type, arg0, arg1);
2192
}
2193
 
2194
/* Given two values, either both of sizetype or both of bitsizetype,
2195
   compute the difference between the two values.  Return the value
2196
   in signed type corresponding to the type of the operands.  */
2197
 
2198
tree
2199
size_diffop_loc (location_t loc, tree arg0, tree arg1)
2200
{
2201
  tree type = TREE_TYPE (arg0);
2202
  tree ctype;
2203
 
2204
  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2205
                                       TREE_TYPE (arg1)));
2206
 
2207
  /* If the type is already signed, just do the simple thing.  */
2208
  if (!TYPE_UNSIGNED (type))
2209
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2210
 
2211
  if (type == sizetype)
2212
    ctype = ssizetype;
2213
  else if (type == bitsizetype)
2214
    ctype = sbitsizetype;
2215
  else
2216
    ctype = signed_type_for (type);
2217
 
2218
  /* If either operand is not a constant, do the conversions to the signed
2219
     type and subtract.  The hardware will do the right thing with any
2220
     overflow in the subtraction.  */
2221
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2222
    return size_binop_loc (loc, MINUS_EXPR,
2223
                           fold_convert_loc (loc, ctype, arg0),
2224
                           fold_convert_loc (loc, ctype, arg1));
2225
 
2226
  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2227
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
2228
     overflow) and negate (which can't either).  Special-case a result
2229
     of zero while we're here.  */
2230
  if (tree_int_cst_equal (arg0, arg1))
2231
    return build_int_cst (ctype, 0);
2232
  else if (tree_int_cst_lt (arg1, arg0))
2233
    return fold_convert_loc (loc, ctype,
2234
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2235
  else
2236
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2237
                           fold_convert_loc (loc, ctype,
2238
                                             size_binop_loc (loc,
2239
                                                             MINUS_EXPR,
2240
                                                             arg1, arg0)));
2241
}
2242
 
2243
/* A subroutine of fold_convert_const handling conversions of an
2244
   INTEGER_CST to another integer type.  */
2245
 
2246
static tree
2247
fold_convert_const_int_from_int (tree type, const_tree arg1)
2248
{
2249
  tree t;
2250
 
2251
  /* Given an integer constant, make new constant with new type,
2252
     appropriately sign-extended or truncated.  */
2253
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2254
                             TREE_INT_CST_HIGH (arg1),
2255
                             /* Don't set the overflow when
2256
                                converting from a pointer,  */
2257
                             !POINTER_TYPE_P (TREE_TYPE (arg1))
2258
                             /* or to a sizetype with same signedness
2259
                                and the precision is unchanged.
2260
                                ???  sizetype is always sign-extended,
2261
                                but its signedness depends on the
2262
                                frontend.  Thus we see spurious overflows
2263
                                here if we do not check this.  */
2264
                             && !((TYPE_PRECISION (TREE_TYPE (arg1))
2265
                                   == TYPE_PRECISION (type))
2266
                                  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2267
                                      == TYPE_UNSIGNED (type))
2268
                                  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2269
                                       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2270
                                      || (TREE_CODE (type) == INTEGER_TYPE
2271
                                          && TYPE_IS_SIZETYPE (type)))),
2272
                             (TREE_INT_CST_HIGH (arg1) < 0
2273
                              && (TYPE_UNSIGNED (type)
2274
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2275
                             | TREE_OVERFLOW (arg1));
2276
 
2277
  return t;
2278
}
2279
 
2280
/* A subroutine of fold_convert_const handling conversions a REAL_CST
2281
   to an integer type.  */
2282
 
2283
static tree
2284
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2285
{
2286
  int overflow = 0;
2287
  tree t;
2288
 
2289
  /* The following code implements the floating point to integer
2290
     conversion rules required by the Java Language Specification,
2291
     that IEEE NaNs are mapped to zero and values that overflow
2292
     the target precision saturate, i.e. values greater than
2293
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2294
     are mapped to INT_MIN.  These semantics are allowed by the
2295
     C and C++ standards that simply state that the behavior of
2296
     FP-to-integer conversion is unspecified upon overflow.  */
2297
 
2298
  HOST_WIDE_INT high, low;
2299
  REAL_VALUE_TYPE r;
2300
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2301
 
2302
  switch (code)
2303
    {
2304
    case FIX_TRUNC_EXPR:
2305
      real_trunc (&r, VOIDmode, &x);
2306
      break;
2307
 
2308
    default:
2309
      gcc_unreachable ();
2310
    }
2311
 
2312
  /* If R is NaN, return zero and show we have an overflow.  */
2313
  if (REAL_VALUE_ISNAN (r))
2314
    {
2315
      overflow = 1;
2316
      high = 0;
2317
      low = 0;
2318
    }
2319
 
2320
  /* See if R is less than the lower bound or greater than the
2321
     upper bound.  */
2322
 
2323
  if (! overflow)
2324
    {
2325
      tree lt = TYPE_MIN_VALUE (type);
2326
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2327
      if (REAL_VALUES_LESS (r, l))
2328
        {
2329
          overflow = 1;
2330
          high = TREE_INT_CST_HIGH (lt);
2331
          low = TREE_INT_CST_LOW (lt);
2332
        }
2333
    }
2334
 
2335
  if (! overflow)
2336
    {
2337
      tree ut = TYPE_MAX_VALUE (type);
2338
      if (ut)
2339
        {
2340
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2341
          if (REAL_VALUES_LESS (u, r))
2342
            {
2343
              overflow = 1;
2344
              high = TREE_INT_CST_HIGH (ut);
2345
              low = TREE_INT_CST_LOW (ut);
2346
            }
2347
        }
2348
    }
2349
 
2350
  if (! overflow)
2351
    REAL_VALUE_TO_INT (&low, &high, r);
2352
 
2353
  t = force_fit_type_double (type, low, high, -1,
2354
                             overflow | TREE_OVERFLOW (arg1));
2355
  return t;
2356
}
2357
 
2358
/* A subroutine of fold_convert_const handling conversions of a
2359
   FIXED_CST to an integer type.  */
2360
 
2361
static tree
2362
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2363
{
2364
  tree t;
2365
  double_int temp, temp_trunc;
2366
  unsigned int mode;
2367
 
2368
  /* Right shift FIXED_CST to temp by fbit.  */
2369
  temp = TREE_FIXED_CST (arg1).data;
2370
  mode = TREE_FIXED_CST (arg1).mode;
2371
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2372
    {
2373
      lshift_double (temp.low, temp.high,
2374
                     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2375
                     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2376
 
2377
      /* Left shift temp to temp_trunc by fbit.  */
2378
      lshift_double (temp.low, temp.high,
2379
                     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2380
                     &temp_trunc.low, &temp_trunc.high,
2381
                     SIGNED_FIXED_POINT_MODE_P (mode));
2382
    }
2383
  else
2384
    {
2385
      temp.low = 0;
2386
      temp.high = 0;
2387
      temp_trunc.low = 0;
2388
      temp_trunc.high = 0;
2389
    }
2390
 
2391
  /* If FIXED_CST is negative, we need to round the value toward 0.
2392
     By checking if the fractional bits are not zero to add 1 to temp.  */
2393
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2394
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2395
    {
2396
      double_int one;
2397
      one.low = 1;
2398
      one.high = 0;
2399
      temp = double_int_add (temp, one);
2400
    }
2401
 
2402
  /* Given a fixed-point constant, make new constant with new type,
2403
     appropriately sign-extended or truncated.  */
2404
  t = force_fit_type_double (type, temp.low, temp.high, -1,
2405
                             (temp.high < 0
2406
                              && (TYPE_UNSIGNED (type)
2407
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2408
                             | TREE_OVERFLOW (arg1));
2409
 
2410
  return t;
2411
}
2412
 
2413
/* A subroutine of fold_convert_const handling conversions a REAL_CST
2414
   to another floating point type.  */
2415
 
2416
static tree
2417
fold_convert_const_real_from_real (tree type, const_tree arg1)
2418
{
2419
  REAL_VALUE_TYPE value;
2420
  tree t;
2421
 
2422
  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2423
  t = build_real (type, value);
2424
 
2425
  /* If converting an infinity or NAN to a representation that doesn't
2426
     have one, set the overflow bit so that we can produce some kind of
2427
     error message at the appropriate point if necessary.  It's not the
2428
     most user-friendly message, but it's better than nothing.  */
2429
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2430
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2431
    TREE_OVERFLOW (t) = 1;
2432
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2433
           && !MODE_HAS_NANS (TYPE_MODE (type)))
2434
    TREE_OVERFLOW (t) = 1;
2435
  /* Regular overflow, conversion produced an infinity in a mode that
2436
     can't represent them.  */
2437
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2438
           && REAL_VALUE_ISINF (value)
2439
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2440
    TREE_OVERFLOW (t) = 1;
2441
  else
2442
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2443
  return t;
2444
}
2445
 
2446
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
2447
   to a floating point type.  */
2448
 
2449
static tree
2450
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2451
{
2452
  REAL_VALUE_TYPE value;
2453
  tree t;
2454
 
2455
  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2456
  t = build_real (type, value);
2457
 
2458
  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2459
  return t;
2460
}
2461
 
2462
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
2463
   to another fixed-point type.  */
2464
 
2465
static tree
2466
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2467
{
2468
  FIXED_VALUE_TYPE value;
2469
  tree t;
2470
  bool overflow_p;
2471
 
2472
  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2473
                              TYPE_SATURATING (type));
2474
  t = build_fixed (type, value);
2475
 
2476
  /* Propagate overflow flags.  */
2477
  if (overflow_p | TREE_OVERFLOW (arg1))
2478
    TREE_OVERFLOW (t) = 1;
2479
  return t;
2480
}
2481
 
2482
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2483
   to a fixed-point type.  */
2484
 
2485
static tree
2486
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2487
{
2488
  FIXED_VALUE_TYPE value;
2489
  tree t;
2490
  bool overflow_p;
2491
 
2492
  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2493
                                       TREE_INT_CST (arg1),
2494
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2495
                                       TYPE_SATURATING (type));
2496
  t = build_fixed (type, value);
2497
 
2498
  /* Propagate overflow flags.  */
2499
  if (overflow_p | TREE_OVERFLOW (arg1))
2500
    TREE_OVERFLOW (t) = 1;
2501
  return t;
2502
}
2503
 
2504
/* A subroutine of fold_convert_const handling conversions a REAL_CST
2505
   to a fixed-point type.  */
2506
 
2507
static tree
2508
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2509
{
2510
  FIXED_VALUE_TYPE value;
2511
  tree t;
2512
  bool overflow_p;
2513
 
2514
  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2515
                                        &TREE_REAL_CST (arg1),
2516
                                        TYPE_SATURATING (type));
2517
  t = build_fixed (type, value);
2518
 
2519
  /* Propagate overflow flags.  */
2520
  if (overflow_p | TREE_OVERFLOW (arg1))
2521
    TREE_OVERFLOW (t) = 1;
2522
  return t;
2523
}
2524
 
2525
/* Attempt to fold type conversion operation CODE of expression ARG1 to
2526
   type TYPE.  If no simplification can be done return NULL_TREE.  */
2527
 
2528
static tree
2529
fold_convert_const (enum tree_code code, tree type, tree arg1)
2530
{
2531
  if (TREE_TYPE (arg1) == type)
2532
    return arg1;
2533
 
2534
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2535
      || TREE_CODE (type) == OFFSET_TYPE)
2536
    {
2537
      if (TREE_CODE (arg1) == INTEGER_CST)
2538
        return fold_convert_const_int_from_int (type, arg1);
2539
      else if (TREE_CODE (arg1) == REAL_CST)
2540
        return fold_convert_const_int_from_real (code, type, arg1);
2541
      else if (TREE_CODE (arg1) == FIXED_CST)
2542
        return fold_convert_const_int_from_fixed (type, arg1);
2543
    }
2544
  else if (TREE_CODE (type) == REAL_TYPE)
2545
    {
2546
      if (TREE_CODE (arg1) == INTEGER_CST)
2547
        return build_real_from_int_cst (type, arg1);
2548
      else if (TREE_CODE (arg1) == REAL_CST)
2549
        return fold_convert_const_real_from_real (type, arg1);
2550
      else if (TREE_CODE (arg1) == FIXED_CST)
2551
        return fold_convert_const_real_from_fixed (type, arg1);
2552
    }
2553
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2554
    {
2555
      if (TREE_CODE (arg1) == FIXED_CST)
2556
        return fold_convert_const_fixed_from_fixed (type, arg1);
2557
      else if (TREE_CODE (arg1) == INTEGER_CST)
2558
        return fold_convert_const_fixed_from_int (type, arg1);
2559
      else if (TREE_CODE (arg1) == REAL_CST)
2560
        return fold_convert_const_fixed_from_real (type, arg1);
2561
    }
2562
  return NULL_TREE;
2563
}
2564
 
2565
/* Construct a vector of zero elements of vector type TYPE.  */
2566
 
2567
static tree
2568
build_zero_vector (tree type)
2569
{
2570
  tree elem, list;
2571
  int i, units;
2572
 
2573
  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2574
  units = TYPE_VECTOR_SUBPARTS (type);
2575
 
2576
  list = NULL_TREE;
2577
  for (i = 0; i < units; i++)
2578
    list = tree_cons (NULL_TREE, elem, list);
2579
  return build_vector (type, list);
2580
}
2581
 
2582
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
2583
 
2584
bool
2585
fold_convertible_p (const_tree type, const_tree arg)
2586
{
2587
  tree orig = TREE_TYPE (arg);
2588
 
2589
  if (type == orig)
2590
    return true;
2591
 
2592
  if (TREE_CODE (arg) == ERROR_MARK
2593
      || TREE_CODE (type) == ERROR_MARK
2594
      || TREE_CODE (orig) == ERROR_MARK)
2595
    return false;
2596
 
2597
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2598
    return true;
2599
 
2600
  switch (TREE_CODE (type))
2601
    {
2602
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2603
    case POINTER_TYPE: case REFERENCE_TYPE:
2604
    case OFFSET_TYPE:
2605
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2606
          || TREE_CODE (orig) == OFFSET_TYPE)
2607
        return true;
2608
      return (TREE_CODE (orig) == VECTOR_TYPE
2609
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2610
 
2611
    case REAL_TYPE:
2612
    case FIXED_POINT_TYPE:
2613
    case COMPLEX_TYPE:
2614
    case VECTOR_TYPE:
2615
    case VOID_TYPE:
2616
      return TREE_CODE (type) == TREE_CODE (orig);
2617
 
2618
    default:
2619
      return false;
2620
    }
2621
}
2622
 
2623
/* Convert expression ARG to type TYPE.  Used by the middle-end for
2624
   simple conversions in preference to calling the front-end's convert.  */
2625
 
2626
tree
2627
fold_convert_loc (location_t loc, tree type, tree arg)
2628
{
2629
  tree orig = TREE_TYPE (arg);
2630
  tree tem;
2631
 
2632
  if (type == orig)
2633
    return arg;
2634
 
2635
  if (TREE_CODE (arg) == ERROR_MARK
2636
      || TREE_CODE (type) == ERROR_MARK
2637
      || TREE_CODE (orig) == ERROR_MARK)
2638
    return error_mark_node;
2639
 
2640
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2641
    return fold_build1_loc (loc, NOP_EXPR, type, arg);
2642
 
2643
  switch (TREE_CODE (type))
2644
    {
2645
    case POINTER_TYPE:
2646
    case REFERENCE_TYPE:
2647
      /* Handle conversions between pointers to different address spaces.  */
2648
      if (POINTER_TYPE_P (orig)
2649
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2650
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2651
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2652
      /* fall through */
2653
 
2654
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2655
    case OFFSET_TYPE:
2656
      if (TREE_CODE (arg) == INTEGER_CST)
2657
        {
2658
          tem = fold_convert_const (NOP_EXPR, type, arg);
2659
          if (tem != NULL_TREE)
2660
            return tem;
2661
        }
2662
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2663
          || TREE_CODE (orig) == OFFSET_TYPE)
2664
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
2665
      if (TREE_CODE (orig) == COMPLEX_TYPE)
2666
        return fold_convert_loc (loc, type,
2667
                             fold_build1_loc (loc, REALPART_EXPR,
2668
                                          TREE_TYPE (orig), arg));
2669
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2670
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2671
      return fold_build1_loc (loc, NOP_EXPR, type, arg);
2672
 
2673
    case REAL_TYPE:
2674
      if (TREE_CODE (arg) == INTEGER_CST)
2675
        {
2676
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
2677
          if (tem != NULL_TREE)
2678
            return tem;
2679
        }
2680
      else if (TREE_CODE (arg) == REAL_CST)
2681
        {
2682
          tem = fold_convert_const (NOP_EXPR, type, arg);
2683
          if (tem != NULL_TREE)
2684
            return tem;
2685
        }
2686
      else if (TREE_CODE (arg) == FIXED_CST)
2687
        {
2688
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2689
          if (tem != NULL_TREE)
2690
            return tem;
2691
        }
2692
 
2693
      switch (TREE_CODE (orig))
2694
        {
2695
        case INTEGER_TYPE:
2696
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2697
        case POINTER_TYPE: case REFERENCE_TYPE:
2698
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2699
 
2700
        case REAL_TYPE:
2701
          return fold_build1_loc (loc, NOP_EXPR, type, arg);
2702
 
2703
        case FIXED_POINT_TYPE:
2704
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2705
 
2706
        case COMPLEX_TYPE:
2707
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2708
          return fold_convert_loc (loc, type, tem);
2709
 
2710
        default:
2711
          gcc_unreachable ();
2712
        }
2713
 
2714
    case FIXED_POINT_TYPE:
2715
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2716
          || TREE_CODE (arg) == REAL_CST)
2717
        {
2718
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2719
          if (tem != NULL_TREE)
2720
            goto fold_convert_exit;
2721
        }
2722
 
2723
      switch (TREE_CODE (orig))
2724
        {
2725
        case FIXED_POINT_TYPE:
2726
        case INTEGER_TYPE:
2727
        case ENUMERAL_TYPE:
2728
        case BOOLEAN_TYPE:
2729
        case REAL_TYPE:
2730
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2731
 
2732
        case COMPLEX_TYPE:
2733
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2734
          return fold_convert_loc (loc, type, tem);
2735
 
2736
        default:
2737
          gcc_unreachable ();
2738
        }
2739
 
2740
    case COMPLEX_TYPE:
2741
      switch (TREE_CODE (orig))
2742
        {
2743
        case INTEGER_TYPE:
2744
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2745
        case POINTER_TYPE: case REFERENCE_TYPE:
2746
        case REAL_TYPE:
2747
        case FIXED_POINT_TYPE:
2748
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
2749
                              fold_convert_loc (loc, TREE_TYPE (type), arg),
2750
                              fold_convert_loc (loc, TREE_TYPE (type),
2751
                                            integer_zero_node));
2752
        case COMPLEX_TYPE:
2753
          {
2754
            tree rpart, ipart;
2755
 
2756
            if (TREE_CODE (arg) == COMPLEX_EXPR)
2757
              {
2758
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
2759
                                      TREE_OPERAND (arg, 0));
2760
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
2761
                                      TREE_OPERAND (arg, 1));
2762
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2763
              }
2764
 
2765
            arg = save_expr (arg);
2766
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2767
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2768
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2769
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2770
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2771
          }
2772
 
2773
        default:
2774
          gcc_unreachable ();
2775
        }
2776
 
2777
    case VECTOR_TYPE:
2778
      if (integer_zerop (arg))
2779
        return build_zero_vector (type);
2780
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2781
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2782
                  || TREE_CODE (orig) == VECTOR_TYPE);
2783
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2784
 
2785
    case VOID_TYPE:
2786
      tem = fold_ignored_result (arg);
2787
      if (TREE_CODE (tem) == MODIFY_EXPR)
2788
        goto fold_convert_exit;
2789
      return fold_build1_loc (loc, NOP_EXPR, type, tem);
2790
 
2791
    default:
2792
      gcc_unreachable ();
2793
    }
2794
 fold_convert_exit:
2795
  protected_set_expr_location (tem, loc);
2796
  return tem;
2797
}
2798
 
2799
/* Return false if expr can be assumed not to be an lvalue, true
2800
   otherwise.  */
2801
 
2802
static bool
2803
maybe_lvalue_p (const_tree x)
2804
{
2805
  /* We only need to wrap lvalue tree codes.  */
2806
  switch (TREE_CODE (x))
2807
  {
2808
  case VAR_DECL:
2809
  case PARM_DECL:
2810
  case RESULT_DECL:
2811
  case LABEL_DECL:
2812
  case FUNCTION_DECL:
2813
  case SSA_NAME:
2814
 
2815
  case COMPONENT_REF:
2816
  case INDIRECT_REF:
2817
  case ALIGN_INDIRECT_REF:
2818
  case MISALIGNED_INDIRECT_REF:
2819
  case ARRAY_REF:
2820
  case ARRAY_RANGE_REF:
2821
  case BIT_FIELD_REF:
2822
  case OBJ_TYPE_REF:
2823
 
2824
  case REALPART_EXPR:
2825
  case IMAGPART_EXPR:
2826
  case PREINCREMENT_EXPR:
2827
  case PREDECREMENT_EXPR:
2828
  case SAVE_EXPR:
2829
  case TRY_CATCH_EXPR:
2830
  case WITH_CLEANUP_EXPR:
2831
  case COMPOUND_EXPR:
2832
  case MODIFY_EXPR:
2833
  case TARGET_EXPR:
2834
  case COND_EXPR:
2835
  case BIND_EXPR:
2836
    break;
2837
 
2838
  default:
2839
    /* Assume the worst for front-end tree codes.  */
2840
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2841
      break;
2842
    return false;
2843
  }
2844
 
2845
  return true;
2846
}
2847
 
2848
/* Return an expr equal to X but certainly not valid as an lvalue.  */
2849
 
2850
tree
2851
non_lvalue_loc (location_t loc, tree x)
2852
{
2853
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2854
     us.  */
2855
  if (in_gimple_form)
2856
    return x;
2857
 
2858
  if (! maybe_lvalue_p (x))
2859
    return x;
2860
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2861
  SET_EXPR_LOCATION (x, loc);
2862
  return x;
2863
}
2864
 
2865
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2866
   Zero means allow extended lvalues.  */
2867
 
2868
int pedantic_lvalues;
2869
 
2870
/* When pedantic, return an expr equal to X but certainly not valid as a
2871
   pedantic lvalue.  Otherwise, return X.  */
2872
 
2873
static tree
2874
pedantic_non_lvalue_loc (location_t loc, tree x)
2875
{
2876
  if (pedantic_lvalues)
2877
    return non_lvalue_loc (loc, x);
2878
  protected_set_expr_location (x, loc);
2879
  return x;
2880
}
2881
 
2882
/* Given a tree comparison code, return the code that is the logical inverse
2883
   of the given code.  It is not safe to do this for floating-point
2884
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2885
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2886
 
2887
enum tree_code
2888
invert_tree_comparison (enum tree_code code, bool honor_nans)
2889
{
2890
  if (honor_nans && flag_trapping_math)
2891
    return ERROR_MARK;
2892
 
2893
  switch (code)
2894
    {
2895
    case EQ_EXPR:
2896
      return NE_EXPR;
2897
    case NE_EXPR:
2898
      return EQ_EXPR;
2899
    case GT_EXPR:
2900
      return honor_nans ? UNLE_EXPR : LE_EXPR;
2901
    case GE_EXPR:
2902
      return honor_nans ? UNLT_EXPR : LT_EXPR;
2903
    case LT_EXPR:
2904
      return honor_nans ? UNGE_EXPR : GE_EXPR;
2905
    case LE_EXPR:
2906
      return honor_nans ? UNGT_EXPR : GT_EXPR;
2907
    case LTGT_EXPR:
2908
      return UNEQ_EXPR;
2909
    case UNEQ_EXPR:
2910
      return LTGT_EXPR;
2911
    case UNGT_EXPR:
2912
      return LE_EXPR;
2913
    case UNGE_EXPR:
2914
      return LT_EXPR;
2915
    case UNLT_EXPR:
2916
      return GE_EXPR;
2917
    case UNLE_EXPR:
2918
      return GT_EXPR;
2919
    case ORDERED_EXPR:
2920
      return UNORDERED_EXPR;
2921
    case UNORDERED_EXPR:
2922
      return ORDERED_EXPR;
2923
    default:
2924
      gcc_unreachable ();
2925
    }
2926
}
2927
 
2928
/* Similar, but return the comparison that results if the operands are
2929
   swapped.  This is safe for floating-point.  */
2930
 
2931
enum tree_code
2932
swap_tree_comparison (enum tree_code code)
2933
{
2934
  switch (code)
2935
    {
2936
    case EQ_EXPR:
2937
    case NE_EXPR:
2938
    case ORDERED_EXPR:
2939
    case UNORDERED_EXPR:
2940
    case LTGT_EXPR:
2941
    case UNEQ_EXPR:
2942
      return code;
2943
    case GT_EXPR:
2944
      return LT_EXPR;
2945
    case GE_EXPR:
2946
      return LE_EXPR;
2947
    case LT_EXPR:
2948
      return GT_EXPR;
2949
    case LE_EXPR:
2950
      return GE_EXPR;
2951
    case UNGT_EXPR:
2952
      return UNLT_EXPR;
2953
    case UNGE_EXPR:
2954
      return UNLE_EXPR;
2955
    case UNLT_EXPR:
2956
      return UNGT_EXPR;
2957
    case UNLE_EXPR:
2958
      return UNGE_EXPR;
2959
    default:
2960
      gcc_unreachable ();
2961
    }
2962
}
2963
 
2964
 
2965
/* Convert a comparison tree code from an enum tree_code representation
2966
   into a compcode bit-based encoding.  This function is the inverse of
2967
   compcode_to_comparison.  */
2968
 
2969
static enum comparison_code
2970
comparison_to_compcode (enum tree_code code)
2971
{
2972
  switch (code)
2973
    {
2974
    case LT_EXPR:
2975
      return COMPCODE_LT;
2976
    case EQ_EXPR:
2977
      return COMPCODE_EQ;
2978
    case LE_EXPR:
2979
      return COMPCODE_LE;
2980
    case GT_EXPR:
2981
      return COMPCODE_GT;
2982
    case NE_EXPR:
2983
      return COMPCODE_NE;
2984
    case GE_EXPR:
2985
      return COMPCODE_GE;
2986
    case ORDERED_EXPR:
2987
      return COMPCODE_ORD;
2988
    case UNORDERED_EXPR:
2989
      return COMPCODE_UNORD;
2990
    case UNLT_EXPR:
2991
      return COMPCODE_UNLT;
2992
    case UNEQ_EXPR:
2993
      return COMPCODE_UNEQ;
2994
    case UNLE_EXPR:
2995
      return COMPCODE_UNLE;
2996
    case UNGT_EXPR:
2997
      return COMPCODE_UNGT;
2998
    case LTGT_EXPR:
2999
      return COMPCODE_LTGT;
3000
    case UNGE_EXPR:
3001
      return COMPCODE_UNGE;
3002
    default:
3003
      gcc_unreachable ();
3004
    }
3005
}
3006
 
3007
/* Convert a compcode bit-based encoding of a comparison operator back
3008
   to GCC's enum tree_code representation.  This function is the
3009
   inverse of comparison_to_compcode.  */
3010
 
3011
static enum tree_code
3012
compcode_to_comparison (enum comparison_code code)
3013
{
3014
  switch (code)
3015
    {
3016
    case COMPCODE_LT:
3017
      return LT_EXPR;
3018
    case COMPCODE_EQ:
3019
      return EQ_EXPR;
3020
    case COMPCODE_LE:
3021
      return LE_EXPR;
3022
    case COMPCODE_GT:
3023
      return GT_EXPR;
3024
    case COMPCODE_NE:
3025
      return NE_EXPR;
3026
    case COMPCODE_GE:
3027
      return GE_EXPR;
3028
    case COMPCODE_ORD:
3029
      return ORDERED_EXPR;
3030
    case COMPCODE_UNORD:
3031
      return UNORDERED_EXPR;
3032
    case COMPCODE_UNLT:
3033
      return UNLT_EXPR;
3034
    case COMPCODE_UNEQ:
3035
      return UNEQ_EXPR;
3036
    case COMPCODE_UNLE:
3037
      return UNLE_EXPR;
3038
    case COMPCODE_UNGT:
3039
      return UNGT_EXPR;
3040
    case COMPCODE_LTGT:
3041
      return LTGT_EXPR;
3042
    case COMPCODE_UNGE:
3043
      return UNGE_EXPR;
3044
    default:
3045
      gcc_unreachable ();
3046
    }
3047
}
3048
 
3049
/* Return a tree for the comparison which is the combination of
3050
   doing the AND or OR (depending on CODE) of the two operations LCODE
3051
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
3052
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
3053
   if this makes the transformation invalid.  */
3054
 
3055
tree
3056
combine_comparisons (location_t loc,
3057
                     enum tree_code code, enum tree_code lcode,
3058
                     enum tree_code rcode, tree truth_type,
3059
                     tree ll_arg, tree lr_arg)
3060
{
3061
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3062
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
3063
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
3064
  int compcode;
3065
 
3066
  switch (code)
3067
    {
3068
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3069
      compcode = lcompcode & rcompcode;
3070
      break;
3071
 
3072
    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3073
      compcode = lcompcode | rcompcode;
3074
      break;
3075
 
3076
    default:
3077
      return NULL_TREE;
3078
    }
3079
 
3080
  if (!honor_nans)
3081
    {
3082
      /* Eliminate unordered comparisons, as well as LTGT and ORD
3083
         which are not used unless the mode has NaNs.  */
3084
      compcode &= ~COMPCODE_UNORD;
3085
      if (compcode == COMPCODE_LTGT)
3086
        compcode = COMPCODE_NE;
3087
      else if (compcode == COMPCODE_ORD)
3088
        compcode = COMPCODE_TRUE;
3089
    }
3090
   else if (flag_trapping_math)
3091
     {
3092
        /* Check that the original operation and the optimized ones will trap
3093
           under the same condition.  */
3094
        bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3095
                     && (lcompcode != COMPCODE_EQ)
3096
                     && (lcompcode != COMPCODE_ORD);
3097
        bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3098
                     && (rcompcode != COMPCODE_EQ)
3099
                     && (rcompcode != COMPCODE_ORD);
3100
        bool trap = (compcode & COMPCODE_UNORD) == 0
3101
                    && (compcode != COMPCODE_EQ)
3102
                    && (compcode != COMPCODE_ORD);
3103
 
3104
        /* In a short-circuited boolean expression the LHS might be
3105
           such that the RHS, if evaluated, will never trap.  For
3106
           example, in ORD (x, y) && (x < y), we evaluate the RHS only
3107
           if neither x nor y is NaN.  (This is a mixed blessing: for
3108
           example, the expression above will never trap, hence
3109
           optimizing it to x < y would be invalid).  */
3110
        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3111
            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3112
          rtrap = false;
3113
 
3114
        /* If the comparison was short-circuited, and only the RHS
3115
           trapped, we may now generate a spurious trap.  */
3116
        if (rtrap && !ltrap
3117
            && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3118
          return NULL_TREE;
3119
 
3120
        /* If we changed the conditions that cause a trap, we lose.  */
3121
        if ((ltrap || rtrap) != trap)
3122
          return NULL_TREE;
3123
      }
3124
 
3125
  if (compcode == COMPCODE_TRUE)
3126
    return constant_boolean_node (true, truth_type);
3127
  else if (compcode == COMPCODE_FALSE)
3128
    return constant_boolean_node (false, truth_type);
3129
  else
3130
    {
3131
      enum tree_code tcode;
3132
 
3133
      tcode = compcode_to_comparison ((enum comparison_code) compcode);
3134
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3135
    }
3136
}
3137
 
3138
/* Return nonzero if two operands (typically of the same tree node)
3139
   are necessarily equal.  If either argument has side-effects this
3140
   function returns zero.  FLAGS modifies behavior as follows:
3141
 
3142
   If OEP_ONLY_CONST is set, only return nonzero for constants.
3143
   This function tests whether the operands are indistinguishable;
3144
   it does not test whether they are equal using C's == operation.
3145
   The distinction is important for IEEE floating point, because
3146
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3147
   (2) two NaNs may be indistinguishable, but NaN!=NaN.
3148
 
3149
   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3150
   even though it may hold multiple values during a function.
3151
   This is because a GCC tree node guarantees that nothing else is
3152
   executed between the evaluation of its "operands" (which may often
3153
   be evaluated in arbitrary order).  Hence if the operands themselves
3154
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3155
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
3156
   unset means assuming isochronic (or instantaneous) tree equivalence.
3157
   Unless comparing arbitrary expression trees, such as from different
3158
   statements, this flag can usually be left unset.
3159
 
3160
   If OEP_PURE_SAME is set, then pure functions with identical arguments
3161
   are considered the same.  It is used when the caller has other ways
3162
   to ensure that global memory is unchanged in between.  */
3163
 
3164
int
3165
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3166
{
3167
  /* If either is ERROR_MARK, they aren't equal.  */
3168
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3169
      || TREE_TYPE (arg0) == error_mark_node
3170
      || TREE_TYPE (arg1) == error_mark_node)
3171
    return 0;
3172
 
3173
  /* Check equality of integer constants before bailing out due to
3174
     precision differences.  */
3175
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3176
    return tree_int_cst_equal (arg0, arg1);
3177
 
3178
  /* If both types don't have the same signedness, then we can't consider
3179
     them equal.  We must check this before the STRIP_NOPS calls
3180
     because they may change the signedness of the arguments.  As pointers
3181
     strictly don't have a signedness, require either two pointers or
3182
     two non-pointers as well.  */
3183
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3184
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3185
    return 0;
3186
 
3187
  /* We cannot consider pointers to different address space equal.  */
3188
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3189
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3190
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3191
    return 0;
3192
 
3193
  /* If both types don't have the same precision, then it is not safe
3194
     to strip NOPs.  */
3195
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3196
    return 0;
3197
 
3198
  STRIP_NOPS (arg0);
3199
  STRIP_NOPS (arg1);
3200
 
3201
  /* In case both args are comparisons but with different comparison
3202
     code, try to swap the comparison operands of one arg to produce
3203
     a match and compare that variant.  */
3204
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3205
      && COMPARISON_CLASS_P (arg0)
3206
      && COMPARISON_CLASS_P (arg1))
3207
    {
3208
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3209
 
3210
      if (TREE_CODE (arg0) == swap_code)
3211
        return operand_equal_p (TREE_OPERAND (arg0, 0),
3212
                                TREE_OPERAND (arg1, 1), flags)
3213
               && operand_equal_p (TREE_OPERAND (arg0, 1),
3214
                                   TREE_OPERAND (arg1, 0), flags);
3215
    }
3216
 
3217
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218
      /* This is needed for conversions and for COMPONENT_REF.
3219
         Might as well play it safe and always test this.  */
3220
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3221
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3222
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3223
    return 0;
3224
 
3225
  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3226
     We don't care about side effects in that case because the SAVE_EXPR
3227
     takes care of that for us. In all other cases, two expressions are
3228
     equal if they have no side effects.  If we have two identical
3229
     expressions with side effects that should be treated the same due
3230
     to the only side effects being identical SAVE_EXPR's, that will
3231
     be detected in the recursive calls below.  */
3232
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3233
      && (TREE_CODE (arg0) == SAVE_EXPR
3234
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3235
    return 1;
3236
 
3237
  /* Next handle constant cases, those for which we can return 1 even
3238
     if ONLY_CONST is set.  */
3239
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3240
    switch (TREE_CODE (arg0))
3241
      {
3242
      case INTEGER_CST:
3243
        return tree_int_cst_equal (arg0, arg1);
3244
 
3245
      case FIXED_CST:
3246
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3247
                                       TREE_FIXED_CST (arg1));
3248
 
3249
      case REAL_CST:
3250
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3251
                                   TREE_REAL_CST (arg1)))
3252
          return 1;
3253
 
3254
 
3255
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3256
          {
3257
            /* If we do not distinguish between signed and unsigned zero,
3258
               consider them equal.  */
3259
            if (real_zerop (arg0) && real_zerop (arg1))
3260
              return 1;
3261
          }
3262
        return 0;
3263
 
3264
      case VECTOR_CST:
3265
        {
3266
          tree v1, v2;
3267
 
3268
          v1 = TREE_VECTOR_CST_ELTS (arg0);
3269
          v2 = TREE_VECTOR_CST_ELTS (arg1);
3270
          while (v1 && v2)
3271
            {
3272
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3273
                                    flags))
3274
                return 0;
3275
              v1 = TREE_CHAIN (v1);
3276
              v2 = TREE_CHAIN (v2);
3277
            }
3278
 
3279
          return v1 == v2;
3280
        }
3281
 
3282
      case COMPLEX_CST:
3283
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3284
                                 flags)
3285
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3286
                                    flags));
3287
 
3288
      case STRING_CST:
3289
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3290
                && ! memcmp (TREE_STRING_POINTER (arg0),
3291
                              TREE_STRING_POINTER (arg1),
3292
                              TREE_STRING_LENGTH (arg0)));
3293
 
3294
      case ADDR_EXPR:
3295
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3296
                                0);
3297
      default:
3298
        break;
3299
      }
3300
 
3301
  if (flags & OEP_ONLY_CONST)
3302
    return 0;
3303
 
3304
/* Define macros to test an operand from arg0 and arg1 for equality and a
3305
   variant that allows null and views null as being different from any
3306
   non-null value.  In the latter case, if either is null, the both
3307
   must be; otherwise, do the normal comparison.  */
3308
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),     \
3309
                                    TREE_OPERAND (arg1, N), flags)
3310
 
3311
#define OP_SAME_WITH_NULL(N)                            \
3312
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3313
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3314
 
3315
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3316
    {
3317
    case tcc_unary:
3318
      /* Two conversions are equal only if signedness and modes match.  */
3319
      switch (TREE_CODE (arg0))
3320
        {
3321
        CASE_CONVERT:
3322
        case FIX_TRUNC_EXPR:
3323
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3324
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3325
            return 0;
3326
          break;
3327
        default:
3328
          break;
3329
        }
3330
 
3331
      return OP_SAME (0);
3332
 
3333
 
3334
    case tcc_comparison:
3335
    case tcc_binary:
3336
      if (OP_SAME (0) && OP_SAME (1))
3337
        return 1;
3338
 
3339
      /* For commutative ops, allow the other order.  */
3340
      return (commutative_tree_code (TREE_CODE (arg0))
3341
              && operand_equal_p (TREE_OPERAND (arg0, 0),
3342
                                  TREE_OPERAND (arg1, 1), flags)
3343
              && operand_equal_p (TREE_OPERAND (arg0, 1),
3344
                                  TREE_OPERAND (arg1, 0), flags));
3345
 
3346
    case tcc_reference:
3347
      /* If either of the pointer (or reference) expressions we are
3348
         dereferencing contain a side effect, these cannot be equal.  */
3349
      if (TREE_SIDE_EFFECTS (arg0)
3350
          || TREE_SIDE_EFFECTS (arg1))
3351
        return 0;
3352
 
3353
      switch (TREE_CODE (arg0))
3354
        {
3355
        case INDIRECT_REF:
3356
        case ALIGN_INDIRECT_REF:
3357
        case MISALIGNED_INDIRECT_REF:
3358
        case REALPART_EXPR:
3359
        case IMAGPART_EXPR:
3360
          return OP_SAME (0);
3361
 
3362
        case ARRAY_REF:
3363
        case ARRAY_RANGE_REF:
3364
          /* Operands 2 and 3 may be null.
3365
             Compare the array index by value if it is constant first as we
3366
             may have different types but same value here.  */
3367
          return (OP_SAME (0)
3368
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3369
                                          TREE_OPERAND (arg1, 1))
3370
                      || OP_SAME (1))
3371
                  && OP_SAME_WITH_NULL (2)
3372
                  && OP_SAME_WITH_NULL (3));
3373
 
3374
        case COMPONENT_REF:
3375
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3376
             may be NULL when we're called to compare MEM_EXPRs.  */
3377
          return OP_SAME_WITH_NULL (0)
3378
                 && OP_SAME (1)
3379
                 && OP_SAME_WITH_NULL (2);
3380
 
3381
        case BIT_FIELD_REF:
3382
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3383
 
3384
        default:
3385
          return 0;
3386
        }
3387
 
3388
    case tcc_expression:
3389
      switch (TREE_CODE (arg0))
3390
        {
3391
        case ADDR_EXPR:
3392
        case TRUTH_NOT_EXPR:
3393
          return OP_SAME (0);
3394
 
3395
        case TRUTH_ANDIF_EXPR:
3396
        case TRUTH_ORIF_EXPR:
3397
          return OP_SAME (0) && OP_SAME (1);
3398
 
3399
        case TRUTH_AND_EXPR:
3400
        case TRUTH_OR_EXPR:
3401
        case TRUTH_XOR_EXPR:
3402
          if (OP_SAME (0) && OP_SAME (1))
3403
            return 1;
3404
 
3405
          /* Otherwise take into account this is a commutative operation.  */
3406
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
3407
                                   TREE_OPERAND (arg1, 1), flags)
3408
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
3409
                                      TREE_OPERAND (arg1, 0), flags));
3410
 
3411
        case COND_EXPR:
3412
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3413
 
3414
        default:
3415
          return 0;
3416
        }
3417
 
3418
    case tcc_vl_exp:
3419
      switch (TREE_CODE (arg0))
3420
        {
3421
        case CALL_EXPR:
3422
          /* If the CALL_EXPRs call different functions, then they
3423
             clearly can not be equal.  */
3424
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3425
                                 flags))
3426
            return 0;
3427
 
3428
          {
3429
            unsigned int cef = call_expr_flags (arg0);
3430
            if (flags & OEP_PURE_SAME)
3431
              cef &= ECF_CONST | ECF_PURE;
3432
            else
3433
              cef &= ECF_CONST;
3434
            if (!cef)
3435
              return 0;
3436
          }
3437
 
3438
          /* Now see if all the arguments are the same.  */
3439
          {
3440
            const_call_expr_arg_iterator iter0, iter1;
3441
            const_tree a0, a1;
3442
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
3443
                   a1 = first_const_call_expr_arg (arg1, &iter1);
3444
                 a0 && a1;
3445
                 a0 = next_const_call_expr_arg (&iter0),
3446
                   a1 = next_const_call_expr_arg (&iter1))
3447
              if (! operand_equal_p (a0, a1, flags))
3448
                return 0;
3449
 
3450
            /* If we get here and both argument lists are exhausted
3451
               then the CALL_EXPRs are equal.  */
3452
            return ! (a0 || a1);
3453
          }
3454
        default:
3455
          return 0;
3456
        }
3457
 
3458
    case tcc_declaration:
3459
      /* Consider __builtin_sqrt equal to sqrt.  */
3460
      return (TREE_CODE (arg0) == FUNCTION_DECL
3461
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3462
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3463
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3464
 
3465
    default:
3466
      return 0;
3467
    }
3468
 
3469
#undef OP_SAME
3470
#undef OP_SAME_WITH_NULL
3471
}
3472
 
3473
/* Similar to operand_equal_p, but see if ARG0 might have been made by
3474
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3475
 
3476
   When in doubt, return 0.  */
3477
 
3478
static int
3479
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3480
{
3481
  int unsignedp1, unsignedpo;
3482
  tree primarg0, primarg1, primother;
3483
  unsigned int correct_width;
3484
 
3485
  if (operand_equal_p (arg0, arg1, 0))
3486
    return 1;
3487
 
3488
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3489
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3490
    return 0;
3491
 
3492
  /* Discard any conversions that don't change the modes of ARG0 and ARG1
3493
     and see if the inner values are the same.  This removes any
3494
     signedness comparison, which doesn't matter here.  */
3495
  primarg0 = arg0, primarg1 = arg1;
3496
  STRIP_NOPS (primarg0);
3497
  STRIP_NOPS (primarg1);
3498
  if (operand_equal_p (primarg0, primarg1, 0))
3499
    return 1;
3500
 
3501
  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3502
     actual comparison operand, ARG0.
3503
 
3504
     First throw away any conversions to wider types
3505
     already present in the operands.  */
3506
 
3507
  primarg1 = get_narrower (arg1, &unsignedp1);
3508
  primother = get_narrower (other, &unsignedpo);
3509
 
3510
  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3511
  if (unsignedp1 == unsignedpo
3512
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3513
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3514
    {
3515
      tree type = TREE_TYPE (arg0);
3516
 
3517
      /* Make sure shorter operand is extended the right way
3518
         to match the longer operand.  */
3519
      primarg1 = fold_convert (signed_or_unsigned_type_for
3520
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3521
 
3522
      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3523
        return 1;
3524
    }
3525
 
3526
  return 0;
3527
}
3528
 
3529
/* See if ARG is an expression that is either a comparison or is performing
3530
   arithmetic on comparisons.  The comparisons must only be comparing
3531
   two different values, which will be stored in *CVAL1 and *CVAL2; if
3532
   they are nonzero it means that some operands have already been found.
3533
   No variables may be used anywhere else in the expression except in the
3534
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3535
   the expression and save_expr needs to be called with CVAL1 and CVAL2.
3536
 
3537
   If this is true, return 1.  Otherwise, return zero.  */
3538
 
3539
static int
3540
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3541
{
3542
  enum tree_code code = TREE_CODE (arg);
3543
  enum tree_code_class tclass = TREE_CODE_CLASS (code);
3544
 
3545
  /* We can handle some of the tcc_expression cases here.  */
3546
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3547
    tclass = tcc_unary;
3548
  else if (tclass == tcc_expression
3549
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3550
               || code == COMPOUND_EXPR))
3551
    tclass = tcc_binary;
3552
 
3553
  else if (tclass == tcc_expression && code == SAVE_EXPR
3554
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3555
    {
3556
      /* If we've already found a CVAL1 or CVAL2, this expression is
3557
         two complex to handle.  */
3558
      if (*cval1 || *cval2)
3559
        return 0;
3560
 
3561
      tclass = tcc_unary;
3562
      *save_p = 1;
3563
    }
3564
 
3565
  switch (tclass)
3566
    {
3567
    case tcc_unary:
3568
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3569
 
3570
    case tcc_binary:
3571
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3572
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
3573
                                      cval1, cval2, save_p));
3574
 
3575
    case tcc_constant:
3576
      return 1;
3577
 
3578
    case tcc_expression:
3579
      if (code == COND_EXPR)
3580
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3581
                                     cval1, cval2, save_p)
3582
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
3583
                                        cval1, cval2, save_p)
3584
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
3585
                                        cval1, cval2, save_p));
3586
      return 0;
3587
 
3588
    case tcc_comparison:
3589
      /* First see if we can handle the first operand, then the second.  For
3590
         the second operand, we know *CVAL1 can't be zero.  It must be that
3591
         one side of the comparison is each of the values; test for the
3592
         case where this isn't true by failing if the two operands
3593
         are the same.  */
3594
 
3595
      if (operand_equal_p (TREE_OPERAND (arg, 0),
3596
                           TREE_OPERAND (arg, 1), 0))
3597
        return 0;
3598
 
3599
      if (*cval1 == 0)
3600
        *cval1 = TREE_OPERAND (arg, 0);
3601
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3602
        ;
3603
      else if (*cval2 == 0)
3604
        *cval2 = TREE_OPERAND (arg, 0);
3605
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3606
        ;
3607
      else
3608
        return 0;
3609
 
3610
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3611
        ;
3612
      else if (*cval2 == 0)
3613
        *cval2 = TREE_OPERAND (arg, 1);
3614
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3615
        ;
3616
      else
3617
        return 0;
3618
 
3619
      return 1;
3620
 
3621
    default:
3622
      return 0;
3623
    }
3624
}
3625
 
3626
/* ARG is a tree that is known to contain just arithmetic operations and
3627
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
3628
   any occurrence of OLD0 as an operand of a comparison and likewise for
3629
   NEW1 and OLD1.  */
3630
 
3631
static tree
3632
eval_subst (location_t loc, tree arg, tree old0, tree new0,
3633
            tree old1, tree new1)
3634
{
3635
  tree type = TREE_TYPE (arg);
3636
  enum tree_code code = TREE_CODE (arg);
3637
  enum tree_code_class tclass = TREE_CODE_CLASS (code);
3638
 
3639
  /* We can handle some of the tcc_expression cases here.  */
3640
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3641
    tclass = tcc_unary;
3642
  else if (tclass == tcc_expression
3643
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3644
    tclass = tcc_binary;
3645
 
3646
  switch (tclass)
3647
    {
3648
    case tcc_unary:
3649
      return fold_build1_loc (loc, code, type,
3650
                          eval_subst (loc, TREE_OPERAND (arg, 0),
3651
                                      old0, new0, old1, new1));
3652
 
3653
    case tcc_binary:
3654
      return fold_build2_loc (loc, code, type,
3655
                          eval_subst (loc, TREE_OPERAND (arg, 0),
3656
                                      old0, new0, old1, new1),
3657
                          eval_subst (loc, TREE_OPERAND (arg, 1),
3658
                                      old0, new0, old1, new1));
3659
 
3660
    case tcc_expression:
3661
      switch (code)
3662
        {
3663
        case SAVE_EXPR:
3664
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3665
                             old1, new1);
3666
 
3667
        case COMPOUND_EXPR:
3668
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3669
                             old1, new1);
3670
 
3671
        case COND_EXPR:
3672
          return fold_build3_loc (loc, code, type,
3673
                              eval_subst (loc, TREE_OPERAND (arg, 0),
3674
                                          old0, new0, old1, new1),
3675
                              eval_subst (loc, TREE_OPERAND (arg, 1),
3676
                                          old0, new0, old1, new1),
3677
                              eval_subst (loc, TREE_OPERAND (arg, 2),
3678
                                          old0, new0, old1, new1));
3679
        default:
3680
          break;
3681
        }
3682
      /* Fall through - ???  */
3683
 
3684
    case tcc_comparison:
3685
      {
3686
        tree arg0 = TREE_OPERAND (arg, 0);
3687
        tree arg1 = TREE_OPERAND (arg, 1);
3688
 
3689
        /* We need to check both for exact equality and tree equality.  The
3690
           former will be true if the operand has a side-effect.  In that
3691
           case, we know the operand occurred exactly once.  */
3692
 
3693
        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3694
          arg0 = new0;
3695
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3696
          arg0 = new1;
3697
 
3698
        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3699
          arg1 = new0;
3700
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3701
          arg1 = new1;
3702
 
3703
        return fold_build2_loc (loc, code, type, arg0, arg1);
3704
      }
3705
 
3706
    default:
3707
      return arg;
3708
    }
3709
}
3710
 
3711
/* Return a tree for the case when the result of an expression is RESULT
3712
   converted to TYPE and OMITTED was previously an operand of the expression
3713
   but is now not needed (e.g., we folded OMITTED * 0).
3714
 
3715
   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3716
   the conversion of RESULT to TYPE.  */
3717
 
3718
tree
3719
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3720
{
3721
  tree t = fold_convert_loc (loc, type, result);
3722
 
3723
  /* If the resulting operand is an empty statement, just return the omitted
3724
     statement casted to void. */
3725
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3726
    {
3727
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3728
      goto omit_one_operand_exit;
3729
    }
3730
 
3731
  if (TREE_SIDE_EFFECTS (omitted))
3732
    {
3733
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3734
      goto omit_one_operand_exit;
3735
    }
3736
 
3737
  return non_lvalue_loc (loc, t);
3738
 
3739
 omit_one_operand_exit:
3740
  protected_set_expr_location (t, loc);
3741
  return t;
3742
}
3743
 
3744
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3745
 
3746
static tree
3747
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3748
                               tree omitted)
3749
{
3750
  tree t = fold_convert_loc (loc, type, result);
3751
 
3752
  /* If the resulting operand is an empty statement, just return the omitted
3753
     statement casted to void. */
3754
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3755
    {
3756
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3757
      goto pedantic_omit_one_operand_exit;
3758
    }
3759
 
3760
  if (TREE_SIDE_EFFECTS (omitted))
3761
    {
3762
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3763
      goto pedantic_omit_one_operand_exit;
3764
    }
3765
 
3766
  return pedantic_non_lvalue_loc (loc, t);
3767
 
3768
 pedantic_omit_one_operand_exit:
3769
  protected_set_expr_location (t, loc);
3770
  return t;
3771
}
3772
 
3773
/* Return a tree for the case when the result of an expression is RESULT
3774
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3775
   of the expression but are now not needed.
3776
 
3777
   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3778
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3779
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
3780
   just do the conversion of RESULT to TYPE.  */
3781
 
3782
tree
3783
omit_two_operands_loc (location_t loc, tree type, tree result,
3784
                   tree omitted1, tree omitted2)
3785
{
3786
  tree t = fold_convert_loc (loc, type, result);
3787
 
3788
  if (TREE_SIDE_EFFECTS (omitted2))
3789
    {
3790
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
3791
      SET_EXPR_LOCATION (t, loc);
3792
    }
3793
  if (TREE_SIDE_EFFECTS (omitted1))
3794
    {
3795
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
3796
      SET_EXPR_LOCATION (t, loc);
3797
    }
3798
 
3799
  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3800
}
3801
 
3802
 
3803
/* Return a simplified tree node for the truth-negation of ARG.  This
3804
   never alters ARG itself.  We assume that ARG is an operation that
3805
   returns a truth value (0 or 1).
3806
 
3807
   FIXME: one would think we would fold the result, but it causes
3808
   problems with the dominator optimizer.  */
3809
 
3810
tree
3811
fold_truth_not_expr (location_t loc, tree arg)
3812
{
3813
  tree t, type = TREE_TYPE (arg);
3814
  enum tree_code code = TREE_CODE (arg);
3815
  location_t loc1, loc2;
3816
 
3817
  /* If this is a comparison, we can simply invert it, except for
3818
     floating-point non-equality comparisons, in which case we just
3819
     enclose a TRUTH_NOT_EXPR around what we have.  */
3820
 
3821
  if (TREE_CODE_CLASS (code) == tcc_comparison)
3822
    {
3823
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3824
      if (FLOAT_TYPE_P (op_type)
3825
          && flag_trapping_math
3826
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
3827
          && code != NE_EXPR && code != EQ_EXPR)
3828
        return NULL_TREE;
3829
 
3830
      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3831
      if (code == ERROR_MARK)
3832
        return NULL_TREE;
3833
 
3834
      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3835
      SET_EXPR_LOCATION (t, loc);
3836
      return t;
3837
    }
3838
 
3839
  switch (code)
3840
    {
3841
    case INTEGER_CST:
3842
      return constant_boolean_node (integer_zerop (arg), type);
3843
 
3844
    case TRUTH_AND_EXPR:
3845
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3846
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3847
      if (loc1 == UNKNOWN_LOCATION)
3848
        loc1 = loc;
3849
      if (loc2 == UNKNOWN_LOCATION)
3850
        loc2 = loc;
3851
      t = build2 (TRUTH_OR_EXPR, type,
3852
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3853
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3854
      break;
3855
 
3856
    case TRUTH_OR_EXPR:
3857
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3858
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3859
      if (loc1 == UNKNOWN_LOCATION)
3860
        loc1 = loc;
3861
      if (loc2 == UNKNOWN_LOCATION)
3862
        loc2 = loc;
3863
      t = build2 (TRUTH_AND_EXPR, type,
3864
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3865
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3866
      break;
3867
 
3868
    case TRUTH_XOR_EXPR:
3869
      /* Here we can invert either operand.  We invert the first operand
3870
         unless the second operand is a TRUTH_NOT_EXPR in which case our
3871
         result is the XOR of the first operand with the inside of the
3872
         negation of the second operand.  */
3873
 
3874
      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3875
        t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3876
                    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3877
      else
3878
        t = build2 (TRUTH_XOR_EXPR, type,
3879
                    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3880
                    TREE_OPERAND (arg, 1));
3881
      break;
3882
 
3883
    case TRUTH_ANDIF_EXPR:
3884
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3885
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3886
      if (loc1 == UNKNOWN_LOCATION)
3887
        loc1 = loc;
3888
      if (loc2 == UNKNOWN_LOCATION)
3889
        loc2 = loc;
3890
      t = build2 (TRUTH_ORIF_EXPR, type,
3891
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3892
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3893
      break;
3894
 
3895
    case TRUTH_ORIF_EXPR:
3896
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3897
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3898
      if (loc1 == UNKNOWN_LOCATION)
3899
        loc1 = loc;
3900
      if (loc2 == UNKNOWN_LOCATION)
3901
        loc2 = loc;
3902
      t = build2 (TRUTH_ANDIF_EXPR, type,
3903
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3904
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3905
      break;
3906
 
3907
    case TRUTH_NOT_EXPR:
3908
      return TREE_OPERAND (arg, 0);
3909
 
3910
    case COND_EXPR:
3911
      {
3912
        tree arg1 = TREE_OPERAND (arg, 1);
3913
        tree arg2 = TREE_OPERAND (arg, 2);
3914
 
3915
        loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3916
        loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3917
        if (loc1 == UNKNOWN_LOCATION)
3918
          loc1 = loc;
3919
        if (loc2 == UNKNOWN_LOCATION)
3920
          loc2 = loc;
3921
 
3922
        /* A COND_EXPR may have a throw as one operand, which
3923
           then has void type.  Just leave void operands
3924
           as they are.  */
3925
        t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3926
                    VOID_TYPE_P (TREE_TYPE (arg1))
3927
                    ? arg1 : invert_truthvalue_loc (loc1, arg1),
3928
                    VOID_TYPE_P (TREE_TYPE (arg2))
3929
                    ? arg2 : invert_truthvalue_loc (loc2, arg2));
3930
        break;
3931
      }
3932
 
3933
    case COMPOUND_EXPR:
3934
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3935
      if (loc1 == UNKNOWN_LOCATION)
3936
        loc1 = loc;
3937
      t = build2 (COMPOUND_EXPR, type,
3938
                  TREE_OPERAND (arg, 0),
3939
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3940
      break;
3941
 
3942
    case NON_LVALUE_EXPR:
3943
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3944
      if (loc1 == UNKNOWN_LOCATION)
3945
        loc1 = loc;
3946
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3947
 
3948
    CASE_CONVERT:
3949
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3950
        {
3951
          t = build1 (TRUTH_NOT_EXPR, type, arg);
3952
          break;
3953
        }
3954
 
3955
      /* ... fall through ...  */
3956
 
3957
    case FLOAT_EXPR:
3958
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3959
      if (loc1 == UNKNOWN_LOCATION)
3960
        loc1 = loc;
3961
      t = build1 (TREE_CODE (arg), type,
3962
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3963
      break;
3964
 
3965
    case BIT_AND_EXPR:
3966
      if (!integer_onep (TREE_OPERAND (arg, 1)))
3967
        return NULL_TREE;
3968
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3969
      break;
3970
 
3971
    case SAVE_EXPR:
3972
      t = build1 (TRUTH_NOT_EXPR, type, arg);
3973
      break;
3974
 
3975
    case CLEANUP_POINT_EXPR:
3976
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3977
      if (loc1 == UNKNOWN_LOCATION)
3978
        loc1 = loc;
3979
      t = build1 (CLEANUP_POINT_EXPR, type,
3980
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3981
      break;
3982
 
3983
    default:
3984
      t = NULL_TREE;
3985
      break;
3986
    }
3987
 
3988
  if (t)
3989
    SET_EXPR_LOCATION (t, loc);
3990
 
3991
  return t;
3992
}
3993
 
3994
/* Return a simplified tree node for the truth-negation of ARG.  This
3995
   never alters ARG itself.  We assume that ARG is an operation that
3996
   returns a truth value (0 or 1).
3997
 
3998
   FIXME: one would think we would fold the result, but it causes
3999
   problems with the dominator optimizer.  */
4000
 
4001
tree
4002
invert_truthvalue_loc (location_t loc, tree arg)
4003
{
4004
  tree tem;
4005
 
4006
  if (TREE_CODE (arg) == ERROR_MARK)
4007
    return arg;
4008
 
4009
  tem = fold_truth_not_expr (loc, arg);
4010
  if (!tem)
4011
    {
4012
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4013
      SET_EXPR_LOCATION (tem, loc);
4014
    }
4015
 
4016
  return tem;
4017
}
4018
 
4019
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4020
   operands are another bit-wise operation with a common input.  If so,
4021
   distribute the bit operations to save an operation and possibly two if
4022
   constants are involved.  For example, convert
4023
        (A | B) & (A | C) into A | (B & C)
4024
   Further simplification will occur if B and C are constants.
4025
 
4026
   If this optimization cannot be done, 0 will be returned.  */
4027
 
4028
static tree
4029
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4030
                     tree arg0, tree arg1)
4031
{
4032
  tree common;
4033
  tree left, right;
4034
 
4035
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
4036
      || TREE_CODE (arg0) == code
4037
      || (TREE_CODE (arg0) != BIT_AND_EXPR
4038
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
4039
    return 0;
4040
 
4041
  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4042
    {
4043
      common = TREE_OPERAND (arg0, 0);
4044
      left = TREE_OPERAND (arg0, 1);
4045
      right = TREE_OPERAND (arg1, 1);
4046
    }
4047
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4048
    {
4049
      common = TREE_OPERAND (arg0, 0);
4050
      left = TREE_OPERAND (arg0, 1);
4051
      right = TREE_OPERAND (arg1, 0);
4052
    }
4053
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4054
    {
4055
      common = TREE_OPERAND (arg0, 1);
4056
      left = TREE_OPERAND (arg0, 0);
4057
      right = TREE_OPERAND (arg1, 1);
4058
    }
4059
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4060
    {
4061
      common = TREE_OPERAND (arg0, 1);
4062
      left = TREE_OPERAND (arg0, 0);
4063
      right = TREE_OPERAND (arg1, 0);
4064
    }
4065
  else
4066
    return 0;
4067
 
4068
  common = fold_convert_loc (loc, type, common);
4069
  left = fold_convert_loc (loc, type, left);
4070
  right = fold_convert_loc (loc, type, right);
4071
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4072
                      fold_build2_loc (loc, code, type, left, right));
4073
}
4074
 
4075
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
4076
   with code CODE.  This optimization is unsafe.  */
4077
static tree
4078
distribute_real_division (location_t loc, enum tree_code code, tree type,
4079
                          tree arg0, tree arg1)
4080
{
4081
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4082
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4083
 
4084
  /* (A / C) +- (B / C) -> (A +- B) / C.  */
4085
  if (mul0 == mul1
4086
      && operand_equal_p (TREE_OPERAND (arg0, 1),
4087
                       TREE_OPERAND (arg1, 1), 0))
4088
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4089
                        fold_build2_loc (loc, code, type,
4090
                                     TREE_OPERAND (arg0, 0),
4091
                                     TREE_OPERAND (arg1, 0)),
4092
                        TREE_OPERAND (arg0, 1));
4093
 
4094
  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
4095
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
4096
                       TREE_OPERAND (arg1, 0), 0)
4097
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4098
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4099
    {
4100
      REAL_VALUE_TYPE r0, r1;
4101
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4102
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4103
      if (!mul0)
4104
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4105
      if (!mul1)
4106
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4107
      real_arithmetic (&r0, code, &r0, &r1);
4108
      return fold_build2_loc (loc, MULT_EXPR, type,
4109
                          TREE_OPERAND (arg0, 0),
4110
                          build_real (type, r0));
4111
    }
4112
 
4113
  return NULL_TREE;
4114
}
4115
 
4116
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4117
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
4118
 
4119
static tree
4120
make_bit_field_ref (location_t loc, tree inner, tree type,
4121
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4122
{
4123
  tree result, bftype;
4124
 
4125
  if (bitpos == 0)
4126
    {
4127
      tree size = TYPE_SIZE (TREE_TYPE (inner));
4128
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4129
           || POINTER_TYPE_P (TREE_TYPE (inner)))
4130
          && host_integerp (size, 0)
4131
          && tree_low_cst (size, 0) == bitsize)
4132
        return fold_convert_loc (loc, type, inner);
4133
    }
4134
 
4135
  bftype = type;
4136
  if (TYPE_PRECISION (bftype) != bitsize
4137
      || TYPE_UNSIGNED (bftype) == !unsignedp)
4138
    bftype = build_nonstandard_integer_type (bitsize, 0);
4139
 
4140
  result = build3 (BIT_FIELD_REF, bftype, inner,
4141
                   size_int (bitsize), bitsize_int (bitpos));
4142
  SET_EXPR_LOCATION (result, loc);
4143
 
4144
  if (bftype != type)
4145
    result = fold_convert_loc (loc, type, result);
4146
 
4147
  return result;
4148
}
4149
 
4150
/* Optimize a bit-field compare.
4151
 
4152
   There are two cases:  First is a compare against a constant and the
4153
   second is a comparison of two items where the fields are at the same
4154
   bit position relative to the start of a chunk (byte, halfword, word)
4155
   large enough to contain it.  In these cases we can avoid the shift
4156
   implicit in bitfield extractions.
4157
 
4158
   For constants, we emit a compare of the shifted constant with the
4159
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4160
   compared.  For two fields at the same position, we do the ANDs with the
4161
   similar mask and compare the result of the ANDs.
4162
 
4163
   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4164
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
4165
   are the left and right operands of the comparison, respectively.
4166
 
4167
   If the optimization described above can be done, we return the resulting
4168
   tree.  Otherwise we return zero.  */
4169
 
4170
static tree
4171
optimize_bit_field_compare (location_t loc, enum tree_code code,
4172
                            tree compare_type, tree lhs, tree rhs)
4173
{
4174
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4175
  tree type = TREE_TYPE (lhs);
4176
  tree signed_type, unsigned_type;
4177
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
4178
  enum machine_mode lmode, rmode, nmode;
4179
  int lunsignedp, runsignedp;
4180
  int lvolatilep = 0, rvolatilep = 0;
4181
  tree linner, rinner = NULL_TREE;
4182
  tree mask;
4183
  tree offset;
4184
 
4185
  /* Get all the information about the extractions being done.  If the bit size
4186
     if the same as the size of the underlying object, we aren't doing an
4187
     extraction at all and so can do nothing.  We also don't want to
4188
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
4189
     then will no longer be able to replace it.  */
4190
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4191
                                &lunsignedp, &lvolatilep, false);
4192
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4193
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4194
    return 0;
4195
 
4196
 if (!const_p)
4197
   {
4198
     /* If this is not a constant, we can only do something if bit positions,
4199
        sizes, and signedness are the same.  */
4200
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4201
                                   &runsignedp, &rvolatilep, false);
4202
 
4203
     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4204
         || lunsignedp != runsignedp || offset != 0
4205
         || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4206
       return 0;
4207
   }
4208
 
4209
  /* See if we can find a mode to refer to this field.  We should be able to,
4210
     but fail if we can't.  */
4211
  nmode = get_best_mode (lbitsize, lbitpos,
4212
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4213
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4214
                                TYPE_ALIGN (TREE_TYPE (rinner))),
4215
                         word_mode, lvolatilep || rvolatilep);
4216
  if (nmode == VOIDmode)
4217
    return 0;
4218
 
4219
  /* Set signed and unsigned types of the precision of this mode for the
4220
     shifts below.  */
4221
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4222
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4223
 
4224
  /* Compute the bit position and size for the new reference and our offset
4225
     within it. If the new reference is the same size as the original, we
4226
     won't optimize anything, so return zero.  */
4227
  nbitsize = GET_MODE_BITSIZE (nmode);
4228
  nbitpos = lbitpos & ~ (nbitsize - 1);
4229
  lbitpos -= nbitpos;
4230
  if (nbitsize == lbitsize)
4231
    return 0;
4232
 
4233
  if (BYTES_BIG_ENDIAN)
4234
    lbitpos = nbitsize - lbitsize - lbitpos;
4235
 
4236
  /* Make the mask to be used against the extracted field.  */
4237
  mask = build_int_cst_type (unsigned_type, -1);
4238
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4239
  mask = const_binop (RSHIFT_EXPR, mask,
4240
                      size_int (nbitsize - lbitsize - lbitpos), 0);
4241
 
4242
  if (! const_p)
4243
    /* If not comparing with constant, just rework the comparison
4244
       and return.  */
4245
    return fold_build2_loc (loc, code, compare_type,
4246
                        fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4247
                                     make_bit_field_ref (loc, linner,
4248
                                                         unsigned_type,
4249
                                                         nbitsize, nbitpos,
4250
                                                         1),
4251
                                     mask),
4252
                        fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4253
                                     make_bit_field_ref (loc, rinner,
4254
                                                         unsigned_type,
4255
                                                         nbitsize, nbitpos,
4256
                                                         1),
4257
                                     mask));
4258
 
4259
  /* Otherwise, we are handling the constant case. See if the constant is too
4260
     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
4261
     this not only for its own sake, but to avoid having to test for this
4262
     error case below.  If we didn't, we might generate wrong code.
4263
 
4264
     For unsigned fields, the constant shifted right by the field length should
4265
     be all zero.  For signed fields, the high-order bits should agree with
4266
     the sign bit.  */
4267
 
4268
  if (lunsignedp)
4269
    {
4270
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
4271
                                        fold_convert_loc (loc,
4272
                                                          unsigned_type, rhs),
4273
                                        size_int (lbitsize), 0)))
4274
        {
4275
          warning (0, "comparison is always %d due to width of bit-field",
4276
                   code == NE_EXPR);
4277
          return constant_boolean_node (code == NE_EXPR, compare_type);
4278
        }
4279
    }
4280
  else
4281
    {
4282
      tree tem = const_binop (RSHIFT_EXPR,
4283
                              fold_convert_loc (loc, signed_type, rhs),
4284
                              size_int (lbitsize - 1), 0);
4285
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4286
        {
4287
          warning (0, "comparison is always %d due to width of bit-field",
4288
                   code == NE_EXPR);
4289
          return constant_boolean_node (code == NE_EXPR, compare_type);
4290
        }
4291
    }
4292
 
4293
  /* Single-bit compares should always be against zero.  */
4294
  if (lbitsize == 1 && ! integer_zerop (rhs))
4295
    {
4296
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4297
      rhs = build_int_cst (type, 0);
4298
    }
4299
 
4300
  /* Make a new bitfield reference, shift the constant over the
4301
     appropriate number of bits and mask it with the computed mask
4302
     (in case this was a signed field).  If we changed it, make a new one.  */
4303
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4304
  if (lvolatilep)
4305
    {
4306
      TREE_SIDE_EFFECTS (lhs) = 1;
4307
      TREE_THIS_VOLATILE (lhs) = 1;
4308
    }
4309
 
4310
  rhs = const_binop (BIT_AND_EXPR,
4311
                     const_binop (LSHIFT_EXPR,
4312
                                  fold_convert_loc (loc, unsigned_type, rhs),
4313
                                  size_int (lbitpos), 0),
4314
                     mask, 0);
4315
 
4316
  lhs = build2 (code, compare_type,
4317
                build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4318
                rhs);
4319
  SET_EXPR_LOCATION (lhs, loc);
4320
  return lhs;
4321
}
4322
 
4323
/* Subroutine for fold_truthop: decode a field reference.
4324
 
4325
   If EXP is a comparison reference, we return the innermost reference.
4326
 
4327
   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4328
   set to the starting bit number.
4329
 
4330
   If the innermost field can be completely contained in a mode-sized
4331
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4332
 
4333
   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4334
   otherwise it is not changed.
4335
 
4336
   *PUNSIGNEDP is set to the signedness of the field.
4337
 
4338
   *PMASK is set to the mask used.  This is either contained in a
4339
   BIT_AND_EXPR or derived from the width of the field.
4340
 
4341
   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4342
 
4343
   Return 0 if this is not a component reference or is one that we can't
4344
   do anything with.  */
4345
 
4346
static tree
4347
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4348
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4349
                        int *punsignedp, int *pvolatilep,
4350
                        tree *pmask, tree *pand_mask)
4351
{
4352
  tree outer_type = 0;
4353
  tree and_mask = 0;
4354
  tree mask, inner, offset;
4355
  tree unsigned_type;
4356
  unsigned int precision;
4357
 
4358
  /* All the optimizations using this function assume integer fields.
4359
     There are problems with FP fields since the type_for_size call
4360
     below can fail for, e.g., XFmode.  */
4361
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4362
    return 0;
4363
 
4364
  /* We are interested in the bare arrangement of bits, so strip everything
4365
     that doesn't affect the machine mode.  However, record the type of the
4366
     outermost expression if it may matter below.  */
4367
  if (CONVERT_EXPR_P (exp)
4368
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
4369
    outer_type = TREE_TYPE (exp);
4370
  STRIP_NOPS (exp);
4371
 
4372
  if (TREE_CODE (exp) == BIT_AND_EXPR)
4373
    {
4374
      and_mask = TREE_OPERAND (exp, 1);
4375
      exp = TREE_OPERAND (exp, 0);
4376
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4377
      if (TREE_CODE (and_mask) != INTEGER_CST)
4378
        return 0;
4379
    }
4380
 
4381
  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4382
                               punsignedp, pvolatilep, false);
4383
  if ((inner == exp && and_mask == 0)
4384
      || *pbitsize < 0 || offset != 0
4385
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4386
    return 0;
4387
 
4388
  /* If the number of bits in the reference is the same as the bitsize of
4389
     the outer type, then the outer type gives the signedness. Otherwise
4390
     (in case of a small bitfield) the signedness is unchanged.  */
4391
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4392
    *punsignedp = TYPE_UNSIGNED (outer_type);
4393
 
4394
  /* Compute the mask to access the bitfield.  */
4395
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4396
  precision = TYPE_PRECISION (unsigned_type);
4397
 
4398
  mask = build_int_cst_type (unsigned_type, -1);
4399
 
4400
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4401
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4402
 
4403
  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4404
  if (and_mask != 0)
4405
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4406
                        fold_convert_loc (loc, unsigned_type, and_mask), mask);
4407
 
4408
  *pmask = mask;
4409
  *pand_mask = and_mask;
4410
  return inner;
4411
}
4412
 
4413
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4414
   bit positions.  */
4415
 
4416
static int
4417
all_ones_mask_p (const_tree mask, int size)
4418
{
4419
  tree type = TREE_TYPE (mask);
4420
  unsigned int precision = TYPE_PRECISION (type);
4421
  tree tmask;
4422
 
4423
  tmask = build_int_cst_type (signed_type_for (type), -1);
4424
 
4425
  return
4426
    tree_int_cst_equal (mask,
4427
                        const_binop (RSHIFT_EXPR,
4428
                                     const_binop (LSHIFT_EXPR, tmask,
4429
                                                  size_int (precision - size),
4430
                                                  0),
4431
                                     size_int (precision - size), 0));
4432
}
4433
 
4434
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4435
   represents the sign bit of EXP's type.  If EXP represents a sign
4436
   or zero extension, also test VAL against the unextended type.
4437
   The return value is the (sub)expression whose sign bit is VAL,
4438
   or NULL_TREE otherwise.  */
4439
 
4440
static tree
4441
sign_bit_p (tree exp, const_tree val)
4442
{
4443
  unsigned HOST_WIDE_INT mask_lo, lo;
4444
  HOST_WIDE_INT mask_hi, hi;
4445
  int width;
4446
  tree t;
4447
 
4448
  /* Tree EXP must have an integral type.  */
4449
  t = TREE_TYPE (exp);
4450
  if (! INTEGRAL_TYPE_P (t))
4451
    return NULL_TREE;
4452
 
4453
  /* Tree VAL must be an integer constant.  */
4454
  if (TREE_CODE (val) != INTEGER_CST
4455
      || TREE_OVERFLOW (val))
4456
    return NULL_TREE;
4457
 
4458
  width = TYPE_PRECISION (t);
4459
  if (width > HOST_BITS_PER_WIDE_INT)
4460
    {
4461
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4462
      lo = 0;
4463
 
4464
      mask_hi = ((unsigned HOST_WIDE_INT) -1
4465
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4466
      mask_lo = -1;
4467
    }
4468
  else
4469
    {
4470
      hi = 0;
4471
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4472
 
4473
      mask_hi = 0;
4474
      mask_lo = ((unsigned HOST_WIDE_INT) -1
4475
                 >> (HOST_BITS_PER_WIDE_INT - width));
4476
    }
4477
 
4478
  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4479
     treat VAL as if it were unsigned.  */
4480
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4481
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4482
    return exp;
4483
 
4484
  /* Handle extension from a narrower type.  */
4485
  if (TREE_CODE (exp) == NOP_EXPR
4486
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4487
    return sign_bit_p (TREE_OPERAND (exp, 0), val);
4488
 
4489
  return NULL_TREE;
4490
}
4491
 
4492
/* Subroutine for fold_truthop: determine if an operand is simple enough
4493
   to be evaluated unconditionally.  */
4494
 
4495
static int
4496
simple_operand_p (const_tree exp)
4497
{
4498
  /* Strip any conversions that don't change the machine mode.  */
4499
  STRIP_NOPS (exp);
4500
 
4501
  return (CONSTANT_CLASS_P (exp)
4502
          || TREE_CODE (exp) == SSA_NAME
4503
          || (DECL_P (exp)
4504
              && ! TREE_ADDRESSABLE (exp)
4505
              && ! TREE_THIS_VOLATILE (exp)
4506
              && ! DECL_NONLOCAL (exp)
4507
              /* Don't regard global variables as simple.  They may be
4508
                 allocated in ways unknown to the compiler (shared memory,
4509
                 #pragma weak, etc).  */
4510
              && ! TREE_PUBLIC (exp)
4511
              && ! DECL_EXTERNAL (exp)
4512
              /* Loading a static variable is unduly expensive, but global
4513
                 registers aren't expensive.  */
4514
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4515
}
4516
 
4517
/* The following functions are subroutines to fold_range_test and allow it to
4518
   try to change a logical combination of comparisons into a range test.
4519
 
4520
   For example, both
4521
        X == 2 || X == 3 || X == 4 || X == 5
4522
   and
4523
        X >= 2 && X <= 5
4524
   are converted to
4525
        (unsigned) (X - 2) <= 3
4526
 
4527
   We describe each set of comparisons as being either inside or outside
4528
   a range, using a variable named like IN_P, and then describe the
4529
   range with a lower and upper bound.  If one of the bounds is omitted,
4530
   it represents either the highest or lowest value of the type.
4531
 
4532
   In the comments below, we represent a range by two numbers in brackets
4533
   preceded by a "+" to designate being inside that range, or a "-" to
4534
   designate being outside that range, so the condition can be inverted by
4535
   flipping the prefix.  An omitted bound is represented by a "-".  For
4536
   example, "- [-, 10]" means being outside the range starting at the lowest
4537
   possible value and ending at 10, in other words, being greater than 10.
4538
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4539
   always false.
4540
 
4541
   We set up things so that the missing bounds are handled in a consistent
4542
   manner so neither a missing bound nor "true" and "false" need to be
4543
   handled using a special case.  */
4544
 
4545
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4546
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4547
   and UPPER1_P are nonzero if the respective argument is an upper bound
4548
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4549
   must be specified for a comparison.  ARG1 will be converted to ARG0's
4550
   type if both are specified.  */
4551
 
4552
static tree
4553
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4554
             tree arg1, int upper1_p)
4555
{
4556
  tree tem;
4557
  int result;
4558
  int sgn0, sgn1;
4559
 
4560
  /* If neither arg represents infinity, do the normal operation.
4561
     Else, if not a comparison, return infinity.  Else handle the special
4562
     comparison rules. Note that most of the cases below won't occur, but
4563
     are handled for consistency.  */
4564
 
4565
  if (arg0 != 0 && arg1 != 0)
4566
    {
4567
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4568
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
4569
      STRIP_NOPS (tem);
4570
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4571
    }
4572
 
4573
  if (TREE_CODE_CLASS (code) != tcc_comparison)
4574
    return 0;
4575
 
4576
  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4577
     for neither.  In real maths, we cannot assume open ended ranges are
4578
     the same. But, this is computer arithmetic, where numbers are finite.
4579
     We can therefore make the transformation of any unbounded range with
4580
     the value Z, Z being greater than any representable number. This permits
4581
     us to treat unbounded ranges as equal.  */
4582
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4583
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4584
  switch (code)
4585
    {
4586
    case EQ_EXPR:
4587
      result = sgn0 == sgn1;
4588
      break;
4589
    case NE_EXPR:
4590
      result = sgn0 != sgn1;
4591
      break;
4592
    case LT_EXPR:
4593
      result = sgn0 < sgn1;
4594
      break;
4595
    case LE_EXPR:
4596
      result = sgn0 <= sgn1;
4597
      break;
4598
    case GT_EXPR:
4599
      result = sgn0 > sgn1;
4600
      break;
4601
    case GE_EXPR:
4602
      result = sgn0 >= sgn1;
4603
      break;
4604
    default:
4605
      gcc_unreachable ();
4606
    }
4607
 
4608
  return constant_boolean_node (result, type);
4609
}
4610
 
4611
/* Given EXP, a logical expression, set the range it is testing into
4612
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4613
   actually being tested.  *PLOW and *PHIGH will be made of the same
4614
   type as the returned expression.  If EXP is not a comparison, we
4615
   will most likely not be returning a useful value and range.  Set
4616
   *STRICT_OVERFLOW_P to true if the return value is only valid
4617
   because signed overflow is undefined; otherwise, do not change
4618
   *STRICT_OVERFLOW_P.  */
4619
 
4620
tree
4621
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4622
            bool *strict_overflow_p)
4623
{
4624
  enum tree_code code;
4625
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4626
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4627
  int in_p, n_in_p;
4628
  tree low, high, n_low, n_high;
4629
  location_t loc = EXPR_LOCATION (exp);
4630
 
4631
  /* Start with simply saying "EXP != 0" and then look at the code of EXP
4632
     and see if we can refine the range.  Some of the cases below may not
4633
     happen, but it doesn't seem worth worrying about this.  We "continue"
4634
     the outer loop when we've changed something; otherwise we "break"
4635
     the switch, which will "break" the while.  */
4636
 
4637
  in_p = 0;
4638
  low = high = build_int_cst (TREE_TYPE (exp), 0);
4639
 
4640
  while (1)
4641
    {
4642
      code = TREE_CODE (exp);
4643
      exp_type = TREE_TYPE (exp);
4644
 
4645
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4646
        {
4647
          if (TREE_OPERAND_LENGTH (exp) > 0)
4648
            arg0 = TREE_OPERAND (exp, 0);
4649
          if (TREE_CODE_CLASS (code) == tcc_comparison
4650
              || TREE_CODE_CLASS (code) == tcc_unary
4651
              || TREE_CODE_CLASS (code) == tcc_binary)
4652
            arg0_type = TREE_TYPE (arg0);
4653
          if (TREE_CODE_CLASS (code) == tcc_binary
4654
              || TREE_CODE_CLASS (code) == tcc_comparison
4655
              || (TREE_CODE_CLASS (code) == tcc_expression
4656
                  && TREE_OPERAND_LENGTH (exp) > 1))
4657
            arg1 = TREE_OPERAND (exp, 1);
4658
        }
4659
 
4660
      switch (code)
4661
        {
4662
        case TRUTH_NOT_EXPR:
4663
          in_p = ! in_p, exp = arg0;
4664
          continue;
4665
 
4666
        case EQ_EXPR: case NE_EXPR:
4667
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4668
          /* We can only do something if the range is testing for zero
4669
             and if the second operand is an integer constant.  Note that
4670
             saying something is "in" the range we make is done by
4671
             complementing IN_P since it will set in the initial case of
4672
             being not equal to zero; "out" is leaving it alone.  */
4673
          if (low == 0 || high == 0
4674
              || ! integer_zerop (low) || ! integer_zerop (high)
4675
              || TREE_CODE (arg1) != INTEGER_CST)
4676
            break;
4677
 
4678
          switch (code)
4679
            {
4680
            case NE_EXPR:  /* - [c, c]  */
4681
              low = high = arg1;
4682
              break;
4683
            case EQ_EXPR:  /* + [c, c]  */
4684
              in_p = ! in_p, low = high = arg1;
4685
              break;
4686
            case GT_EXPR:  /* - [-, c] */
4687
              low = 0, high = arg1;
4688
              break;
4689
            case GE_EXPR:  /* + [c, -] */
4690
              in_p = ! in_p, low = arg1, high = 0;
4691
              break;
4692
            case LT_EXPR:  /* - [c, -] */
4693
              low = arg1, high = 0;
4694
              break;
4695
            case LE_EXPR:  /* + [-, c] */
4696
              in_p = ! in_p, low = 0, high = arg1;
4697
              break;
4698
            default:
4699
              gcc_unreachable ();
4700
            }
4701
 
4702
          /* If this is an unsigned comparison, we also know that EXP is
4703
             greater than or equal to zero.  We base the range tests we make
4704
             on that fact, so we record it here so we can parse existing
4705
             range tests.  We test arg0_type since often the return type
4706
             of, e.g. EQ_EXPR, is boolean.  */
4707
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4708
            {
4709
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
4710
                                  in_p, low, high, 1,
4711
                                  build_int_cst (arg0_type, 0),
4712
                                  NULL_TREE))
4713
                break;
4714
 
4715
              in_p = n_in_p, low = n_low, high = n_high;
4716
 
4717
              /* If the high bound is missing, but we have a nonzero low
4718
                 bound, reverse the range so it goes from zero to the low bound
4719
                 minus 1.  */
4720
              if (high == 0 && low && ! integer_zerop (low))
4721
                {
4722
                  in_p = ! in_p;
4723
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4724
                                      integer_one_node, 0);
4725
                  low = build_int_cst (arg0_type, 0);
4726
                }
4727
            }
4728
 
4729
          exp = arg0;
4730
          continue;
4731
 
4732
        case NEGATE_EXPR:
4733
          /* (-x) IN [a,b] -> x in [-b, -a]  */
4734
          n_low = range_binop (MINUS_EXPR, exp_type,
4735
                               build_int_cst (exp_type, 0),
4736
                               0, high, 1);
4737
          n_high = range_binop (MINUS_EXPR, exp_type,
4738
                                build_int_cst (exp_type, 0),
4739
                                0, low, 0);
4740 378 julius
          if (n_high != 0 && TREE_OVERFLOW (n_high))
4741
            break;
4742
          goto normalize;
4743 280 jeremybenn
 
4744
        case BIT_NOT_EXPR:
4745
          /* ~ X -> -X - 1  */
4746
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4747
                        build_int_cst (exp_type, 1));
4748
          SET_EXPR_LOCATION (exp, loc);
4749
          continue;
4750
 
4751
        case PLUS_EXPR:  case MINUS_EXPR:
4752
          if (TREE_CODE (arg1) != INTEGER_CST)
4753
            break;
4754
 
4755
          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4756
             move a constant to the other side.  */
4757
          if (!TYPE_UNSIGNED (arg0_type)
4758
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4759
            break;
4760
 
4761
          /* If EXP is signed, any overflow in the computation is undefined,
4762
             so we don't worry about it so long as our computations on
4763
             the bounds don't overflow.  For unsigned, overflow is defined
4764
             and this is exactly the right thing.  */
4765
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4766
                               arg0_type, low, 0, arg1, 0);
4767
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4768
                                arg0_type, high, 1, arg1, 0);
4769
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
4770
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
4771
            break;
4772
 
4773
          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4774
            *strict_overflow_p = true;
4775
 
4776 378 julius
        normalize:
4777 280 jeremybenn
          /* Check for an unsigned range which has wrapped around the maximum
4778
             value thus making n_high < n_low, and normalize it.  */
4779
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4780
            {
4781
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4782
                                 integer_one_node, 0);
4783
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4784
                                  integer_one_node, 0);
4785
 
4786
              /* If the range is of the form +/- [ x+1, x ], we won't
4787
                 be able to normalize it.  But then, it represents the
4788
                 whole range or the empty set, so make it
4789
                 +/- [ -, - ].  */
4790
              if (tree_int_cst_equal (n_low, low)
4791
                  && tree_int_cst_equal (n_high, high))
4792
                low = high = 0;
4793
              else
4794
                in_p = ! in_p;
4795
            }
4796
          else
4797
            low = n_low, high = n_high;
4798
 
4799
          exp = arg0;
4800
          continue;
4801
 
4802
        CASE_CONVERT: case NON_LVALUE_EXPR:
4803
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4804
            break;
4805
 
4806
          if (! INTEGRAL_TYPE_P (arg0_type)
4807
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
4808
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4809
            break;
4810
 
4811
          n_low = low, n_high = high;
4812
 
4813
          if (n_low != 0)
4814
            n_low = fold_convert_loc (loc, arg0_type, n_low);
4815
 
4816
          if (n_high != 0)
4817
            n_high = fold_convert_loc (loc, arg0_type, n_high);
4818
 
4819
 
4820
          /* If we're converting arg0 from an unsigned type, to exp,
4821
             a signed type,  we will be doing the comparison as unsigned.
4822
             The tests above have already verified that LOW and HIGH
4823
             are both positive.
4824
 
4825
             So we have to ensure that we will handle large unsigned
4826
             values the same way that the current signed bounds treat
4827
             negative values.  */
4828
 
4829
          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4830
            {
4831
              tree high_positive;
4832
              tree equiv_type;
4833
              /* For fixed-point modes, we need to pass the saturating flag
4834
                 as the 2nd parameter.  */
4835
              if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4836
                equiv_type = lang_hooks.types.type_for_mode
4837
                             (TYPE_MODE (arg0_type),
4838
                              TYPE_SATURATING (arg0_type));
4839
              else
4840
                equiv_type = lang_hooks.types.type_for_mode
4841
                             (TYPE_MODE (arg0_type), 1);
4842
 
4843
              /* A range without an upper bound is, naturally, unbounded.
4844
                 Since convert would have cropped a very large value, use
4845
                 the max value for the destination type.  */
4846
              high_positive
4847
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4848
                : TYPE_MAX_VALUE (arg0_type);
4849
 
4850
              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4851
                high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4852
                                             fold_convert_loc (loc, arg0_type,
4853
                                                               high_positive),
4854
                                             build_int_cst (arg0_type, 1));
4855
 
4856
              /* If the low bound is specified, "and" the range with the
4857
                 range for which the original unsigned value will be
4858
                 positive.  */
4859
              if (low != 0)
4860
                {
4861
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4862
                                      1, n_low, n_high, 1,
4863
                                      fold_convert_loc (loc, arg0_type,
4864
                                                        integer_zero_node),
4865
                                      high_positive))
4866
                    break;
4867
 
4868
                  in_p = (n_in_p == in_p);
4869
                }
4870
              else
4871
                {
4872
                  /* Otherwise, "or" the range with the range of the input
4873
                     that will be interpreted as negative.  */
4874
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4875
                                      0, n_low, n_high, 1,
4876
                                      fold_convert_loc (loc, arg0_type,
4877
                                                        integer_zero_node),
4878
                                      high_positive))
4879
                    break;
4880
 
4881
                  in_p = (in_p != n_in_p);
4882
                }
4883
            }
4884
 
4885
          exp = arg0;
4886
          low = n_low, high = n_high;
4887
          continue;
4888
 
4889
        default:
4890
          break;
4891
        }
4892
 
4893
      break;
4894
    }
4895
 
4896
  /* If EXP is a constant, we can evaluate whether this is true or false.  */
4897
  if (TREE_CODE (exp) == INTEGER_CST)
4898
    {
4899
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4900
                                                 exp, 0, low, 0))
4901
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4902
                                                    exp, 1, high, 1)));
4903
      low = high = 0;
4904
      exp = 0;
4905
    }
4906
 
4907
  *pin_p = in_p, *plow = low, *phigh = high;
4908
  return exp;
4909
}
4910
 
4911
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4912
   type, TYPE, return an expression to test if EXP is in (or out of, depending
4913
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
4914
 
4915
tree
4916
build_range_check (location_t loc, tree type, tree exp, int in_p,
4917
                   tree low, tree high)
4918
{
4919
  tree etype = TREE_TYPE (exp), value;
4920
 
4921
#ifdef HAVE_canonicalize_funcptr_for_compare
4922
  /* Disable this optimization for function pointer expressions
4923
     on targets that require function pointer canonicalization.  */
4924
  if (HAVE_canonicalize_funcptr_for_compare
4925
      && TREE_CODE (etype) == POINTER_TYPE
4926
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4927
    return NULL_TREE;
4928
#endif
4929
 
4930
  if (! in_p)
4931
    {
4932
      value = build_range_check (loc, type, exp, 1, low, high);
4933
      if (value != 0)
4934
        return invert_truthvalue_loc (loc, value);
4935
 
4936
      return 0;
4937
    }
4938
 
4939
  if (low == 0 && high == 0)
4940
    return build_int_cst (type, 1);
4941
 
4942
  if (low == 0)
4943
    return fold_build2_loc (loc, LE_EXPR, type, exp,
4944
                        fold_convert_loc (loc, etype, high));
4945
 
4946
  if (high == 0)
4947
    return fold_build2_loc (loc, GE_EXPR, type, exp,
4948
                        fold_convert_loc (loc, etype, low));
4949
 
4950
  if (operand_equal_p (low, high, 0))
4951
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
4952
                        fold_convert_loc (loc, etype, low));
4953
 
4954
  if (integer_zerop (low))
4955
    {
4956
      if (! TYPE_UNSIGNED (etype))
4957
        {
4958
          etype = unsigned_type_for (etype);
4959
          high = fold_convert_loc (loc, etype, high);
4960
          exp = fold_convert_loc (loc, etype, exp);
4961
        }
4962
      return build_range_check (loc, type, exp, 1, 0, high);
4963
    }
4964
 
4965
  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4966
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4967
    {
4968
      unsigned HOST_WIDE_INT lo;
4969
      HOST_WIDE_INT hi;
4970
      int prec;
4971
 
4972
      prec = TYPE_PRECISION (etype);
4973
      if (prec <= HOST_BITS_PER_WIDE_INT)
4974
        {
4975
          hi = 0;
4976
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4977
        }
4978
      else
4979
        {
4980
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4981
          lo = (unsigned HOST_WIDE_INT) -1;
4982
        }
4983
 
4984
      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4985
        {
4986
          if (TYPE_UNSIGNED (etype))
4987
            {
4988
              tree signed_etype = signed_type_for (etype);
4989
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4990
                etype
4991
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4992
              else
4993
                etype = signed_etype;
4994
              exp = fold_convert_loc (loc, etype, exp);
4995
            }
4996
          return fold_build2_loc (loc, GT_EXPR, type, exp,
4997
                              build_int_cst (etype, 0));
4998
        }
4999
    }
5000
 
5001
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5002
     This requires wrap-around arithmetics for the type of the expression.
5003
     First make sure that arithmetics in this type is valid, then make sure
5004
     that it wraps around.  */
5005
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5006
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
5007
                                            TYPE_UNSIGNED (etype));
5008
 
5009
  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5010
    {
5011
      tree utype, minv, maxv;
5012
 
5013
      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5014
         for the type in question, as we rely on this here.  */
5015
      utype = unsigned_type_for (etype);
5016
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5017
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5018
                          integer_one_node, 1);
5019
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5020
 
5021
      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5022
                                      minv, 1, maxv, 1)))
5023
        etype = utype;
5024
      else
5025
        return 0;
5026
    }
5027
 
5028
  high = fold_convert_loc (loc, etype, high);
5029
  low = fold_convert_loc (loc, etype, low);
5030
  exp = fold_convert_loc (loc, etype, exp);
5031
 
5032
  value = const_binop (MINUS_EXPR, high, low, 0);
5033
 
5034
 
5035
  if (POINTER_TYPE_P (etype))
5036
    {
5037
      if (value != 0 && !TREE_OVERFLOW (value))
5038
        {
5039
          low = fold_convert_loc (loc, sizetype, low);
5040
          low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5041
          return build_range_check (loc, type,
5042
                                    fold_build2_loc (loc, POINTER_PLUS_EXPR,
5043
                                                 etype, exp, low),
5044
                                    1, build_int_cst (etype, 0), value);
5045
        }
5046
      return 0;
5047
    }
5048
 
5049
  if (value != 0 && !TREE_OVERFLOW (value))
5050
    return build_range_check (loc, type,
5051
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5052
                              1, build_int_cst (etype, 0), value);
5053
 
5054
  return 0;
5055
}
5056
 
5057
/* Return the predecessor of VAL in its type, handling the infinite case.  */
5058
 
5059
static tree
5060
range_predecessor (tree val)
5061
{
5062
  tree type = TREE_TYPE (val);
5063
 
5064
  if (INTEGRAL_TYPE_P (type)
5065
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5066
    return 0;
5067
  else
5068
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5069
}
5070
 
5071
/* Return the successor of VAL in its type, handling the infinite case.  */
5072
 
5073
static tree
5074
range_successor (tree val)
5075
{
5076
  tree type = TREE_TYPE (val);
5077
 
5078
  if (INTEGRAL_TYPE_P (type)
5079
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5080
    return 0;
5081
  else
5082
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5083
}
5084
 
5085
/* Given two ranges, see if we can merge them into one.  Return 1 if we
5086
   can, 0 if we can't.  Set the output range into the specified parameters.  */
5087
 
5088
bool
5089
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5090
              tree high0, int in1_p, tree low1, tree high1)
5091
{
5092
  int no_overlap;
5093
  int subset;
5094
  int temp;
5095
  tree tem;
5096
  int in_p;
5097
  tree low, high;
5098
  int lowequal = ((low0 == 0 && low1 == 0)
5099
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5100
                                                low0, 0, low1, 0)));
5101
  int highequal = ((high0 == 0 && high1 == 0)
5102
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5103
                                                 high0, 1, high1, 1)));
5104
 
5105
  /* Make range 0 be the range that starts first, or ends last if they
5106
     start at the same value.  Swap them if it isn't.  */
5107
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5108
                                 low0, 0, low1, 0))
5109
      || (lowequal
5110
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
5111
                                        high1, 1, high0, 1))))
5112
    {
5113
      temp = in0_p, in0_p = in1_p, in1_p = temp;
5114
      tem = low0, low0 = low1, low1 = tem;
5115
      tem = high0, high0 = high1, high1 = tem;
5116
    }
5117
 
5118
  /* Now flag two cases, whether the ranges are disjoint or whether the
5119
     second range is totally subsumed in the first.  Note that the tests
5120
     below are simplified by the ones above.  */
5121
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5122
                                          high0, 1, low1, 0));
5123
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5124
                                      high1, 1, high0, 1));
5125
 
5126
  /* We now have four cases, depending on whether we are including or
5127
     excluding the two ranges.  */
5128
  if (in0_p && in1_p)
5129
    {
5130
      /* If they don't overlap, the result is false.  If the second range
5131
         is a subset it is the result.  Otherwise, the range is from the start
5132
         of the second to the end of the first.  */
5133
      if (no_overlap)
5134
        in_p = 0, low = high = 0;
5135
      else if (subset)
5136
        in_p = 1, low = low1, high = high1;
5137
      else
5138
        in_p = 1, low = low1, high = high0;
5139
    }
5140
 
5141
  else if (in0_p && ! in1_p)
5142
    {
5143
      /* If they don't overlap, the result is the first range.  If they are
5144
         equal, the result is false.  If the second range is a subset of the
5145
         first, and the ranges begin at the same place, we go from just after
5146
         the end of the second range to the end of the first.  If the second
5147
         range is not a subset of the first, or if it is a subset and both
5148
         ranges end at the same place, the range starts at the start of the
5149
         first range and ends just before the second range.
5150
         Otherwise, we can't describe this as a single range.  */
5151
      if (no_overlap)
5152
        in_p = 1, low = low0, high = high0;
5153
      else if (lowequal && highequal)
5154
        in_p = 0, low = high = 0;
5155
      else if (subset && lowequal)
5156
        {
5157
          low = range_successor (high1);
5158
          high = high0;
5159
          in_p = 1;
5160
          if (low == 0)
5161
            {
5162
              /* We are in the weird situation where high0 > high1 but
5163
                 high1 has no successor.  Punt.  */
5164
              return 0;
5165
            }
5166
        }
5167
      else if (! subset || highequal)
5168
        {
5169
          low = low0;
5170
          high = range_predecessor (low1);
5171
          in_p = 1;
5172
          if (high == 0)
5173
            {
5174
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
5175
              return 0;
5176
            }
5177
        }
5178
      else
5179
        return 0;
5180
    }
5181
 
5182
  else if (! in0_p && in1_p)
5183
    {
5184
      /* If they don't overlap, the result is the second range.  If the second
5185
         is a subset of the first, the result is false.  Otherwise,
5186
         the range starts just after the first range and ends at the
5187
         end of the second.  */
5188
      if (no_overlap)
5189
        in_p = 1, low = low1, high = high1;
5190
      else if (subset || highequal)
5191
        in_p = 0, low = high = 0;
5192
      else
5193
        {
5194
          low = range_successor (high0);
5195
          high = high1;
5196
          in_p = 1;
5197
          if (low == 0)
5198
            {
5199
              /* high1 > high0 but high0 has no successor.  Punt.  */
5200
              return 0;
5201
            }
5202
        }
5203
    }
5204
 
5205
  else
5206
    {
5207
      /* The case where we are excluding both ranges.  Here the complex case
5208
         is if they don't overlap.  In that case, the only time we have a
5209
         range is if they are adjacent.  If the second is a subset of the
5210
         first, the result is the first.  Otherwise, the range to exclude
5211
         starts at the beginning of the first range and ends at the end of the
5212
         second.  */
5213
      if (no_overlap)
5214
        {
5215
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5216
                                         range_successor (high0),
5217
                                         1, low1, 0)))
5218
            in_p = 0, low = low0, high = high1;
5219
          else
5220
            {
5221
              /* Canonicalize - [min, x] into - [-, x].  */
5222
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
5223
                switch (TREE_CODE (TREE_TYPE (low0)))
5224
                  {
5225
                  case ENUMERAL_TYPE:
5226
                    if (TYPE_PRECISION (TREE_TYPE (low0))
5227
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5228
                      break;
5229
                    /* FALLTHROUGH */
5230
                  case INTEGER_TYPE:
5231
                    if (tree_int_cst_equal (low0,
5232
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
5233
                      low0 = 0;
5234
                    break;
5235
                  case POINTER_TYPE:
5236
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5237
                        && integer_zerop (low0))
5238
                      low0 = 0;
5239
                    break;
5240
                  default:
5241
                    break;
5242
                  }
5243
 
5244
              /* Canonicalize - [x, max] into - [x, -].  */
5245
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
5246
                switch (TREE_CODE (TREE_TYPE (high1)))
5247
                  {
5248
                  case ENUMERAL_TYPE:
5249
                    if (TYPE_PRECISION (TREE_TYPE (high1))
5250
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5251
                      break;
5252
                    /* FALLTHROUGH */
5253
                  case INTEGER_TYPE:
5254
                    if (tree_int_cst_equal (high1,
5255
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
5256
                      high1 = 0;
5257
                    break;
5258
                  case POINTER_TYPE:
5259
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5260
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5261
                                                       high1, 1,
5262
                                                       integer_one_node, 1)))
5263
                      high1 = 0;
5264
                    break;
5265
                  default:
5266
                    break;
5267
                  }
5268
 
5269
              /* The ranges might be also adjacent between the maximum and
5270
                 minimum values of the given type.  For
5271
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5272
                 return + [x + 1, y - 1].  */
5273
              if (low0 == 0 && high1 == 0)
5274
                {
5275
                  low = range_successor (high0);
5276
                  high = range_predecessor (low1);
5277
                  if (low == 0 || high == 0)
5278
                    return 0;
5279
 
5280
                  in_p = 1;
5281
                }
5282
              else
5283
                return 0;
5284
            }
5285
        }
5286
      else if (subset)
5287
        in_p = 0, low = low0, high = high0;
5288
      else
5289
        in_p = 0, low = low0, high = high1;
5290
    }
5291
 
5292
  *pin_p = in_p, *plow = low, *phigh = high;
5293
  return 1;
5294
}
5295
 
5296
 
5297
/* Subroutine of fold, looking inside expressions of the form
5298
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5299
   of the COND_EXPR.  This function is being used also to optimize
5300
   A op B ? C : A, by reversing the comparison first.
5301
 
5302
   Return a folded expression whose code is not a COND_EXPR
5303
   anymore, or NULL_TREE if no folding opportunity is found.  */
5304
 
5305
static tree
5306
fold_cond_expr_with_comparison (location_t loc, tree type,
5307
                                tree arg0, tree arg1, tree arg2)
5308
{
5309
  enum tree_code comp_code = TREE_CODE (arg0);
5310
  tree arg00 = TREE_OPERAND (arg0, 0);
5311
  tree arg01 = TREE_OPERAND (arg0, 1);
5312
  tree arg1_type = TREE_TYPE (arg1);
5313
  tree tem;
5314
 
5315
  STRIP_NOPS (arg1);
5316
  STRIP_NOPS (arg2);
5317
 
5318
  /* If we have A op 0 ? A : -A, consider applying the following
5319
     transformations:
5320
 
5321
     A == 0? A : -A    same as -A
5322
     A != 0? A : -A    same as A
5323
     A >= 0? A : -A    same as abs (A)
5324
     A > 0?  A : -A    same as abs (A)
5325
     A <= 0? A : -A    same as -abs (A)
5326
     A < 0?  A : -A    same as -abs (A)
5327
 
5328
     None of these transformations work for modes with signed
5329
     zeros.  If A is +/-0, the first two transformations will
5330
     change the sign of the result (from +0 to -0, or vice
5331
     versa).  The last four will fix the sign of the result,
5332
     even though the original expressions could be positive or
5333
     negative, depending on the sign of A.
5334
 
5335
     Note that all these transformations are correct if A is
5336
     NaN, since the two alternatives (A and -A) are also NaNs.  */
5337
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5338
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5339
          ? real_zerop (arg01)
5340
          : integer_zerop (arg01))
5341
      && ((TREE_CODE (arg2) == NEGATE_EXPR
5342
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5343
             /* In the case that A is of the form X-Y, '-A' (arg2) may
5344
                have already been folded to Y-X, check for that. */
5345
          || (TREE_CODE (arg1) == MINUS_EXPR
5346
              && TREE_CODE (arg2) == MINUS_EXPR
5347
              && operand_equal_p (TREE_OPERAND (arg1, 0),
5348
                                  TREE_OPERAND (arg2, 1), 0)
5349
              && operand_equal_p (TREE_OPERAND (arg1, 1),
5350
                                  TREE_OPERAND (arg2, 0), 0))))
5351
    switch (comp_code)
5352
      {
5353
      case EQ_EXPR:
5354
      case UNEQ_EXPR:
5355
        tem = fold_convert_loc (loc, arg1_type, arg1);
5356
        return pedantic_non_lvalue_loc (loc,
5357
                                    fold_convert_loc (loc, type,
5358
                                                  negate_expr (tem)));
5359
      case NE_EXPR:
5360
      case LTGT_EXPR:
5361
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5362
      case UNGE_EXPR:
5363
      case UNGT_EXPR:
5364
        if (flag_trapping_math)
5365
          break;
5366
        /* Fall through.  */
5367
      case GE_EXPR:
5368
      case GT_EXPR:
5369
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5370
          arg1 = fold_convert_loc (loc, signed_type_for
5371
                               (TREE_TYPE (arg1)), arg1);
5372
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5373
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5374
      case UNLE_EXPR:
5375
      case UNLT_EXPR:
5376
        if (flag_trapping_math)
5377
          break;
5378
      case LE_EXPR:
5379
      case LT_EXPR:
5380
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5381
          arg1 = fold_convert_loc (loc, signed_type_for
5382
                               (TREE_TYPE (arg1)), arg1);
5383
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5384
        return negate_expr (fold_convert_loc (loc, type, tem));
5385
      default:
5386
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5387
        break;
5388
      }
5389
 
5390
  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5391
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5392
     both transformations are correct when A is NaN: A != 0
5393
     is then true, and A == 0 is false.  */
5394
 
5395
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5396
      && integer_zerop (arg01) && integer_zerop (arg2))
5397
    {
5398
      if (comp_code == NE_EXPR)
5399
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5400
      else if (comp_code == EQ_EXPR)
5401
        return build_int_cst (type, 0);
5402
    }
5403
 
5404
  /* Try some transformations of A op B ? A : B.
5405
 
5406
     A == B? A : B    same as B
5407
     A != B? A : B    same as A
5408
     A >= B? A : B    same as max (A, B)
5409
     A > B?  A : B    same as max (B, A)
5410
     A <= B? A : B    same as min (A, B)
5411
     A < B?  A : B    same as min (B, A)
5412
 
5413
     As above, these transformations don't work in the presence
5414
     of signed zeros.  For example, if A and B are zeros of
5415
     opposite sign, the first two transformations will change
5416
     the sign of the result.  In the last four, the original
5417
     expressions give different results for (A=+0, B=-0) and
5418
     (A=-0, B=+0), but the transformed expressions do not.
5419
 
5420
     The first two transformations are correct if either A or B
5421
     is a NaN.  In the first transformation, the condition will
5422
     be false, and B will indeed be chosen.  In the case of the
5423
     second transformation, the condition A != B will be true,
5424
     and A will be chosen.
5425
 
5426
     The conversions to max() and min() are not correct if B is
5427
     a number and A is not.  The conditions in the original
5428
     expressions will be false, so all four give B.  The min()
5429
     and max() versions would give a NaN instead.  */
5430
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5431
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
5432
      /* Avoid these transformations if the COND_EXPR may be used
5433
         as an lvalue in the C++ front-end.  PR c++/19199.  */
5434
      && (in_gimple_form
5435
          || (strcmp (lang_hooks.name, "GNU C++") != 0
5436
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5437
          || ! maybe_lvalue_p (arg1)
5438
          || ! maybe_lvalue_p (arg2)))
5439
    {
5440
      tree comp_op0 = arg00;
5441
      tree comp_op1 = arg01;
5442
      tree comp_type = TREE_TYPE (comp_op0);
5443
 
5444
      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
5445
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5446
        {
5447
          comp_type = type;
5448
          comp_op0 = arg1;
5449
          comp_op1 = arg2;
5450
        }
5451
 
5452
      switch (comp_code)
5453
        {
5454
        case EQ_EXPR:
5455
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5456
        case NE_EXPR:
5457
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5458
        case LE_EXPR:
5459
        case LT_EXPR:
5460
        case UNLE_EXPR:
5461
        case UNLT_EXPR:
5462
          /* In C++ a ?: expression can be an lvalue, so put the
5463
             operand which will be used if they are equal first
5464
             so that we can convert this back to the
5465
             corresponding COND_EXPR.  */
5466
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5467
            {
5468
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5469
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5470
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5471
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5472
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5473
                                   comp_op1, comp_op0);
5474
              return pedantic_non_lvalue_loc (loc,
5475
                                          fold_convert_loc (loc, type, tem));
5476
            }
5477
          break;
5478
        case GE_EXPR:
5479
        case GT_EXPR:
5480
        case UNGE_EXPR:
5481
        case UNGT_EXPR:
5482
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5483
            {
5484
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5485
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5486
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5487
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5488
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5489
                                   comp_op1, comp_op0);
5490
              return pedantic_non_lvalue_loc (loc,
5491
                                          fold_convert_loc (loc, type, tem));
5492
            }
5493
          break;
5494
        case UNEQ_EXPR:
5495
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5496
            return pedantic_non_lvalue_loc (loc,
5497
                                        fold_convert_loc (loc, type, arg2));
5498
          break;
5499
        case LTGT_EXPR:
5500
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5501
            return pedantic_non_lvalue_loc (loc,
5502
                                        fold_convert_loc (loc, type, arg1));
5503
          break;
5504
        default:
5505
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5506
          break;
5507
        }
5508
    }
5509
 
5510
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5511
     we might still be able to simplify this.  For example,
5512
     if C1 is one less or one more than C2, this might have started
5513
     out as a MIN or MAX and been transformed by this function.
5514
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
5515
 
5516
  if (INTEGRAL_TYPE_P (type)
5517
      && TREE_CODE (arg01) == INTEGER_CST
5518
      && TREE_CODE (arg2) == INTEGER_CST)
5519
    switch (comp_code)
5520
      {
5521
      case EQ_EXPR:
5522
        if (TREE_CODE (arg1) == INTEGER_CST)
5523
          break;
5524
        /* We can replace A with C1 in this case.  */
5525
        arg1 = fold_convert_loc (loc, type, arg01);
5526
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5527
 
5528
      case LT_EXPR:
5529
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5530
           MIN_EXPR, to preserve the signedness of the comparison.  */
5531
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5532
                               OEP_ONLY_CONST)
5533
            && operand_equal_p (arg01,
5534
                                const_binop (PLUS_EXPR, arg2,
5535
                                             build_int_cst (type, 1), 0),
5536
                                OEP_ONLY_CONST))
5537
          {
5538
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5539
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
5540
                                                     arg2));
5541
            return pedantic_non_lvalue_loc (loc,
5542
                                            fold_convert_loc (loc, type, tem));
5543
          }
5544
        break;
5545
 
5546
      case LE_EXPR:
5547
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
5548
           as above.  */
5549
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5550
                               OEP_ONLY_CONST)
5551
            && operand_equal_p (arg01,
5552
                                const_binop (MINUS_EXPR, arg2,
5553
                                             build_int_cst (type, 1), 0),
5554
                                OEP_ONLY_CONST))
5555
          {
5556
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5557
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
5558
                                                     arg2));
5559
            return pedantic_non_lvalue_loc (loc,
5560
                                            fold_convert_loc (loc, type, tem));
5561
          }
5562
        break;
5563
 
5564
      case GT_EXPR:
5565
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5566
           MAX_EXPR, to preserve the signedness of the comparison.  */
5567
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5568
                               OEP_ONLY_CONST)
5569
            && operand_equal_p (arg01,
5570
                                const_binop (MINUS_EXPR, arg2,
5571
                                             build_int_cst (type, 1), 0),
5572
                                OEP_ONLY_CONST))
5573
          {
5574
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5575
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
5576
                                                     arg2));
5577
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5578
          }
5579
        break;
5580
 
5581
      case GE_EXPR:
5582
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
5583
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5584
                               OEP_ONLY_CONST)
5585
            && operand_equal_p (arg01,
5586
                                const_binop (PLUS_EXPR, arg2,
5587
                                             build_int_cst (type, 1), 0),
5588
                                OEP_ONLY_CONST))
5589
          {
5590
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5591
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
5592
                                                     arg2));
5593
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5594
          }
5595
        break;
5596
      case NE_EXPR:
5597
        break;
5598
      default:
5599
        gcc_unreachable ();
5600
      }
5601
 
5602
  return NULL_TREE;
5603
}
5604
 
5605
 
5606
 
5607
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5608
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
5609
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5610
                false) >= 2)
5611
#endif
5612
 
5613
/* EXP is some logical combination of boolean tests.  See if we can
5614
   merge it into some range test.  Return the new tree if so.  */
5615
 
5616
static tree
5617
fold_range_test (location_t loc, enum tree_code code, tree type,
5618
                 tree op0, tree op1)
5619
{
5620
  int or_op = (code == TRUTH_ORIF_EXPR
5621
               || code == TRUTH_OR_EXPR);
5622
  int in0_p, in1_p, in_p;
5623
  tree low0, low1, low, high0, high1, high;
5624
  bool strict_overflow_p = false;
5625
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5626
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5627
  tree tem;
5628
  const char * const warnmsg = G_("assuming signed overflow does not occur "
5629
                                  "when simplifying range test");
5630
 
5631
  /* If this is an OR operation, invert both sides; we will invert
5632
     again at the end.  */
5633
  if (or_op)
5634
    in0_p = ! in0_p, in1_p = ! in1_p;
5635
 
5636
  /* If both expressions are the same, if we can merge the ranges, and we
5637
     can build the range test, return it or it inverted.  If one of the
5638
     ranges is always true or always false, consider it to be the same
5639
     expression as the other.  */
5640
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5641
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5642
                       in1_p, low1, high1)
5643
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5644
                                         lhs != 0 ? lhs
5645
                                         : rhs != 0 ? rhs : integer_zero_node,
5646
                                         in_p, low, high))))
5647
    {
5648
      if (strict_overflow_p)
5649
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5650
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5651
    }
5652
 
5653
  /* On machines where the branch cost is expensive, if this is a
5654
     short-circuited branch and the underlying object on both sides
5655
     is the same, make a non-short-circuit operation.  */
5656
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5657
           && lhs != 0 && rhs != 0
5658
           && (code == TRUTH_ANDIF_EXPR
5659
               || code == TRUTH_ORIF_EXPR)
5660
           && operand_equal_p (lhs, rhs, 0))
5661
    {
5662
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5663
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5664
         which cases we can't do this.  */
5665
      if (simple_operand_p (lhs))
5666
        {
5667
          tem = build2 (code == TRUTH_ANDIF_EXPR
5668
                        ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5669
                        type, op0, op1);
5670
          SET_EXPR_LOCATION (tem, loc);
5671
          return tem;
5672
        }
5673
 
5674
      else if (lang_hooks.decls.global_bindings_p () == 0
5675
               && ! CONTAINS_PLACEHOLDER_P (lhs))
5676
        {
5677
          tree common = save_expr (lhs);
5678
 
5679
          if (0 != (lhs = build_range_check (loc, type, common,
5680
                                             or_op ? ! in0_p : in0_p,
5681
                                             low0, high0))
5682
              && (0 != (rhs = build_range_check (loc, type, common,
5683
                                                 or_op ? ! in1_p : in1_p,
5684
                                                 low1, high1))))
5685
            {
5686
              if (strict_overflow_p)
5687
                fold_overflow_warning (warnmsg,
5688
                                       WARN_STRICT_OVERFLOW_COMPARISON);
5689
              tem = build2 (code == TRUTH_ANDIF_EXPR
5690
                            ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5691
                            type, lhs, rhs);
5692
              SET_EXPR_LOCATION (tem, loc);
5693
              return tem;
5694
            }
5695
        }
5696
    }
5697
 
5698
  return 0;
5699
}
5700
 
5701
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5702
   bit value.  Arrange things so the extra bits will be set to zero if and
5703
   only if C is signed-extended to its full width.  If MASK is nonzero,
5704
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5705
 
5706
static tree
5707
unextend (tree c, int p, int unsignedp, tree mask)
5708
{
5709
  tree type = TREE_TYPE (c);
5710
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5711
  tree temp;
5712
 
5713
  if (p == modesize || unsignedp)
5714
    return c;
5715
 
5716
  /* We work by getting just the sign bit into the low-order bit, then
5717
     into the high-order bit, then sign-extend.  We then XOR that value
5718
     with C.  */
5719
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5720
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5721
 
5722
  /* We must use a signed type in order to get an arithmetic right shift.
5723
     However, we must also avoid introducing accidental overflows, so that
5724
     a subsequent call to integer_zerop will work.  Hence we must
5725
     do the type conversion here.  At this point, the constant is either
5726
     zero or one, and the conversion to a signed type can never overflow.
5727
     We could get an overflow if this conversion is done anywhere else.  */
5728
  if (TYPE_UNSIGNED (type))
5729
    temp = fold_convert (signed_type_for (type), temp);
5730
 
5731
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5732
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5733
  if (mask != 0)
5734
    temp = const_binop (BIT_AND_EXPR, temp,
5735
                        fold_convert (TREE_TYPE (c), mask),
5736
                        0);
5737
  /* If necessary, convert the type back to match the type of C.  */
5738
  if (TYPE_UNSIGNED (type))
5739
    temp = fold_convert (type, temp);
5740
 
5741
  return fold_convert (type,
5742
                       const_binop (BIT_XOR_EXPR, c, temp, 0));
5743
}
5744
 
5745
/* Find ways of folding logical expressions of LHS and RHS:
5746
   Try to merge two comparisons to the same innermost item.
5747
   Look for range tests like "ch >= '0' && ch <= '9'".
5748
   Look for combinations of simple terms on machines with expensive branches
5749
   and evaluate the RHS unconditionally.
5750
 
5751
   For example, if we have p->a == 2 && p->b == 4 and we can make an
5752
   object large enough to span both A and B, we can do this with a comparison
5753
   against the object ANDed with the a mask.
5754
 
5755
   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5756
   operations to do this with one comparison.
5757
 
5758
   We check for both normal comparisons and the BIT_AND_EXPRs made this by
5759
   function and the one above.
5760
 
5761
   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5762
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5763
 
5764
   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5765
   two operands.
5766
 
5767
   We return the simplified tree or 0 if no optimization is possible.  */
5768
 
5769
static tree
5770
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5771
              tree lhs, tree rhs)
5772
{
5773
  /* If this is the "or" of two comparisons, we can do something if
5774
     the comparisons are NE_EXPR.  If this is the "and", we can do something
5775
     if the comparisons are EQ_EXPR.  I.e.,
5776
        (a->b == 2 && a->c == 4) can become (a->new == NEW).
5777
 
5778
     WANTED_CODE is this operation code.  For single bit fields, we can
5779
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5780
     comparison for one-bit fields.  */
5781
 
5782
  enum tree_code wanted_code;
5783
  enum tree_code lcode, rcode;
5784
  tree ll_arg, lr_arg, rl_arg, rr_arg;
5785
  tree ll_inner, lr_inner, rl_inner, rr_inner;
5786
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5787
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5788
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5789
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5790
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5791
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5792
  enum machine_mode lnmode, rnmode;
5793
  tree ll_mask, lr_mask, rl_mask, rr_mask;
5794
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5795
  tree l_const, r_const;
5796
  tree lntype, rntype, result;
5797
  HOST_WIDE_INT first_bit, end_bit;
5798
  int volatilep;
5799
  tree orig_lhs = lhs, orig_rhs = rhs;
5800
  enum tree_code orig_code = code;
5801
 
5802
  /* Start by getting the comparison codes.  Fail if anything is volatile.
5803
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5804
     it were surrounded with a NE_EXPR.  */
5805
 
5806
  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5807
    return 0;
5808
 
5809
  lcode = TREE_CODE (lhs);
5810
  rcode = TREE_CODE (rhs);
5811
 
5812
  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5813
    {
5814
      lhs = build2 (NE_EXPR, truth_type, lhs,
5815
                    build_int_cst (TREE_TYPE (lhs), 0));
5816
      lcode = NE_EXPR;
5817
    }
5818
 
5819
  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5820
    {
5821
      rhs = build2 (NE_EXPR, truth_type, rhs,
5822
                    build_int_cst (TREE_TYPE (rhs), 0));
5823
      rcode = NE_EXPR;
5824
    }
5825
 
5826
  if (TREE_CODE_CLASS (lcode) != tcc_comparison
5827
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
5828
    return 0;
5829
 
5830
  ll_arg = TREE_OPERAND (lhs, 0);
5831
  lr_arg = TREE_OPERAND (lhs, 1);
5832
  rl_arg = TREE_OPERAND (rhs, 0);
5833
  rr_arg = TREE_OPERAND (rhs, 1);
5834
 
5835
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5836
  if (simple_operand_p (ll_arg)
5837
      && simple_operand_p (lr_arg))
5838
    {
5839
      tree result;
5840
      if (operand_equal_p (ll_arg, rl_arg, 0)
5841
          && operand_equal_p (lr_arg, rr_arg, 0))
5842
        {
5843
          result = combine_comparisons (loc, code, lcode, rcode,
5844
                                        truth_type, ll_arg, lr_arg);
5845
          if (result)
5846
            return result;
5847
        }
5848
      else if (operand_equal_p (ll_arg, rr_arg, 0)
5849
               && operand_equal_p (lr_arg, rl_arg, 0))
5850
        {
5851
          result = combine_comparisons (loc, code, lcode,
5852
                                        swap_tree_comparison (rcode),
5853
                                        truth_type, ll_arg, lr_arg);
5854
          if (result)
5855
            return result;
5856
        }
5857
    }
5858
 
5859
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5860
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5861
 
5862
  /* If the RHS can be evaluated unconditionally and its operands are
5863
     simple, it wins to evaluate the RHS unconditionally on machines
5864
     with expensive branches.  In this case, this isn't a comparison
5865
     that can be merged.  Avoid doing this if the RHS is a floating-point
5866
     comparison since those can trap.  */
5867
 
5868
  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5869
                   false) >= 2
5870
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5871
      && simple_operand_p (rl_arg)
5872
      && simple_operand_p (rr_arg))
5873
    {
5874
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5875
      if (code == TRUTH_OR_EXPR
5876
          && lcode == NE_EXPR && integer_zerop (lr_arg)
5877
          && rcode == NE_EXPR && integer_zerop (rr_arg)
5878
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5879
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5880
        {
5881
          result = build2 (NE_EXPR, truth_type,
5882
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5883
                                   ll_arg, rl_arg),
5884
                           build_int_cst (TREE_TYPE (ll_arg), 0));
5885
          goto fold_truthop_exit;
5886
        }
5887
 
5888
      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5889
      if (code == TRUTH_AND_EXPR
5890
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
5891
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
5892
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5893
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5894
        {
5895
          result = build2 (EQ_EXPR, truth_type,
5896
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5897
                                   ll_arg, rl_arg),
5898
                           build_int_cst (TREE_TYPE (ll_arg), 0));
5899
          goto fold_truthop_exit;
5900
        }
5901
 
5902
      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5903
        {
5904
          if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5905
            {
5906
              result = build2 (code, truth_type, lhs, rhs);
5907
              goto fold_truthop_exit;
5908
            }
5909
          return NULL_TREE;
5910
        }
5911
    }
5912
 
5913
  /* See if the comparisons can be merged.  Then get all the parameters for
5914
     each side.  */
5915
 
5916
  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5917
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
5918
    return 0;
5919
 
5920
  volatilep = 0;
5921
  ll_inner = decode_field_reference (loc, ll_arg,
5922
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
5923
                                     &ll_unsignedp, &volatilep, &ll_mask,
5924
                                     &ll_and_mask);
5925
  lr_inner = decode_field_reference (loc, lr_arg,
5926
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
5927
                                     &lr_unsignedp, &volatilep, &lr_mask,
5928
                                     &lr_and_mask);
5929
  rl_inner = decode_field_reference (loc, rl_arg,
5930
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
5931
                                     &rl_unsignedp, &volatilep, &rl_mask,
5932
                                     &rl_and_mask);
5933
  rr_inner = decode_field_reference (loc, rr_arg,
5934
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
5935
                                     &rr_unsignedp, &volatilep, &rr_mask,
5936
                                     &rr_and_mask);
5937
 
5938
  /* It must be true that the inner operation on the lhs of each
5939
     comparison must be the same if we are to be able to do anything.
5940
     Then see if we have constants.  If not, the same must be true for
5941
     the rhs's.  */
5942
  if (volatilep || ll_inner == 0 || rl_inner == 0
5943
      || ! operand_equal_p (ll_inner, rl_inner, 0))
5944
    return 0;
5945
 
5946
  if (TREE_CODE (lr_arg) == INTEGER_CST
5947
      && TREE_CODE (rr_arg) == INTEGER_CST)
5948
    l_const = lr_arg, r_const = rr_arg;
5949
  else if (lr_inner == 0 || rr_inner == 0
5950
           || ! operand_equal_p (lr_inner, rr_inner, 0))
5951
    return 0;
5952
  else
5953
    l_const = r_const = 0;
5954
 
5955
  /* If either comparison code is not correct for our logical operation,
5956
     fail.  However, we can convert a one-bit comparison against zero into
5957
     the opposite comparison against that bit being set in the field.  */
5958
 
5959
  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5960
  if (lcode != wanted_code)
5961
    {
5962
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5963
        {
5964
          /* Make the left operand unsigned, since we are only interested
5965
             in the value of one bit.  Otherwise we are doing the wrong
5966
             thing below.  */
5967
          ll_unsignedp = 1;
5968
          l_const = ll_mask;
5969
        }
5970
      else
5971
        return 0;
5972
    }
5973
 
5974
  /* This is analogous to the code for l_const above.  */
5975
  if (rcode != wanted_code)
5976
    {
5977
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5978
        {
5979
          rl_unsignedp = 1;
5980
          r_const = rl_mask;
5981
        }
5982
      else
5983
        return 0;
5984
    }
5985
 
5986
  /* See if we can find a mode that contains both fields being compared on
5987
     the left.  If we can't, fail.  Otherwise, update all constants and masks
5988
     to be relative to a field of that size.  */
5989
  first_bit = MIN (ll_bitpos, rl_bitpos);
5990
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5991
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
5992
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5993
                          volatilep);
5994
  if (lnmode == VOIDmode)
5995
    return 0;
5996
 
5997
  lnbitsize = GET_MODE_BITSIZE (lnmode);
5998
  lnbitpos = first_bit & ~ (lnbitsize - 1);
5999
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6000
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6001
 
6002
  if (BYTES_BIG_ENDIAN)
6003
    {
6004
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6005
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6006
    }
6007
 
6008
  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6009
                         size_int (xll_bitpos), 0);
6010
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6011
                         size_int (xrl_bitpos), 0);
6012
 
6013
  if (l_const)
6014
    {
6015
      l_const = fold_convert_loc (loc, lntype, l_const);
6016
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6017
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6018
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6019
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
6020
                                                     lntype, ll_mask),
6021
                                        0)))
6022
        {
6023
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6024
 
6025
          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6026
        }
6027
    }
6028
  if (r_const)
6029
    {
6030
      r_const = fold_convert_loc (loc, lntype, r_const);
6031
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6032
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6033
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6034
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
6035
                                                     lntype, rl_mask),
6036
                                        0)))
6037
        {
6038
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6039
 
6040
          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6041
        }
6042
    }
6043
 
6044
  /* If the right sides are not constant, do the same for it.  Also,
6045
     disallow this optimization if a size or signedness mismatch occurs
6046
     between the left and right sides.  */
6047
  if (l_const == 0)
6048
    {
6049
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6050
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6051
          /* Make sure the two fields on the right
6052
             correspond to the left without being swapped.  */
6053
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6054
        return 0;
6055
 
6056
      first_bit = MIN (lr_bitpos, rr_bitpos);
6057
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6058
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
6059
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6060
                              volatilep);
6061
      if (rnmode == VOIDmode)
6062
        return 0;
6063
 
6064
      rnbitsize = GET_MODE_BITSIZE (rnmode);
6065
      rnbitpos = first_bit & ~ (rnbitsize - 1);
6066
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6067
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6068
 
6069
      if (BYTES_BIG_ENDIAN)
6070
        {
6071
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6072
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6073
        }
6074
 
6075
      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6076
                                                            rntype, lr_mask),
6077
                             size_int (xlr_bitpos), 0);
6078
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6079
                                                            rntype, rr_mask),
6080
                             size_int (xrr_bitpos), 0);
6081
 
6082
      /* Make a mask that corresponds to both fields being compared.
6083
         Do this for both items being compared.  If the operands are the
6084
         same size and the bits being compared are in the same position
6085
         then we can do this by masking both and comparing the masked
6086
         results.  */
6087
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6088
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6089
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6090
        {
6091
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6092
                                    ll_unsignedp || rl_unsignedp);
6093
          if (! all_ones_mask_p (ll_mask, lnbitsize))
6094
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6095
 
6096
          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6097
                                    lr_unsignedp || rr_unsignedp);
6098
          if (! all_ones_mask_p (lr_mask, rnbitsize))
6099
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6100
 
6101
          result = build2 (wanted_code, truth_type, lhs, rhs);
6102
          goto fold_truthop_exit;
6103
        }
6104
 
6105
      /* There is still another way we can do something:  If both pairs of
6106
         fields being compared are adjacent, we may be able to make a wider
6107
         field containing them both.
6108
 
6109
         Note that we still must mask the lhs/rhs expressions.  Furthermore,
6110
         the mask must be shifted to account for the shift done by
6111
         make_bit_field_ref.  */
6112
      if ((ll_bitsize + ll_bitpos == rl_bitpos
6113
           && lr_bitsize + lr_bitpos == rr_bitpos)
6114
          || (ll_bitpos == rl_bitpos + rl_bitsize
6115
              && lr_bitpos == rr_bitpos + rr_bitsize))
6116
        {
6117
          tree type;
6118
 
6119
          lhs = make_bit_field_ref (loc, ll_inner, lntype,
6120
                                    ll_bitsize + rl_bitsize,
6121
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6122
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
6123
                                    lr_bitsize + rr_bitsize,
6124
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6125
 
6126
          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6127
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6128
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6129
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6130
 
6131
          /* Convert to the smaller type before masking out unwanted bits.  */
6132
          type = lntype;
6133
          if (lntype != rntype)
6134
            {
6135
              if (lnbitsize > rnbitsize)
6136
                {
6137
                  lhs = fold_convert_loc (loc, rntype, lhs);
6138
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6139
                  type = rntype;
6140
                }
6141
              else if (lnbitsize < rnbitsize)
6142
                {
6143
                  rhs = fold_convert_loc (loc, lntype, rhs);
6144
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6145
                  type = lntype;
6146
                }
6147
            }
6148
 
6149
          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6150
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6151
 
6152
          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6153
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6154
 
6155
          result = build2 (wanted_code, truth_type, lhs, rhs);
6156
          goto fold_truthop_exit;
6157
        }
6158
 
6159
      return 0;
6160
    }
6161
 
6162
  /* Handle the case of comparisons with constants.  If there is something in
6163
     common between the masks, those bits of the constants must be the same.
6164
     If not, the condition is always false.  Test for this to avoid generating
6165
     incorrect code below.  */
6166
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6167
  if (! integer_zerop (result)
6168
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6169
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6170
    {
6171
      if (wanted_code == NE_EXPR)
6172
        {
6173
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
6174
          return constant_boolean_node (true, truth_type);
6175
        }
6176
      else
6177
        {
6178
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6179
          return constant_boolean_node (false, truth_type);
6180
        }
6181
    }
6182
 
6183
  /* Construct the expression we will return.  First get the component
6184
     reference we will make.  Unless the mask is all ones the width of
6185
     that field, perform the mask operation.  Then compare with the
6186
     merged constant.  */
6187
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6188
                               ll_unsignedp || rl_unsignedp);
6189
 
6190
  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6191
  if (! all_ones_mask_p (ll_mask, lnbitsize))
6192
    {
6193
      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6194
      SET_EXPR_LOCATION (result, loc);
6195
    }
6196
 
6197
  result = build2 (wanted_code, truth_type, result,
6198
                   const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6199
 
6200
 fold_truthop_exit:
6201
  SET_EXPR_LOCATION (result, loc);
6202
  return result;
6203
}
6204
 
6205
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6206
   constant.  */
6207
 
6208
static tree
6209
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6210
                            tree op0, tree op1)
6211
{
6212
  tree arg0 = op0;
6213
  enum tree_code op_code;
6214
  tree comp_const;
6215
  tree minmax_const;
6216
  int consts_equal, consts_lt;
6217
  tree inner;
6218
 
6219
  STRIP_SIGN_NOPS (arg0);
6220
 
6221
  op_code = TREE_CODE (arg0);
6222
  minmax_const = TREE_OPERAND (arg0, 1);
6223
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6224
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6225
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6226
  inner = TREE_OPERAND (arg0, 0);
6227
 
6228
  /* If something does not permit us to optimize, return the original tree.  */
6229
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6230
      || TREE_CODE (comp_const) != INTEGER_CST
6231
      || TREE_OVERFLOW (comp_const)
6232
      || TREE_CODE (minmax_const) != INTEGER_CST
6233
      || TREE_OVERFLOW (minmax_const))
6234
    return NULL_TREE;
6235
 
6236
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
6237
     and GT_EXPR, doing the rest with recursive calls using logical
6238
     simplifications.  */
6239
  switch (code)
6240
    {
6241
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
6242
      {
6243
        tree tem
6244
          = optimize_minmax_comparison (loc,
6245
                                        invert_tree_comparison (code, false),
6246
                                        type, op0, op1);
6247
        if (tem)
6248
          return invert_truthvalue_loc (loc, tem);
6249
        return NULL_TREE;
6250
      }
6251
 
6252
    case GE_EXPR:
6253
      return
6254
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6255
                     optimize_minmax_comparison
6256
                     (loc, EQ_EXPR, type, arg0, comp_const),
6257
                     optimize_minmax_comparison
6258
                     (loc, GT_EXPR, type, arg0, comp_const));
6259
 
6260
    case EQ_EXPR:
6261
      if (op_code == MAX_EXPR && consts_equal)
6262
        /* MAX (X, 0) == 0  ->  X <= 0  */
6263
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6264
 
6265
      else if (op_code == MAX_EXPR && consts_lt)
6266
        /* MAX (X, 0) == 5  ->  X == 5   */
6267
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6268
 
6269
      else if (op_code == MAX_EXPR)
6270
        /* MAX (X, 0) == -1  ->  false  */
6271
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6272
 
6273
      else if (consts_equal)
6274
        /* MIN (X, 0) == 0  ->  X >= 0  */
6275
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6276
 
6277
      else if (consts_lt)
6278
        /* MIN (X, 0) == 5  ->  false  */
6279
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6280
 
6281
      else
6282
        /* MIN (X, 0) == -1  ->  X == -1  */
6283
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6284
 
6285
    case GT_EXPR:
6286
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6287
        /* MAX (X, 0) > 0  ->  X > 0
6288
           MAX (X, 0) > 5  ->  X > 5  */
6289
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6290
 
6291
      else if (op_code == MAX_EXPR)
6292
        /* MAX (X, 0) > -1  ->  true  */
6293
        return omit_one_operand_loc (loc, type, integer_one_node, inner);
6294
 
6295
      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6296
        /* MIN (X, 0) > 0  ->  false
6297
           MIN (X, 0) > 5  ->  false  */
6298
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6299
 
6300
      else
6301
        /* MIN (X, 0) > -1  ->  X > -1  */
6302
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6303
 
6304
    default:
6305
      return NULL_TREE;
6306
    }
6307
}
6308
 
6309
/* T is an integer expression that is being multiplied, divided, or taken a
6310
   modulus (CODE says which and what kind of divide or modulus) by a
6311
   constant C.  See if we can eliminate that operation by folding it with
6312
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
6313
   should be used for the computation if wider than our type.
6314
 
6315
   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6316
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
6317
   expression would not overflow or that overflow is undefined for the type
6318
   in the language in question.
6319
 
6320
   If we return a non-null expression, it is an equivalent form of the
6321
   original computation, but need not be in the original type.
6322
 
6323
   We set *STRICT_OVERFLOW_P to true if the return values depends on
6324
   signed overflow being undefined.  Otherwise we do not change
6325
   *STRICT_OVERFLOW_P.  */
6326
 
6327
static tree
6328
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6329
                bool *strict_overflow_p)
6330
{
6331
  /* To avoid exponential search depth, refuse to allow recursion past
6332
     three levels.  Beyond that (1) it's highly unlikely that we'll find
6333
     something interesting and (2) we've probably processed it before
6334
     when we built the inner expression.  */
6335
 
6336
  static int depth;
6337
  tree ret;
6338
 
6339
  if (depth > 3)
6340
    return NULL;
6341
 
6342
  depth++;
6343
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6344
  depth--;
6345
 
6346
  return ret;
6347
}
6348
 
6349
static tree
6350
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6351
                  bool *strict_overflow_p)
6352
{
6353
  tree type = TREE_TYPE (t);
6354
  enum tree_code tcode = TREE_CODE (t);
6355
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6356
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
6357
                ? wide_type : type);
6358
  tree t1, t2;
6359
  int same_p = tcode == code;
6360
  tree op0 = NULL_TREE, op1 = NULL_TREE;
6361
  bool sub_strict_overflow_p;
6362
 
6363
  /* Don't deal with constants of zero here; they confuse the code below.  */
6364
  if (integer_zerop (c))
6365
    return NULL_TREE;
6366
 
6367
  if (TREE_CODE_CLASS (tcode) == tcc_unary)
6368
    op0 = TREE_OPERAND (t, 0);
6369
 
6370
  if (TREE_CODE_CLASS (tcode) == tcc_binary)
6371
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6372
 
6373
  /* Note that we need not handle conditional operations here since fold
6374
     already handles those cases.  So just do arithmetic here.  */
6375
  switch (tcode)
6376
    {
6377
    case INTEGER_CST:
6378
      /* For a constant, we can always simplify if we are a multiply
6379
         or (for divide and modulus) if it is a multiple of our constant.  */
6380
      if (code == MULT_EXPR
6381
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6382
        return const_binop (code, fold_convert (ctype, t),
6383
                            fold_convert (ctype, c), 0);
6384
      break;
6385
 
6386
    CASE_CONVERT: case NON_LVALUE_EXPR:
6387
      /* If op0 is an expression ...  */
6388
      if ((COMPARISON_CLASS_P (op0)
6389
           || UNARY_CLASS_P (op0)
6390
           || BINARY_CLASS_P (op0)
6391
           || VL_EXP_CLASS_P (op0)
6392
           || EXPRESSION_CLASS_P (op0))
6393
          /* ... and has wrapping overflow, and its type is smaller
6394
             than ctype, then we cannot pass through as widening.  */
6395
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6396
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6397
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6398
               && (TYPE_PRECISION (ctype)
6399
                   > TYPE_PRECISION (TREE_TYPE (op0))))
6400
              /* ... or this is a truncation (t is narrower than op0),
6401
                 then we cannot pass through this narrowing.  */
6402
              || (TYPE_PRECISION (type)
6403
                  < TYPE_PRECISION (TREE_TYPE (op0)))
6404
              /* ... or signedness changes for division or modulus,
6405
                 then we cannot pass through this conversion.  */
6406
              || (code != MULT_EXPR
6407
                  && (TYPE_UNSIGNED (ctype)
6408
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6409
              /* ... or has undefined overflow while the converted to
6410
                 type has not, we cannot do the operation in the inner type
6411
                 as that would introduce undefined overflow.  */
6412
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6413
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
6414
        break;
6415
 
6416
      /* Pass the constant down and see if we can make a simplification.  If
6417
         we can, replace this expression with the inner simplification for
6418
         possible later conversion to our or some other type.  */
6419
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6420
          && TREE_CODE (t2) == INTEGER_CST
6421
          && !TREE_OVERFLOW (t2)
6422
          && (0 != (t1 = extract_muldiv (op0, t2, code,
6423
                                         code == MULT_EXPR
6424
                                         ? ctype : NULL_TREE,
6425
                                         strict_overflow_p))))
6426
        return t1;
6427
      break;
6428
 
6429
    case ABS_EXPR:
6430
      /* If widening the type changes it from signed to unsigned, then we
6431
         must avoid building ABS_EXPR itself as unsigned.  */
6432
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6433
        {
6434
          tree cstype = (*signed_type_for) (ctype);
6435
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6436
              != 0)
6437
            {
6438
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6439
              return fold_convert (ctype, t1);
6440
            }
6441
          break;
6442
        }
6443
      /* If the constant is negative, we cannot simplify this.  */
6444
      if (tree_int_cst_sgn (c) == -1)
6445
        break;
6446
      /* FALLTHROUGH */
6447
    case NEGATE_EXPR:
6448
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6449
          != 0)
6450
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6451
      break;
6452
 
6453
    case MIN_EXPR:  case MAX_EXPR:
6454
      /* If widening the type changes the signedness, then we can't perform
6455
         this optimization as that changes the result.  */
6456
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6457
        break;
6458
 
6459
      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6460
      sub_strict_overflow_p = false;
6461
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
6462
                                &sub_strict_overflow_p)) != 0
6463
          && (t2 = extract_muldiv (op1, c, code, wide_type,
6464
                                   &sub_strict_overflow_p)) != 0)
6465
        {
6466
          if (tree_int_cst_sgn (c) < 0)
6467
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6468
          if (sub_strict_overflow_p)
6469
            *strict_overflow_p = true;
6470
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6471
                              fold_convert (ctype, t2));
6472
        }
6473
      break;
6474
 
6475
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
6476
      /* If the second operand is constant, this is a multiplication
6477
         or floor division, by a power of two, so we can treat it that
6478
         way unless the multiplier or divisor overflows.  Signed
6479
         left-shift overflow is implementation-defined rather than
6480
         undefined in C90, so do not convert signed left shift into
6481
         multiplication.  */
6482
      if (TREE_CODE (op1) == INTEGER_CST
6483
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6484
          /* const_binop may not detect overflow correctly,
6485
             so check for it explicitly here.  */
6486
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6487
          && TREE_INT_CST_HIGH (op1) == 0
6488
          && 0 != (t1 = fold_convert (ctype,
6489
                                      const_binop (LSHIFT_EXPR,
6490
                                                   size_one_node,
6491
                                                   op1, 0)))
6492
          && !TREE_OVERFLOW (t1))
6493
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6494
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
6495
                                       ctype,
6496
                                       fold_convert (ctype, op0),
6497
                                       t1),
6498
                               c, code, wide_type, strict_overflow_p);
6499
      break;
6500
 
6501
    case PLUS_EXPR:  case MINUS_EXPR:
6502
      /* See if we can eliminate the operation on both sides.  If we can, we
6503
         can return a new PLUS or MINUS.  If we can't, the only remaining
6504
         cases where we can do anything are if the second operand is a
6505
         constant.  */
6506
      sub_strict_overflow_p = false;
6507
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6508
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6509
      if (t1 != 0 && t2 != 0
6510
          && (code == MULT_EXPR
6511
              /* If not multiplication, we can only do this if both operands
6512
                 are divisible by c.  */
6513
              || (multiple_of_p (ctype, op0, c)
6514
                  && multiple_of_p (ctype, op1, c))))
6515
        {
6516
          if (sub_strict_overflow_p)
6517
            *strict_overflow_p = true;
6518
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6519
                              fold_convert (ctype, t2));
6520
        }
6521
 
6522
      /* If this was a subtraction, negate OP1 and set it to be an addition.
6523
         This simplifies the logic below.  */
6524
      if (tcode == MINUS_EXPR)
6525
        {
6526
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
6527
          /* If OP1 was not easily negatable, the constant may be OP0.  */
6528
          if (TREE_CODE (op0) == INTEGER_CST)
6529
            {
6530
              tree tem = op0;
6531
              op0 = op1;
6532
              op1 = tem;
6533
              tem = t1;
6534
              t1 = t2;
6535
              t2 = tem;
6536
            }
6537
        }
6538
 
6539
      if (TREE_CODE (op1) != INTEGER_CST)
6540
        break;
6541
 
6542
      /* If either OP1 or C are negative, this optimization is not safe for
6543
         some of the division and remainder types while for others we need
6544
         to change the code.  */
6545
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6546
        {
6547
          if (code == CEIL_DIV_EXPR)
6548
            code = FLOOR_DIV_EXPR;
6549
          else if (code == FLOOR_DIV_EXPR)
6550
            code = CEIL_DIV_EXPR;
6551
          else if (code != MULT_EXPR
6552
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6553
            break;
6554
        }
6555
 
6556
      /* If it's a multiply or a division/modulus operation of a multiple
6557
         of our constant, do the operation and verify it doesn't overflow.  */
6558
      if (code == MULT_EXPR
6559
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6560
        {
6561
          op1 = const_binop (code, fold_convert (ctype, op1),
6562
                             fold_convert (ctype, c), 0);
6563
          /* We allow the constant to overflow with wrapping semantics.  */
6564
          if (op1 == 0
6565
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6566
            break;
6567
        }
6568
      else
6569
        break;
6570
 
6571
      /* If we have an unsigned type is not a sizetype, we cannot widen
6572
         the operation since it will change the result if the original
6573
         computation overflowed.  */
6574
      if (TYPE_UNSIGNED (ctype)
6575
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6576
          && ctype != type)
6577
        break;
6578
 
6579
      /* If we were able to eliminate our operation from the first side,
6580
         apply our operation to the second side and reform the PLUS.  */
6581
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6582
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6583
 
6584
      /* The last case is if we are a multiply.  In that case, we can
6585
         apply the distributive law to commute the multiply and addition
6586
         if the multiplication of the constants doesn't overflow.  */
6587
      if (code == MULT_EXPR)
6588
        return fold_build2 (tcode, ctype,
6589
                            fold_build2 (code, ctype,
6590
                                         fold_convert (ctype, op0),
6591
                                         fold_convert (ctype, c)),
6592
                            op1);
6593
 
6594
      break;
6595
 
6596
    case MULT_EXPR:
6597
      /* We have a special case here if we are doing something like
6598
         (C * 8) % 4 since we know that's zero.  */
6599
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6600
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6601
          /* If the multiplication can overflow we cannot optimize this.
6602
             ???  Until we can properly mark individual operations as
6603
             not overflowing we need to treat sizetype special here as
6604
             stor-layout relies on this opimization to make
6605
             DECL_FIELD_BIT_OFFSET always a constant.  */
6606
          && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6607
              || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6608
                  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6609
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6610
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6611
        {
6612
          *strict_overflow_p = true;
6613
          return omit_one_operand (type, integer_zero_node, op0);
6614
        }
6615
 
6616
      /* ... fall through ...  */
6617
 
6618
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6619
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6620
      /* If we can extract our operation from the LHS, do so and return a
6621
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6622
         do something only if the second operand is a constant.  */
6623
      if (same_p
6624
          && (t1 = extract_muldiv (op0, c, code, wide_type,
6625
                                   strict_overflow_p)) != 0)
6626
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6627
                            fold_convert (ctype, op1));
6628
      else if (tcode == MULT_EXPR && code == MULT_EXPR
6629
               && (t1 = extract_muldiv (op1, c, code, wide_type,
6630
                                        strict_overflow_p)) != 0)
6631
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6632
                            fold_convert (ctype, t1));
6633
      else if (TREE_CODE (op1) != INTEGER_CST)
6634
        return 0;
6635
 
6636
      /* If these are the same operation types, we can associate them
6637
         assuming no overflow.  */
6638
      if (tcode == code
6639
          && 0 != (t1 = int_const_binop (MULT_EXPR,
6640
                                         fold_convert (ctype, op1),
6641
                                         fold_convert (ctype, c), 1))
6642
          && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6643
                                               TREE_INT_CST_HIGH (t1),
6644
                                               (TYPE_UNSIGNED (ctype)
6645
                                                && tcode != MULT_EXPR) ? -1 : 1,
6646
                                               TREE_OVERFLOW (t1)))
6647
          && !TREE_OVERFLOW (t1))
6648
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6649
 
6650
      /* If these operations "cancel" each other, we have the main
6651
         optimizations of this pass, which occur when either constant is a
6652
         multiple of the other, in which case we replace this with either an
6653
         operation or CODE or TCODE.
6654
 
6655
         If we have an unsigned type that is not a sizetype, we cannot do
6656
         this since it will change the result if the original computation
6657
         overflowed.  */
6658
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6659
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6660
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6661
              || (tcode == MULT_EXPR
6662
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6663
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6664
                  && code != MULT_EXPR)))
6665
        {
6666
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6667
            {
6668
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
6669
                *strict_overflow_p = true;
6670
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6671
                                  fold_convert (ctype,
6672
                                                const_binop (TRUNC_DIV_EXPR,
6673
                                                             op1, c, 0)));
6674
            }
6675
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6676
            {
6677
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
6678
                *strict_overflow_p = true;
6679
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
6680
                                  fold_convert (ctype,
6681
                                                const_binop (TRUNC_DIV_EXPR,
6682
                                                             c, op1, 0)));
6683
            }
6684
        }
6685
      break;
6686
 
6687
    default:
6688
      break;
6689
    }
6690
 
6691
  return 0;
6692
}
6693
 
6694
/* Return a node which has the indicated constant VALUE (either 0 or
6695
   1), and is of the indicated TYPE.  */
6696
 
6697
tree
6698
constant_boolean_node (int value, tree type)
6699
{
6700
  if (type == integer_type_node)
6701
    return value ? integer_one_node : integer_zero_node;
6702
  else if (type == boolean_type_node)
6703
    return value ? boolean_true_node : boolean_false_node;
6704
  else
6705
    return build_int_cst (type, value);
6706
}
6707
 
6708
 
6709
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6710
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6711
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6712
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6713
   COND is the first argument to CODE; otherwise (as in the example
6714
   given here), it is the second argument.  TYPE is the type of the
6715
   original expression.  Return NULL_TREE if no simplification is
6716
   possible.  */
6717
 
6718
static tree
6719
fold_binary_op_with_conditional_arg (location_t loc,
6720
                                     enum tree_code code,
6721
                                     tree type, tree op0, tree op1,
6722
                                     tree cond, tree arg, int cond_first_p)
6723
{
6724
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6725
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6726
  tree test, true_value, false_value;
6727
  tree lhs = NULL_TREE;
6728
  tree rhs = NULL_TREE;
6729
 
6730
  /* This transformation is only worthwhile if we don't have to wrap
6731
     arg in a SAVE_EXPR, and the operation can be simplified on at least
6732
     one of the branches once its pushed inside the COND_EXPR.  */
6733
  if (!TREE_CONSTANT (arg))
6734
    return NULL_TREE;
6735
 
6736
  if (TREE_CODE (cond) == COND_EXPR)
6737
    {
6738
      test = TREE_OPERAND (cond, 0);
6739
      true_value = TREE_OPERAND (cond, 1);
6740
      false_value = TREE_OPERAND (cond, 2);
6741
      /* If this operand throws an expression, then it does not make
6742
         sense to try to perform a logical or arithmetic operation
6743
         involving it.  */
6744
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
6745
        lhs = true_value;
6746
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
6747
        rhs = false_value;
6748
    }
6749
  else
6750
    {
6751
      tree testtype = TREE_TYPE (cond);
6752
      test = cond;
6753
      true_value = constant_boolean_node (true, testtype);
6754
      false_value = constant_boolean_node (false, testtype);
6755
    }
6756
 
6757
  arg = fold_convert_loc (loc, arg_type, arg);
6758
  if (lhs == 0)
6759
    {
6760
      true_value = fold_convert_loc (loc, cond_type, true_value);
6761
      if (cond_first_p)
6762
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
6763
      else
6764
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
6765
    }
6766
  if (rhs == 0)
6767
    {
6768
      false_value = fold_convert_loc (loc, cond_type, false_value);
6769
      if (cond_first_p)
6770
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
6771
      else
6772
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
6773
    }
6774
 
6775
  test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6776
  return fold_convert_loc (loc, type, test);
6777
}
6778
 
6779
 
6780
/* Subroutine of fold() that checks for the addition of +/- 0.0.
6781
 
6782
   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6783
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6784
   ADDEND is the same as X.
6785
 
6786
   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6787
   and finite.  The problematic cases are when X is zero, and its mode
6788
   has signed zeros.  In the case of rounding towards -infinity,
6789
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6790
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6791
 
6792
bool
6793
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6794
{
6795
  if (!real_zerop (addend))
6796
    return false;
6797
 
6798
  /* Don't allow the fold with -fsignaling-nans.  */
6799
  if (HONOR_SNANS (TYPE_MODE (type)))
6800
    return false;
6801
 
6802
  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6803
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6804
    return true;
6805
 
6806
  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6807
  if (TREE_CODE (addend) == REAL_CST
6808
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6809
    negate = !negate;
6810
 
6811
  /* The mode has signed zeros, and we have to honor their sign.
6812
     In this situation, there is only one case we can return true for.
6813
     X - 0 is the same as X unless rounding towards -infinity is
6814
     supported.  */
6815
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6816
}
6817
 
6818
/* Subroutine of fold() that checks comparisons of built-in math
6819
   functions against real constants.
6820
 
6821
   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6822
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6823
   is the type of the result and ARG0 and ARG1 are the operands of the
6824
   comparison.  ARG1 must be a TREE_REAL_CST.
6825
 
6826
   The function returns the constant folded tree if a simplification
6827
   can be made, and NULL_TREE otherwise.  */
6828
 
6829
static tree
6830
fold_mathfn_compare (location_t loc,
6831
                     enum built_in_function fcode, enum tree_code code,
6832
                     tree type, tree arg0, tree arg1)
6833
{
6834
  REAL_VALUE_TYPE c;
6835
 
6836
  if (BUILTIN_SQRT_P (fcode))
6837
    {
6838
      tree arg = CALL_EXPR_ARG (arg0, 0);
6839
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6840
 
6841
      c = TREE_REAL_CST (arg1);
6842
      if (REAL_VALUE_NEGATIVE (c))
6843
        {
6844
          /* sqrt(x) < y is always false, if y is negative.  */
6845
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6846
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6847
 
6848
          /* sqrt(x) > y is always true, if y is negative and we
6849
             don't care about NaNs, i.e. negative values of x.  */
6850
          if (code == NE_EXPR || !HONOR_NANS (mode))
6851
            return omit_one_operand_loc (loc, type, integer_one_node, arg);
6852
 
6853
          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6854
          return fold_build2_loc (loc, GE_EXPR, type, arg,
6855
                              build_real (TREE_TYPE (arg), dconst0));
6856
        }
6857
      else if (code == GT_EXPR || code == GE_EXPR)
6858
        {
6859
          REAL_VALUE_TYPE c2;
6860
 
6861
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6862
          real_convert (&c2, mode, &c2);
6863
 
6864
          if (REAL_VALUE_ISINF (c2))
6865
            {
6866
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
6867
              if (HONOR_INFINITIES (mode))
6868
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
6869
                                    build_real (TREE_TYPE (arg), c2));
6870
 
6871
              /* sqrt(x) > y is always false, when y is very large
6872
                 and we don't care about infinities.  */
6873
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6874
            }
6875
 
6876
          /* sqrt(x) > c is the same as x > c*c.  */
6877
          return fold_build2_loc (loc, code, type, arg,
6878
                              build_real (TREE_TYPE (arg), c2));
6879
        }
6880
      else if (code == LT_EXPR || code == LE_EXPR)
6881
        {
6882
          REAL_VALUE_TYPE c2;
6883
 
6884
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6885
          real_convert (&c2, mode, &c2);
6886
 
6887
          if (REAL_VALUE_ISINF (c2))
6888
            {
6889
              /* sqrt(x) < y is always true, when y is a very large
6890
                 value and we don't care about NaNs or Infinities.  */
6891
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6892
                return omit_one_operand_loc (loc, type, integer_one_node, arg);
6893
 
6894
              /* sqrt(x) < y is x != +Inf when y is very large and we
6895
                 don't care about NaNs.  */
6896
              if (! HONOR_NANS (mode))
6897
                return fold_build2_loc (loc, NE_EXPR, type, arg,
6898
                                    build_real (TREE_TYPE (arg), c2));
6899
 
6900
              /* sqrt(x) < y is x >= 0 when y is very large and we
6901
                 don't care about Infinities.  */
6902
              if (! HONOR_INFINITIES (mode))
6903
                return fold_build2_loc (loc, GE_EXPR, type, arg,
6904
                                    build_real (TREE_TYPE (arg), dconst0));
6905
 
6906
              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6907
              if (lang_hooks.decls.global_bindings_p () != 0
6908
                  || CONTAINS_PLACEHOLDER_P (arg))
6909
                return NULL_TREE;
6910
 
6911
              arg = save_expr (arg);
6912
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6913
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
6914
                                               build_real (TREE_TYPE (arg),
6915
                                                           dconst0)),
6916
                                  fold_build2_loc (loc, NE_EXPR, type, arg,
6917
                                               build_real (TREE_TYPE (arg),
6918
                                                           c2)));
6919
            }
6920
 
6921
          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6922
          if (! HONOR_NANS (mode))
6923
            return fold_build2_loc (loc, code, type, arg,
6924
                                build_real (TREE_TYPE (arg), c2));
6925
 
6926
          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6927
          if (lang_hooks.decls.global_bindings_p () == 0
6928
              && ! CONTAINS_PLACEHOLDER_P (arg))
6929
            {
6930
              arg = save_expr (arg);
6931
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6932
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
6933
                                               build_real (TREE_TYPE (arg),
6934
                                                           dconst0)),
6935
                                  fold_build2_loc (loc, code, type, arg,
6936
                                               build_real (TREE_TYPE (arg),
6937
                                                           c2)));
6938
            }
6939
        }
6940
    }
6941
 
6942
  return NULL_TREE;
6943
}
6944
 
6945
/* Subroutine of fold() that optimizes comparisons against Infinities,
6946
   either +Inf or -Inf.
6947
 
6948
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6949
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6950
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6951
 
6952
   The function returns the constant folded tree if a simplification
6953
   can be made, and NULL_TREE otherwise.  */
6954
 
6955
static tree
6956
fold_inf_compare (location_t loc, enum tree_code code, tree type,
6957
                  tree arg0, tree arg1)
6958
{
6959
  enum machine_mode mode;
6960
  REAL_VALUE_TYPE max;
6961
  tree temp;
6962
  bool neg;
6963
 
6964
  mode = TYPE_MODE (TREE_TYPE (arg0));
6965
 
6966
  /* For negative infinity swap the sense of the comparison.  */
6967
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6968
  if (neg)
6969
    code = swap_tree_comparison (code);
6970
 
6971
  switch (code)
6972
    {
6973
    case GT_EXPR:
6974
      /* x > +Inf is always false, if with ignore sNANs.  */
6975
      if (HONOR_SNANS (mode))
6976
        return NULL_TREE;
6977
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6978
 
6979
    case LE_EXPR:
6980
      /* x <= +Inf is always true, if we don't case about NaNs.  */
6981
      if (! HONOR_NANS (mode))
6982
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6983
 
6984
      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
6985
      if (lang_hooks.decls.global_bindings_p () == 0
6986
          && ! CONTAINS_PLACEHOLDER_P (arg0))
6987
        {
6988
          arg0 = save_expr (arg0);
6989
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6990
        }
6991
      break;
6992
 
6993
    case EQ_EXPR:
6994
    case GE_EXPR:
6995
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6996
      real_maxval (&max, neg, mode);
6997
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6998
                          arg0, build_real (TREE_TYPE (arg0), max));
6999
 
7000
    case LT_EXPR:
7001
      /* x < +Inf is always equal to x <= DBL_MAX.  */
7002
      real_maxval (&max, neg, mode);
7003
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7004
                          arg0, build_real (TREE_TYPE (arg0), max));
7005
 
7006
    case NE_EXPR:
7007
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
7008
      real_maxval (&max, neg, mode);
7009
      if (! HONOR_NANS (mode))
7010
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7011
                            arg0, build_real (TREE_TYPE (arg0), max));
7012
 
7013
      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7014
                          arg0, build_real (TREE_TYPE (arg0), max));
7015
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7016
 
7017
    default:
7018
      break;
7019
    }
7020
 
7021
  return NULL_TREE;
7022
}
7023
 
7024
/* Subroutine of fold() that optimizes comparisons of a division by
7025
   a nonzero integer constant against an integer constant, i.e.
7026
   X/C1 op C2.
7027
 
7028
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7029
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
7030
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
7031
 
7032
   The function returns the constant folded tree if a simplification
7033
   can be made, and NULL_TREE otherwise.  */
7034
 
7035
static tree
7036
fold_div_compare (location_t loc,
7037
                  enum tree_code code, tree type, tree arg0, tree arg1)
7038
{
7039
  tree prod, tmp, hi, lo;
7040
  tree arg00 = TREE_OPERAND (arg0, 0);
7041
  tree arg01 = TREE_OPERAND (arg0, 1);
7042
  unsigned HOST_WIDE_INT lpart;
7043
  HOST_WIDE_INT hpart;
7044
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7045
  bool neg_overflow;
7046
  int overflow;
7047
 
7048
  /* We have to do this the hard way to detect unsigned overflow.
7049
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
7050
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7051
                                   TREE_INT_CST_HIGH (arg01),
7052
                                   TREE_INT_CST_LOW (arg1),
7053
                                   TREE_INT_CST_HIGH (arg1),
7054
                                   &lpart, &hpart, unsigned_p);
7055
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7056
                                -1, overflow);
7057
  neg_overflow = false;
7058
 
7059
  if (unsigned_p)
7060
    {
7061
      tmp = int_const_binop (MINUS_EXPR, arg01,
7062
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
7063
      lo = prod;
7064
 
7065
      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
7066
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7067
                                       TREE_INT_CST_HIGH (prod),
7068
                                       TREE_INT_CST_LOW (tmp),
7069
                                       TREE_INT_CST_HIGH (tmp),
7070
                                       &lpart, &hpart, unsigned_p);
7071
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7072
                                  -1, overflow | TREE_OVERFLOW (prod));
7073
    }
7074
  else if (tree_int_cst_sgn (arg01) >= 0)
7075
    {
7076
      tmp = int_const_binop (MINUS_EXPR, arg01,
7077
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
7078
      switch (tree_int_cst_sgn (arg1))
7079
        {
7080
        case -1:
7081
          neg_overflow = true;
7082
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7083
          hi = prod;
7084
          break;
7085
 
7086
        case  0:
7087
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7088
          hi = tmp;
7089
          break;
7090
 
7091
        case  1:
7092
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7093
          lo = prod;
7094
          break;
7095
 
7096
        default:
7097
          gcc_unreachable ();
7098
        }
7099
    }
7100
  else
7101
    {
7102
      /* A negative divisor reverses the relational operators.  */
7103
      code = swap_tree_comparison (code);
7104
 
7105
      tmp = int_const_binop (PLUS_EXPR, arg01,
7106
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
7107
      switch (tree_int_cst_sgn (arg1))
7108
        {
7109
        case -1:
7110
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7111
          lo = prod;
7112
          break;
7113
 
7114
        case  0:
7115
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7116
          lo = tmp;
7117
          break;
7118
 
7119
        case  1:
7120
          neg_overflow = true;
7121
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7122
          hi = prod;
7123
          break;
7124
 
7125
        default:
7126
          gcc_unreachable ();
7127
        }
7128
    }
7129
 
7130
  switch (code)
7131
    {
7132
    case EQ_EXPR:
7133
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7134
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7135
      if (TREE_OVERFLOW (hi))
7136
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7137
      if (TREE_OVERFLOW (lo))
7138
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7139
      return build_range_check (loc, type, arg00, 1, lo, hi);
7140
 
7141
    case NE_EXPR:
7142
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7143
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7144
      if (TREE_OVERFLOW (hi))
7145
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7146
      if (TREE_OVERFLOW (lo))
7147
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7148
      return build_range_check (loc, type, arg00, 0, lo, hi);
7149
 
7150
    case LT_EXPR:
7151
      if (TREE_OVERFLOW (lo))
7152
        {
7153
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
7154
          return omit_one_operand_loc (loc, type, tmp, arg00);
7155
        }
7156
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7157
 
7158
    case LE_EXPR:
7159
      if (TREE_OVERFLOW (hi))
7160
        {
7161
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
7162
          return omit_one_operand_loc (loc, type, tmp, arg00);
7163
        }
7164
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7165
 
7166
    case GT_EXPR:
7167
      if (TREE_OVERFLOW (hi))
7168
        {
7169
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
7170
          return omit_one_operand_loc (loc, type, tmp, arg00);
7171
        }
7172
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7173
 
7174
    case GE_EXPR:
7175
      if (TREE_OVERFLOW (lo))
7176
        {
7177
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
7178
          return omit_one_operand_loc (loc, type, tmp, arg00);
7179
        }
7180
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7181
 
7182
    default:
7183
      break;
7184
    }
7185
 
7186
  return NULL_TREE;
7187
}
7188
 
7189
 
7190
/* If CODE with arguments ARG0 and ARG1 represents a single bit
7191
   equality/inequality test, then return a simplified form of the test
7192
   using a sign testing.  Otherwise return NULL.  TYPE is the desired
7193
   result type.  */
7194
 
7195
static tree
7196
fold_single_bit_test_into_sign_test (location_t loc,
7197
                                     enum tree_code code, tree arg0, tree arg1,
7198
                                     tree result_type)
7199
{
7200
  /* If this is testing a single bit, we can optimize the test.  */
7201
  if ((code == NE_EXPR || code == EQ_EXPR)
7202
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7203
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
7204
    {
7205
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
7206
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
7207
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7208
 
7209
      if (arg00 != NULL_TREE
7210
          /* This is only a win if casting to a signed type is cheap,
7211
             i.e. when arg00's type is not a partial mode.  */
7212
          && TYPE_PRECISION (TREE_TYPE (arg00))
7213
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7214
        {
7215
          tree stype = signed_type_for (TREE_TYPE (arg00));
7216
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7217
                              result_type,
7218
                              fold_convert_loc (loc, stype, arg00),
7219
                              build_int_cst (stype, 0));
7220
        }
7221
    }
7222
 
7223
  return NULL_TREE;
7224
}
7225
 
7226
/* If CODE with arguments ARG0 and ARG1 represents a single bit
7227
   equality/inequality test, then return a simplified form of
7228
   the test using shifts and logical operations.  Otherwise return
7229
   NULL.  TYPE is the desired result type.  */
7230
 
7231
tree
7232
fold_single_bit_test (location_t loc, enum tree_code code,
7233
                      tree arg0, tree arg1, tree result_type)
7234
{
7235
  /* If this is testing a single bit, we can optimize the test.  */
7236
  if ((code == NE_EXPR || code == EQ_EXPR)
7237
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7238
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
7239
    {
7240
      tree inner = TREE_OPERAND (arg0, 0);
7241
      tree type = TREE_TYPE (arg0);
7242
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7243
      enum machine_mode operand_mode = TYPE_MODE (type);
7244
      int ops_unsigned;
7245
      tree signed_type, unsigned_type, intermediate_type;
7246
      tree tem, one;
7247
 
7248
      /* First, see if we can fold the single bit test into a sign-bit
7249
         test.  */
7250
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7251
                                                 result_type);
7252
      if (tem)
7253
        return tem;
7254
 
7255
      /* Otherwise we have (A & C) != 0 where C is a single bit,
7256
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
7257
         Similarly for (A & C) == 0.  */
7258
 
7259
      /* If INNER is a right shift of a constant and it plus BITNUM does
7260
         not overflow, adjust BITNUM and INNER.  */
7261
      if (TREE_CODE (inner) == RSHIFT_EXPR
7262
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7263
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7264
          && bitnum < TYPE_PRECISION (type)
7265
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7266
                                   bitnum - TYPE_PRECISION (type)))
7267
        {
7268
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7269
          inner = TREE_OPERAND (inner, 0);
7270
        }
7271
 
7272
      /* If we are going to be able to omit the AND below, we must do our
7273
         operations as unsigned.  If we must use the AND, we have a choice.
7274
         Normally unsigned is faster, but for some machines signed is.  */
7275
#ifdef LOAD_EXTEND_OP
7276
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7277
                      && !flag_syntax_only) ? 0 : 1;
7278
#else
7279
      ops_unsigned = 1;
7280
#endif
7281
 
7282
      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7283
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7284
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7285
      inner = fold_convert_loc (loc, intermediate_type, inner);
7286
 
7287
      if (bitnum != 0)
7288
        inner = build2 (RSHIFT_EXPR, intermediate_type,
7289
                        inner, size_int (bitnum));
7290
 
7291
      one = build_int_cst (intermediate_type, 1);
7292
 
7293
      if (code == EQ_EXPR)
7294
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7295
 
7296
      /* Put the AND last so it can combine with more things.  */
7297
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7298
 
7299
      /* Make sure to return the proper type.  */
7300
      inner = fold_convert_loc (loc, result_type, inner);
7301
 
7302
      return inner;
7303
    }
7304
  return NULL_TREE;
7305
}
7306
 
7307
/* Check whether we are allowed to reorder operands arg0 and arg1,
7308
   such that the evaluation of arg1 occurs before arg0.  */
7309
 
7310
static bool
7311
reorder_operands_p (const_tree arg0, const_tree arg1)
7312
{
7313
  if (! flag_evaluation_order)
7314
      return true;
7315
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7316
    return true;
7317
  return ! TREE_SIDE_EFFECTS (arg0)
7318
         && ! TREE_SIDE_EFFECTS (arg1);
7319
}
7320
 
7321
/* Test whether it is preferable two swap two operands, ARG0 and
7322
   ARG1, for example because ARG0 is an integer constant and ARG1
7323
   isn't.  If REORDER is true, only recommend swapping if we can
7324
   evaluate the operands in reverse order.  */
7325
 
7326
bool
7327
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7328
{
7329
  STRIP_SIGN_NOPS (arg0);
7330
  STRIP_SIGN_NOPS (arg1);
7331
 
7332
  if (TREE_CODE (arg1) == INTEGER_CST)
7333
    return 0;
7334
  if (TREE_CODE (arg0) == INTEGER_CST)
7335
    return 1;
7336
 
7337
  if (TREE_CODE (arg1) == REAL_CST)
7338
    return 0;
7339
  if (TREE_CODE (arg0) == REAL_CST)
7340
    return 1;
7341
 
7342
  if (TREE_CODE (arg1) == FIXED_CST)
7343
    return 0;
7344
  if (TREE_CODE (arg0) == FIXED_CST)
7345
    return 1;
7346
 
7347
  if (TREE_CODE (arg1) == COMPLEX_CST)
7348
    return 0;
7349
  if (TREE_CODE (arg0) == COMPLEX_CST)
7350
    return 1;
7351
 
7352
  if (TREE_CONSTANT (arg1))
7353
    return 0;
7354
  if (TREE_CONSTANT (arg0))
7355
    return 1;
7356
 
7357
  if (optimize_function_for_size_p (cfun))
7358
    return 0;
7359
 
7360
  if (reorder && flag_evaluation_order
7361
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7362
    return 0;
7363
 
7364
  /* It is preferable to swap two SSA_NAME to ensure a canonical form
7365
     for commutative and comparison operators.  Ensuring a canonical
7366
     form allows the optimizers to find additional redundancies without
7367
     having to explicitly check for both orderings.  */
7368
  if (TREE_CODE (arg0) == SSA_NAME
7369
      && TREE_CODE (arg1) == SSA_NAME
7370
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7371
    return 1;
7372
 
7373
  /* Put SSA_NAMEs last.  */
7374
  if (TREE_CODE (arg1) == SSA_NAME)
7375
    return 0;
7376
  if (TREE_CODE (arg0) == SSA_NAME)
7377
    return 1;
7378
 
7379
  /* Put variables last.  */
7380
  if (DECL_P (arg1))
7381
    return 0;
7382
  if (DECL_P (arg0))
7383
    return 1;
7384
 
7385
  return 0;
7386
}
7387
 
7388
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7389
   ARG0 is extended to a wider type.  */
7390
 
7391
static tree
7392
fold_widened_comparison (location_t loc, enum tree_code code,
7393
                         tree type, tree arg0, tree arg1)
7394
{
7395
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7396
  tree arg1_unw;
7397
  tree shorter_type, outer_type;
7398
  tree min, max;
7399
  bool above, below;
7400
 
7401
  if (arg0_unw == arg0)
7402
    return NULL_TREE;
7403
  shorter_type = TREE_TYPE (arg0_unw);
7404
 
7405
#ifdef HAVE_canonicalize_funcptr_for_compare
7406
  /* Disable this optimization if we're casting a function pointer
7407
     type on targets that require function pointer canonicalization.  */
7408
  if (HAVE_canonicalize_funcptr_for_compare
7409
      && TREE_CODE (shorter_type) == POINTER_TYPE
7410
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7411
    return NULL_TREE;
7412
#endif
7413
 
7414
  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7415
    return NULL_TREE;
7416
 
7417
  arg1_unw = get_unwidened (arg1, NULL_TREE);
7418
 
7419
  /* If possible, express the comparison in the shorter mode.  */
7420
  if ((code == EQ_EXPR || code == NE_EXPR
7421
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7422
      && (TREE_TYPE (arg1_unw) == shorter_type
7423
          || ((TYPE_PRECISION (shorter_type)
7424
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7425
              && (TYPE_UNSIGNED (shorter_type)
7426
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7427
          || (TREE_CODE (arg1_unw) == INTEGER_CST
7428
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
7429
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7430
              && int_fits_type_p (arg1_unw, shorter_type))))
7431
    return fold_build2_loc (loc, code, type, arg0_unw,
7432
                        fold_convert_loc (loc, shorter_type, arg1_unw));
7433
 
7434
  if (TREE_CODE (arg1_unw) != INTEGER_CST
7435
      || TREE_CODE (shorter_type) != INTEGER_TYPE
7436
      || !int_fits_type_p (arg1_unw, shorter_type))
7437
    return NULL_TREE;
7438
 
7439
  /* If we are comparing with the integer that does not fit into the range
7440
     of the shorter type, the result is known.  */
7441
  outer_type = TREE_TYPE (arg1_unw);
7442
  min = lower_bound_in_type (outer_type, shorter_type);
7443
  max = upper_bound_in_type (outer_type, shorter_type);
7444
 
7445
  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7446
                                                   max, arg1_unw));
7447
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7448
                                                   arg1_unw, min));
7449
 
7450
  switch (code)
7451
    {
7452
    case EQ_EXPR:
7453
      if (above || below)
7454
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7455
      break;
7456
 
7457
    case NE_EXPR:
7458
      if (above || below)
7459
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7460
      break;
7461
 
7462
    case LT_EXPR:
7463
    case LE_EXPR:
7464
      if (above)
7465
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7466
      else if (below)
7467
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7468
 
7469
    case GT_EXPR:
7470
    case GE_EXPR:
7471
      if (above)
7472
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7473
      else if (below)
7474
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7475
 
7476
    default:
7477
      break;
7478
    }
7479
 
7480
  return NULL_TREE;
7481
}
7482
 
7483
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7484
   ARG0 just the signedness is changed.  */
7485
 
7486
static tree
7487
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7488
                              tree arg0, tree arg1)
7489
{
7490
  tree arg0_inner;
7491
  tree inner_type, outer_type;
7492
 
7493
  if (!CONVERT_EXPR_P (arg0))
7494
    return NULL_TREE;
7495
 
7496
  outer_type = TREE_TYPE (arg0);
7497
  arg0_inner = TREE_OPERAND (arg0, 0);
7498
  inner_type = TREE_TYPE (arg0_inner);
7499
 
7500
#ifdef HAVE_canonicalize_funcptr_for_compare
7501
  /* Disable this optimization if we're casting a function pointer
7502
     type on targets that require function pointer canonicalization.  */
7503
  if (HAVE_canonicalize_funcptr_for_compare
7504
      && TREE_CODE (inner_type) == POINTER_TYPE
7505
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7506
    return NULL_TREE;
7507
#endif
7508
 
7509
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7510
    return NULL_TREE;
7511
 
7512
  if (TREE_CODE (arg1) != INTEGER_CST
7513
      && !(CONVERT_EXPR_P (arg1)
7514
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7515
    return NULL_TREE;
7516
 
7517
  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7518
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7519
      && code != NE_EXPR
7520
      && code != EQ_EXPR)
7521
    return NULL_TREE;
7522
 
7523
  if (TREE_CODE (arg1) == INTEGER_CST)
7524
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7525
                                  TREE_INT_CST_HIGH (arg1), 0,
7526
                                  TREE_OVERFLOW (arg1));
7527
  else
7528
    arg1 = fold_convert_loc (loc, inner_type, arg1);
7529
 
7530
  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7531
}
7532
 
7533
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7534
   step of the array.  Reconstructs s and delta in the case of s *
7535
   delta being an integer constant (and thus already folded).  ADDR is
7536
   the address. MULT is the multiplicative expression.  If the
7537
   function succeeds, the new address expression is returned.
7538
   Otherwise NULL_TREE is returned.  LOC is the location of the
7539
   resulting expression.  */
7540
 
7541
static tree
7542
try_move_mult_to_index (location_t loc, tree addr, tree op1)
7543
{
7544
  tree s, delta, step;
7545
  tree ref = TREE_OPERAND (addr, 0), pref;
7546
  tree ret, pos;
7547
  tree itype;
7548
  bool mdim = false;
7549
 
7550
  /*  Strip the nops that might be added when converting op1 to sizetype. */
7551
  STRIP_NOPS (op1);
7552
 
7553
  /* Canonicalize op1 into a possibly non-constant delta
7554
     and an INTEGER_CST s.  */
7555
  if (TREE_CODE (op1) == MULT_EXPR)
7556
    {
7557
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7558
 
7559
      STRIP_NOPS (arg0);
7560
      STRIP_NOPS (arg1);
7561
 
7562
      if (TREE_CODE (arg0) == INTEGER_CST)
7563
        {
7564
          s = arg0;
7565
          delta = arg1;
7566
        }
7567
      else if (TREE_CODE (arg1) == INTEGER_CST)
7568
        {
7569
          s = arg1;
7570
          delta = arg0;
7571
        }
7572
      else
7573
        return NULL_TREE;
7574
    }
7575
  else if (TREE_CODE (op1) == INTEGER_CST)
7576
    {
7577
      delta = op1;
7578
      s = NULL_TREE;
7579
    }
7580
  else
7581
    {
7582
      /* Simulate we are delta * 1.  */
7583
      delta = op1;
7584
      s = integer_one_node;
7585
    }
7586
 
7587
  for (;; ref = TREE_OPERAND (ref, 0))
7588
    {
7589
      if (TREE_CODE (ref) == ARRAY_REF)
7590
        {
7591
          tree domain;
7592
 
7593
          /* Remember if this was a multi-dimensional array.  */
7594
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7595
            mdim = true;
7596
 
7597
          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7598
          if (! domain)
7599
            continue;
7600
          itype = TREE_TYPE (domain);
7601
 
7602
          step = array_ref_element_size (ref);
7603
          if (TREE_CODE (step) != INTEGER_CST)
7604
            continue;
7605
 
7606
          if (s)
7607
            {
7608
              if (! tree_int_cst_equal (step, s))
7609
                continue;
7610
            }
7611
          else
7612
            {
7613
              /* Try if delta is a multiple of step.  */
7614
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7615
              if (! tmp)
7616
                continue;
7617
              delta = tmp;
7618
            }
7619
 
7620
          /* Only fold here if we can verify we do not overflow one
7621
             dimension of a multi-dimensional array.  */
7622
          if (mdim)
7623
            {
7624
              tree tmp;
7625
 
7626
              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7627
                  || !TYPE_MAX_VALUE (domain)
7628
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7629
                continue;
7630
 
7631
              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7632
                                     fold_convert_loc (loc, itype,
7633
                                                       TREE_OPERAND (ref, 1)),
7634
                                     fold_convert_loc (loc, itype, delta));
7635
              if (!tmp
7636
                  || TREE_CODE (tmp) != INTEGER_CST
7637
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7638
                continue;
7639
            }
7640
 
7641
          break;
7642
        }
7643
      else
7644
        mdim = false;
7645
 
7646
      if (!handled_component_p (ref))
7647
        return NULL_TREE;
7648
    }
7649
 
7650
  /* We found the suitable array reference.  So copy everything up to it,
7651
     and replace the index.  */
7652
 
7653
  pref = TREE_OPERAND (addr, 0);
7654
  ret = copy_node (pref);
7655
  SET_EXPR_LOCATION (ret, loc);
7656
  pos = ret;
7657
 
7658
  while (pref != ref)
7659
    {
7660
      pref = TREE_OPERAND (pref, 0);
7661
      TREE_OPERAND (pos, 0) = copy_node (pref);
7662
      pos = TREE_OPERAND (pos, 0);
7663
    }
7664
 
7665
  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7666
                                       fold_convert_loc (loc, itype,
7667
                                                         TREE_OPERAND (pos, 1)),
7668
                                       fold_convert_loc (loc, itype, delta));
7669
 
7670
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7671
}
7672
 
7673
 
7674
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7675
   means A >= Y && A != MAX, but in this case we know that
7676
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7677
 
7678
static tree
7679
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7680
{
7681
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7682
 
7683
  if (TREE_CODE (bound) == LT_EXPR)
7684
    a = TREE_OPERAND (bound, 0);
7685
  else if (TREE_CODE (bound) == GT_EXPR)
7686
    a = TREE_OPERAND (bound, 1);
7687
  else
7688
    return NULL_TREE;
7689
 
7690
  typea = TREE_TYPE (a);
7691
  if (!INTEGRAL_TYPE_P (typea)
7692
      && !POINTER_TYPE_P (typea))
7693
    return NULL_TREE;
7694
 
7695
  if (TREE_CODE (ineq) == LT_EXPR)
7696
    {
7697
      a1 = TREE_OPERAND (ineq, 1);
7698
      y = TREE_OPERAND (ineq, 0);
7699
    }
7700
  else if (TREE_CODE (ineq) == GT_EXPR)
7701
    {
7702
      a1 = TREE_OPERAND (ineq, 0);
7703
      y = TREE_OPERAND (ineq, 1);
7704
    }
7705
  else
7706
    return NULL_TREE;
7707
 
7708
  if (TREE_TYPE (a1) != typea)
7709
    return NULL_TREE;
7710
 
7711
  if (POINTER_TYPE_P (typea))
7712
    {
7713
      /* Convert the pointer types into integer before taking the difference.  */
7714
      tree ta = fold_convert_loc (loc, ssizetype, a);
7715
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7716
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7717
    }
7718
  else
7719
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7720
 
7721
  if (!diff || !integer_onep (diff))
7722
   return NULL_TREE;
7723
 
7724
  return fold_build2_loc (loc, GE_EXPR, type, a, y);
7725
}
7726
 
7727
/* Fold a sum or difference of at least one multiplication.
7728
   Returns the folded tree or NULL if no simplification could be made.  */
7729
 
7730
static tree
7731
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7732
                          tree arg0, tree arg1)
7733
{
7734
  tree arg00, arg01, arg10, arg11;
7735
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7736
 
7737
  /* (A * C) +- (B * C) -> (A+-B) * C.
7738
     (A * C) +- A -> A * (C+-1).
7739
     We are most concerned about the case where C is a constant,
7740
     but other combinations show up during loop reduction.  Since
7741
     it is not difficult, try all four possibilities.  */
7742
 
7743
  if (TREE_CODE (arg0) == MULT_EXPR)
7744
    {
7745
      arg00 = TREE_OPERAND (arg0, 0);
7746
      arg01 = TREE_OPERAND (arg0, 1);
7747
    }
7748
  else if (TREE_CODE (arg0) == INTEGER_CST)
7749
    {
7750
      arg00 = build_one_cst (type);
7751
      arg01 = arg0;
7752
    }
7753
  else
7754
    {
7755
      /* We cannot generate constant 1 for fract.  */
7756
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7757
        return NULL_TREE;
7758
      arg00 = arg0;
7759
      arg01 = build_one_cst (type);
7760
    }
7761
  if (TREE_CODE (arg1) == MULT_EXPR)
7762
    {
7763
      arg10 = TREE_OPERAND (arg1, 0);
7764
      arg11 = TREE_OPERAND (arg1, 1);
7765
    }
7766
  else if (TREE_CODE (arg1) == INTEGER_CST)
7767
    {
7768
      arg10 = build_one_cst (type);
7769
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7770
         the purpose of this canonicalization.  */
7771
      if (TREE_INT_CST_HIGH (arg1) == -1
7772
          && negate_expr_p (arg1)
7773
          && code == PLUS_EXPR)
7774
        {
7775
          arg11 = negate_expr (arg1);
7776
          code = MINUS_EXPR;
7777
        }
7778
      else
7779
        arg11 = arg1;
7780
    }
7781
  else
7782
    {
7783
      /* We cannot generate constant 1 for fract.  */
7784
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7785
        return NULL_TREE;
7786
      arg10 = arg1;
7787
      arg11 = build_one_cst (type);
7788
    }
7789
  same = NULL_TREE;
7790
 
7791
  if (operand_equal_p (arg01, arg11, 0))
7792
    same = arg01, alt0 = arg00, alt1 = arg10;
7793
  else if (operand_equal_p (arg00, arg10, 0))
7794
    same = arg00, alt0 = arg01, alt1 = arg11;
7795
  else if (operand_equal_p (arg00, arg11, 0))
7796
    same = arg00, alt0 = arg01, alt1 = arg10;
7797
  else if (operand_equal_p (arg01, arg10, 0))
7798
    same = arg01, alt0 = arg00, alt1 = arg11;
7799
 
7800
  /* No identical multiplicands; see if we can find a common
7801
     power-of-two factor in non-power-of-two multiplies.  This
7802
     can help in multi-dimensional array access.  */
7803
  else if (host_integerp (arg01, 0)
7804
           && host_integerp (arg11, 0))
7805
    {
7806
      HOST_WIDE_INT int01, int11, tmp;
7807
      bool swap = false;
7808
      tree maybe_same;
7809
      int01 = TREE_INT_CST_LOW (arg01);
7810
      int11 = TREE_INT_CST_LOW (arg11);
7811
 
7812
      /* Move min of absolute values to int11.  */
7813
      if ((int01 >= 0 ? int01 : -int01)
7814
          < (int11 >= 0 ? int11 : -int11))
7815
        {
7816
          tmp = int01, int01 = int11, int11 = tmp;
7817
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
7818
          maybe_same = arg01;
7819
          swap = true;
7820
        }
7821
      else
7822
        maybe_same = arg11;
7823
 
7824
      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7825
          /* The remainder should not be a constant, otherwise we
7826
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7827
             increased the number of multiplications necessary.  */
7828
          && TREE_CODE (arg10) != INTEGER_CST)
7829
        {
7830
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7831
                              build_int_cst (TREE_TYPE (arg00),
7832
                                             int01 / int11));
7833
          alt1 = arg10;
7834
          same = maybe_same;
7835
          if (swap)
7836
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7837
        }
7838
    }
7839
 
7840
  if (same)
7841
    return fold_build2_loc (loc, MULT_EXPR, type,
7842
                        fold_build2_loc (loc, code, type,
7843
                                     fold_convert_loc (loc, type, alt0),
7844
                                     fold_convert_loc (loc, type, alt1)),
7845
                        fold_convert_loc (loc, type, same));
7846
 
7847
  return NULL_TREE;
7848
}
7849
 
7850
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7851
   specified by EXPR into the buffer PTR of length LEN bytes.
7852
   Return the number of bytes placed in the buffer, or zero
7853
   upon failure.  */
7854
 
7855
static int
7856
native_encode_int (const_tree expr, unsigned char *ptr, int len)
7857
{
7858
  tree type = TREE_TYPE (expr);
7859
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7860
  int byte, offset, word, words;
7861
  unsigned char value;
7862
 
7863
  if (total_bytes > len)
7864
    return 0;
7865
  words = total_bytes / UNITS_PER_WORD;
7866
 
7867
  for (byte = 0; byte < total_bytes; byte++)
7868
    {
7869
      int bitpos = byte * BITS_PER_UNIT;
7870
      if (bitpos < HOST_BITS_PER_WIDE_INT)
7871
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7872
      else
7873
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7874
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7875
 
7876
      if (total_bytes > UNITS_PER_WORD)
7877
        {
7878
          word = byte / UNITS_PER_WORD;
7879
          if (WORDS_BIG_ENDIAN)
7880
            word = (words - 1) - word;
7881
          offset = word * UNITS_PER_WORD;
7882
          if (BYTES_BIG_ENDIAN)
7883
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7884
          else
7885
            offset += byte % UNITS_PER_WORD;
7886
        }
7887
      else
7888
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7889
      ptr[offset] = value;
7890
    }
7891
  return total_bytes;
7892
}
7893
 
7894
 
7895
/* Subroutine of native_encode_expr.  Encode the REAL_CST
7896
   specified by EXPR into the buffer PTR of length LEN bytes.
7897
   Return the number of bytes placed in the buffer, or zero
7898
   upon failure.  */
7899
 
7900
static int
7901
native_encode_real (const_tree expr, unsigned char *ptr, int len)
7902
{
7903
  tree type = TREE_TYPE (expr);
7904
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7905
  int byte, offset, word, words, bitpos;
7906
  unsigned char value;
7907
 
7908
  /* There are always 32 bits in each long, no matter the size of
7909
     the hosts long.  We handle floating point representations with
7910
     up to 192 bits.  */
7911
  long tmp[6];
7912
 
7913
  if (total_bytes > len)
7914
    return 0;
7915
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7916
 
7917
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7918
 
7919
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7920
       bitpos += BITS_PER_UNIT)
7921
    {
7922
      byte = (bitpos / BITS_PER_UNIT) & 3;
7923
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7924
 
7925
      if (UNITS_PER_WORD < 4)
7926
        {
7927
          word = byte / UNITS_PER_WORD;
7928
          if (WORDS_BIG_ENDIAN)
7929
            word = (words - 1) - word;
7930
          offset = word * UNITS_PER_WORD;
7931
          if (BYTES_BIG_ENDIAN)
7932
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7933
          else
7934
            offset += byte % UNITS_PER_WORD;
7935
        }
7936
      else
7937
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7938
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7939
    }
7940
  return total_bytes;
7941
}
7942
 
7943
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7944
   specified by EXPR into the buffer PTR of length LEN bytes.
7945
   Return the number of bytes placed in the buffer, or zero
7946
   upon failure.  */
7947
 
7948
static int
7949
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7950
{
7951
  int rsize, isize;
7952
  tree part;
7953
 
7954
  part = TREE_REALPART (expr);
7955
  rsize = native_encode_expr (part, ptr, len);
7956
  if (rsize == 0)
7957
    return 0;
7958
  part = TREE_IMAGPART (expr);
7959
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
7960
  if (isize != rsize)
7961
    return 0;
7962
  return rsize + isize;
7963
}
7964
 
7965
 
7966
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7967
   specified by EXPR into the buffer PTR of length LEN bytes.
7968
   Return the number of bytes placed in the buffer, or zero
7969
   upon failure.  */
7970
 
7971
static int
7972
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7973
{
7974
  int i, size, offset, count;
7975
  tree itype, elem, elements;
7976
 
7977
  offset = 0;
7978
  elements = TREE_VECTOR_CST_ELTS (expr);
7979
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7980
  itype = TREE_TYPE (TREE_TYPE (expr));
7981
  size = GET_MODE_SIZE (TYPE_MODE (itype));
7982
  for (i = 0; i < count; i++)
7983
    {
7984
      if (elements)
7985
        {
7986
          elem = TREE_VALUE (elements);
7987
          elements = TREE_CHAIN (elements);
7988
        }
7989
      else
7990
        elem = NULL_TREE;
7991
 
7992
      if (elem)
7993
        {
7994
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7995
            return 0;
7996
        }
7997
      else
7998
        {
7999
          if (offset + size > len)
8000
            return 0;
8001
          memset (ptr+offset, 0, size);
8002
        }
8003
      offset += size;
8004
    }
8005
  return offset;
8006
}
8007
 
8008
 
8009
/* Subroutine of native_encode_expr.  Encode the STRING_CST
8010
   specified by EXPR into the buffer PTR of length LEN bytes.
8011
   Return the number of bytes placed in the buffer, or zero
8012
   upon failure.  */
8013
 
8014
static int
8015
native_encode_string (const_tree expr, unsigned char *ptr, int len)
8016
{
8017
  tree type = TREE_TYPE (expr);
8018
  HOST_WIDE_INT total_bytes;
8019
 
8020
  if (TREE_CODE (type) != ARRAY_TYPE
8021
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8022
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8023
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8024
    return 0;
8025
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8026
  if (total_bytes > len)
8027
    return 0;
8028
  if (TREE_STRING_LENGTH (expr) < total_bytes)
8029
    {
8030
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8031
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
8032
              total_bytes - TREE_STRING_LENGTH (expr));
8033
    }
8034
  else
8035
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8036
  return total_bytes;
8037
}
8038
 
8039
 
8040
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
8041
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
8042
   buffer PTR of length LEN bytes.  Return the number of bytes
8043
   placed in the buffer, or zero upon failure.  */
8044
 
8045
int
8046
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8047
{
8048
  switch (TREE_CODE (expr))
8049
    {
8050
    case INTEGER_CST:
8051
      return native_encode_int (expr, ptr, len);
8052
 
8053
    case REAL_CST:
8054
      return native_encode_real (expr, ptr, len);
8055
 
8056
    case COMPLEX_CST:
8057
      return native_encode_complex (expr, ptr, len);
8058
 
8059
    case VECTOR_CST:
8060
      return native_encode_vector (expr, ptr, len);
8061
 
8062
    case STRING_CST:
8063
      return native_encode_string (expr, ptr, len);
8064
 
8065
    default:
8066
      return 0;
8067
    }
8068
}
8069
 
8070
 
8071
/* Subroutine of native_interpret_expr.  Interpret the contents of
8072
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8073
   If the buffer cannot be interpreted, return NULL_TREE.  */
8074
 
8075
static tree
8076
native_interpret_int (tree type, const unsigned char *ptr, int len)
8077
{
8078
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8079
  int byte, offset, word, words;
8080
  unsigned char value;
8081
  unsigned int HOST_WIDE_INT lo = 0;
8082
  HOST_WIDE_INT hi = 0;
8083
 
8084
  if (total_bytes > len)
8085
    return NULL_TREE;
8086
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8087
    return NULL_TREE;
8088
  words = total_bytes / UNITS_PER_WORD;
8089
 
8090
  for (byte = 0; byte < total_bytes; byte++)
8091
    {
8092
      int bitpos = byte * BITS_PER_UNIT;
8093
      if (total_bytes > UNITS_PER_WORD)
8094
        {
8095
          word = byte / UNITS_PER_WORD;
8096
          if (WORDS_BIG_ENDIAN)
8097
            word = (words - 1) - word;
8098
          offset = word * UNITS_PER_WORD;
8099
          if (BYTES_BIG_ENDIAN)
8100
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8101
          else
8102
            offset += byte % UNITS_PER_WORD;
8103
        }
8104
      else
8105
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8106
      value = ptr[offset];
8107
 
8108
      if (bitpos < HOST_BITS_PER_WIDE_INT)
8109
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8110
      else
8111
        hi |= (unsigned HOST_WIDE_INT) value
8112
              << (bitpos - HOST_BITS_PER_WIDE_INT);
8113
    }
8114
 
8115
  return build_int_cst_wide_type (type, lo, hi);
8116
}
8117
 
8118
 
8119
/* Subroutine of native_interpret_expr.  Interpret the contents of
8120
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
8121
   If the buffer cannot be interpreted, return NULL_TREE.  */
8122
 
8123
static tree
8124
native_interpret_real (tree type, const unsigned char *ptr, int len)
8125
{
8126
  enum machine_mode mode = TYPE_MODE (type);
8127
  int total_bytes = GET_MODE_SIZE (mode);
8128
  int byte, offset, word, words, bitpos;
8129
  unsigned char value;
8130
  /* There are always 32 bits in each long, no matter the size of
8131
     the hosts long.  We handle floating point representations with
8132
     up to 192 bits.  */
8133
  REAL_VALUE_TYPE r;
8134
  long tmp[6];
8135
 
8136
  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8137
  if (total_bytes > len || total_bytes > 24)
8138
    return NULL_TREE;
8139
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8140
 
8141
  memset (tmp, 0, sizeof (tmp));
8142
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8143
       bitpos += BITS_PER_UNIT)
8144
    {
8145
      byte = (bitpos / BITS_PER_UNIT) & 3;
8146
      if (UNITS_PER_WORD < 4)
8147
        {
8148
          word = byte / UNITS_PER_WORD;
8149
          if (WORDS_BIG_ENDIAN)
8150
            word = (words - 1) - word;
8151
          offset = word * UNITS_PER_WORD;
8152
          if (BYTES_BIG_ENDIAN)
8153
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8154
          else
8155
            offset += byte % UNITS_PER_WORD;
8156
        }
8157
      else
8158
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8159
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8160
 
8161
      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8162
    }
8163
 
8164
  real_from_target (&r, tmp, mode);
8165
  return build_real (type, r);
8166
}
8167
 
8168
 
8169
/* Subroutine of native_interpret_expr.  Interpret the contents of
8170
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8171
   If the buffer cannot be interpreted, return NULL_TREE.  */
8172
 
8173
static tree
8174
native_interpret_complex (tree type, const unsigned char *ptr, int len)
8175
{
8176
  tree etype, rpart, ipart;
8177
  int size;
8178
 
8179
  etype = TREE_TYPE (type);
8180
  size = GET_MODE_SIZE (TYPE_MODE (etype));
8181
  if (size * 2 > len)
8182
    return NULL_TREE;
8183
  rpart = native_interpret_expr (etype, ptr, size);
8184
  if (!rpart)
8185
    return NULL_TREE;
8186
  ipart = native_interpret_expr (etype, ptr+size, size);
8187
  if (!ipart)
8188
    return NULL_TREE;
8189
  return build_complex (type, rpart, ipart);
8190
}
8191
 
8192
 
8193
/* Subroutine of native_interpret_expr.  Interpret the contents of
8194
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8195
   If the buffer cannot be interpreted, return NULL_TREE.  */
8196
 
8197
static tree
8198
native_interpret_vector (tree type, const unsigned char *ptr, int len)
8199
{
8200
  tree etype, elem, elements;
8201
  int i, size, count;
8202
 
8203
  etype = TREE_TYPE (type);
8204
  size = GET_MODE_SIZE (TYPE_MODE (etype));
8205
  count = TYPE_VECTOR_SUBPARTS (type);
8206
  if (size * count > len)
8207
    return NULL_TREE;
8208
 
8209
  elements = NULL_TREE;
8210
  for (i = count - 1; i >= 0; i--)
8211
    {
8212
      elem = native_interpret_expr (etype, ptr+(i*size), size);
8213
      if (!elem)
8214
        return NULL_TREE;
8215
      elements = tree_cons (NULL_TREE, elem, elements);
8216
    }
8217
  return build_vector (type, elements);
8218
}
8219
 
8220
 
8221
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
8222
   the buffer PTR of length LEN as a constant of type TYPE.  For
8223
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8224
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8225
   return NULL_TREE.  */
8226
 
8227
tree
8228
native_interpret_expr (tree type, const unsigned char *ptr, int len)
8229
{
8230
  switch (TREE_CODE (type))
8231
    {
8232
    case INTEGER_TYPE:
8233
    case ENUMERAL_TYPE:
8234
    case BOOLEAN_TYPE:
8235
      return native_interpret_int (type, ptr, len);
8236
 
8237
    case REAL_TYPE:
8238
      return native_interpret_real (type, ptr, len);
8239
 
8240
    case COMPLEX_TYPE:
8241
      return native_interpret_complex (type, ptr, len);
8242
 
8243
    case VECTOR_TYPE:
8244
      return native_interpret_vector (type, ptr, len);
8245
 
8246
    default:
8247
      return NULL_TREE;
8248
    }
8249
}
8250
 
8251
 
8252
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8253
   TYPE at compile-time.  If we're unable to perform the conversion
8254
   return NULL_TREE.  */
8255
 
8256
static tree
8257
fold_view_convert_expr (tree type, tree expr)
8258
{
8259
  /* We support up to 512-bit values (for V8DFmode).  */
8260
  unsigned char buffer[64];
8261
  int len;
8262
 
8263
  /* Check that the host and target are sane.  */
8264
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8265
    return NULL_TREE;
8266
 
8267
  len = native_encode_expr (expr, buffer, sizeof (buffer));
8268
  if (len == 0)
8269
    return NULL_TREE;
8270
 
8271
  return native_interpret_expr (type, buffer, len);
8272
}
8273
 
8274
/* Build an expression for the address of T.  Folds away INDIRECT_REF
8275
   to avoid confusing the gimplify process.  */
8276
 
8277
tree
8278
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8279
{
8280
  /* The size of the object is not relevant when talking about its address.  */
8281
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
8282
    t = TREE_OPERAND (t, 0);
8283
 
8284
  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8285
  if (TREE_CODE (t) == INDIRECT_REF
8286
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8287
    {
8288
      t = TREE_OPERAND (t, 0);
8289
 
8290
      if (TREE_TYPE (t) != ptrtype)
8291
        {
8292
          t = build1 (NOP_EXPR, ptrtype, t);
8293
          SET_EXPR_LOCATION (t, loc);
8294
        }
8295
    }
8296
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8297
    {
8298
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8299
 
8300
      if (TREE_TYPE (t) != ptrtype)
8301
        t = fold_convert_loc (loc, ptrtype, t);
8302
    }
8303
  else
8304
    {
8305
      t = build1 (ADDR_EXPR, ptrtype, t);
8306
      SET_EXPR_LOCATION (t, loc);
8307
    }
8308
 
8309
  return t;
8310
}
8311
 
8312
/* Build an expression for the address of T.  */
8313
 
8314
tree
8315
build_fold_addr_expr_loc (location_t loc, tree t)
8316
{
8317
  tree ptrtype = build_pointer_type (TREE_TYPE (t));
8318
 
8319
  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8320
}
8321
 
8322
/* Fold a unary expression of code CODE and type TYPE with operand
8323
   OP0.  Return the folded expression if folding is successful.
8324
   Otherwise, return NULL_TREE.  */
8325
 
8326
tree
8327
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8328
{
8329
  tree tem;
8330
  tree arg0;
8331
  enum tree_code_class kind = TREE_CODE_CLASS (code);
8332
 
8333
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
8334
              && TREE_CODE_LENGTH (code) == 1);
8335
 
8336
  arg0 = op0;
8337
  if (arg0)
8338
    {
8339
      if (CONVERT_EXPR_CODE_P (code)
8340
          || code == FLOAT_EXPR || code == ABS_EXPR)
8341
        {
8342
          /* Don't use STRIP_NOPS, because signedness of argument type
8343
             matters.  */
8344
          STRIP_SIGN_NOPS (arg0);
8345
        }
8346
      else
8347
        {
8348
          /* Strip any conversions that don't change the mode.  This
8349
             is safe for every expression, except for a comparison
8350
             expression because its signedness is derived from its
8351
             operands.
8352
 
8353
             Note that this is done as an internal manipulation within
8354
             the constant folder, in order to find the simplest
8355
             representation of the arguments so that their form can be
8356
             studied.  In any cases, the appropriate type conversions
8357
             should be put back in the tree that will get out of the
8358
             constant folder.  */
8359
          STRIP_NOPS (arg0);
8360
        }
8361
    }
8362
 
8363
  if (TREE_CODE_CLASS (code) == tcc_unary)
8364
    {
8365
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
8366
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8367
                       fold_build1_loc (loc, code, type,
8368
                                    fold_convert_loc (loc, TREE_TYPE (op0),
8369
                                                      TREE_OPERAND (arg0, 1))));
8370
      else if (TREE_CODE (arg0) == COND_EXPR)
8371
        {
8372
          tree arg01 = TREE_OPERAND (arg0, 1);
8373
          tree arg02 = TREE_OPERAND (arg0, 2);
8374
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8375
            arg01 = fold_build1_loc (loc, code, type,
8376
                                 fold_convert_loc (loc,
8377
                                                   TREE_TYPE (op0), arg01));
8378
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8379
            arg02 = fold_build1_loc (loc, code, type,
8380
                                 fold_convert_loc (loc,
8381
                                                   TREE_TYPE (op0), arg02));
8382
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8383
                             arg01, arg02);
8384
 
8385
          /* If this was a conversion, and all we did was to move into
8386
             inside the COND_EXPR, bring it back out.  But leave it if
8387
             it is a conversion from integer to integer and the
8388
             result precision is no wider than a word since such a
8389
             conversion is cheap and may be optimized away by combine,
8390
             while it couldn't if it were outside the COND_EXPR.  Then return
8391
             so we don't get into an infinite recursion loop taking the
8392
             conversion out and then back in.  */
8393
 
8394
          if ((CONVERT_EXPR_CODE_P (code)
8395
               || code == NON_LVALUE_EXPR)
8396
              && TREE_CODE (tem) == COND_EXPR
8397
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8398
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8399
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8400
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8401
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8402
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8403
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8404
                     && (INTEGRAL_TYPE_P
8405
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8406
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8407
                  || flag_syntax_only))
8408
            {
8409
              tem = build1 (code, type,
8410
                            build3 (COND_EXPR,
8411
                                    TREE_TYPE (TREE_OPERAND
8412
                                               (TREE_OPERAND (tem, 1), 0)),
8413
                                    TREE_OPERAND (tem, 0),
8414
                                    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8415
                                    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8416
              SET_EXPR_LOCATION (tem, loc);
8417
            }
8418
          return tem;
8419
        }
8420
      else if (COMPARISON_CLASS_P (arg0))
8421
        {
8422
          if (TREE_CODE (type) == BOOLEAN_TYPE)
8423
            {
8424
              arg0 = copy_node (arg0);
8425
              TREE_TYPE (arg0) = type;
8426
              return arg0;
8427
            }
8428
          else if (TREE_CODE (type) != INTEGER_TYPE)
8429
            return fold_build3_loc (loc, COND_EXPR, type, arg0,
8430
                                fold_build1_loc (loc, code, type,
8431
                                             integer_one_node),
8432
                                fold_build1_loc (loc, code, type,
8433
                                             integer_zero_node));
8434
        }
8435
   }
8436
 
8437
  switch (code)
8438
    {
8439
    case PAREN_EXPR:
8440
      /* Re-association barriers around constants and other re-association
8441
         barriers can be removed.  */
8442
      if (CONSTANT_CLASS_P (op0)
8443
          || TREE_CODE (op0) == PAREN_EXPR)
8444
        return fold_convert_loc (loc, type, op0);
8445
      return NULL_TREE;
8446
 
8447
    CASE_CONVERT:
8448
    case FLOAT_EXPR:
8449
    case FIX_TRUNC_EXPR:
8450
      if (TREE_TYPE (op0) == type)
8451
        return op0;
8452
 
8453
      /* If we have (type) (a CMP b) and type is an integral type, return
8454
         new expression involving the new type.  */
8455
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8456
        return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8457
                            TREE_OPERAND (op0, 1));
8458
 
8459
      /* Handle cases of two conversions in a row.  */
8460
      if (CONVERT_EXPR_P (op0))
8461
        {
8462
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8463
          tree inter_type = TREE_TYPE (op0);
8464
          int inside_int = INTEGRAL_TYPE_P (inside_type);
8465
          int inside_ptr = POINTER_TYPE_P (inside_type);
8466
          int inside_float = FLOAT_TYPE_P (inside_type);
8467
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8468
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
8469
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8470
          int inter_int = INTEGRAL_TYPE_P (inter_type);
8471
          int inter_ptr = POINTER_TYPE_P (inter_type);
8472
          int inter_float = FLOAT_TYPE_P (inter_type);
8473
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8474
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
8475
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8476
          int final_int = INTEGRAL_TYPE_P (type);
8477
          int final_ptr = POINTER_TYPE_P (type);
8478
          int final_float = FLOAT_TYPE_P (type);
8479
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8480
          unsigned int final_prec = TYPE_PRECISION (type);
8481
          int final_unsignedp = TYPE_UNSIGNED (type);
8482
 
8483
          /* In addition to the cases of two conversions in a row
8484
             handled below, if we are converting something to its own
8485
             type via an object of identical or wider precision, neither
8486
             conversion is needed.  */
8487
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8488
              && (((inter_int || inter_ptr) && final_int)
8489
                  || (inter_float && final_float))
8490
              && inter_prec >= final_prec)
8491
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8492
 
8493
          /* Likewise, if the intermediate and initial types are either both
8494
             float or both integer, we don't need the middle conversion if the
8495
             former is wider than the latter and doesn't change the signedness
8496
             (for integers).  Avoid this if the final type is a pointer since
8497
             then we sometimes need the middle conversion.  Likewise if the
8498
             final type has a precision not equal to the size of its mode.  */
8499
          if (((inter_int && inside_int)
8500
               || (inter_float && inside_float)
8501
               || (inter_vec && inside_vec))
8502
              && inter_prec >= inside_prec
8503
              && (inter_float || inter_vec
8504
                  || inter_unsignedp == inside_unsignedp)
8505
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8506
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
8507
              && ! final_ptr
8508
              && (! final_vec || inter_prec == inside_prec))
8509
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8510
 
8511
          /* If we have a sign-extension of a zero-extended value, we can
8512
             replace that by a single zero-extension.  */
8513
          if (inside_int && inter_int && final_int
8514
              && inside_prec < inter_prec && inter_prec < final_prec
8515
              && inside_unsignedp && !inter_unsignedp)
8516
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8517
 
8518
          /* Two conversions in a row are not needed unless:
8519
             - some conversion is floating-point (overstrict for now), or
8520
             - some conversion is a vector (overstrict for now), or
8521
             - the intermediate type is narrower than both initial and
8522
               final, or
8523
             - the intermediate type and innermost type differ in signedness,
8524
               and the outermost type is wider than the intermediate, or
8525
             - the initial type is a pointer type and the precisions of the
8526
               intermediate and final types differ, or
8527
             - the final type is a pointer type and the precisions of the
8528
               initial and intermediate types differ.  */
8529
          if (! inside_float && ! inter_float && ! final_float
8530
              && ! inside_vec && ! inter_vec && ! final_vec
8531
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
8532
              && ! (inside_int && inter_int
8533
                    && inter_unsignedp != inside_unsignedp
8534
                    && inter_prec < final_prec)
8535
              && ((inter_unsignedp && inter_prec > inside_prec)
8536
                  == (final_unsignedp && final_prec > inter_prec))
8537
              && ! (inside_ptr && inter_prec != final_prec)
8538
              && ! (final_ptr && inside_prec != inter_prec)
8539
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8540
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8541
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8542
        }
8543
 
8544
      /* Handle (T *)&A.B.C for A being of type T and B and C
8545
         living at offset zero.  This occurs frequently in
8546
         C++ upcasting and then accessing the base.  */
8547
      if (TREE_CODE (op0) == ADDR_EXPR
8548
          && POINTER_TYPE_P (type)
8549
          && handled_component_p (TREE_OPERAND (op0, 0)))
8550
        {
8551
          HOST_WIDE_INT bitsize, bitpos;
8552
          tree offset;
8553
          enum machine_mode mode;
8554
          int unsignedp, volatilep;
8555
          tree base = TREE_OPERAND (op0, 0);
8556
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8557
                                      &mode, &unsignedp, &volatilep, false);
8558
          /* If the reference was to a (constant) zero offset, we can use
8559
             the address of the base if it has the same base type
8560
             as the result type.  */
8561
          if (! offset && bitpos == 0
8562
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8563
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8564
            return fold_convert_loc (loc, type,
8565
                                     build_fold_addr_expr_loc (loc, base));
8566
        }
8567
 
8568
      if (TREE_CODE (op0) == MODIFY_EXPR
8569
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8570
          /* Detect assigning a bitfield.  */
8571
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8572
               && DECL_BIT_FIELD
8573
               (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8574
        {
8575
          /* Don't leave an assignment inside a conversion
8576
             unless assigning a bitfield.  */
8577
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8578
          /* First do the assignment, then return converted constant.  */
8579
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8580
          TREE_NO_WARNING (tem) = 1;
8581
          TREE_USED (tem) = 1;
8582
          SET_EXPR_LOCATION (tem, loc);
8583
          return tem;
8584
        }
8585
 
8586
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8587
         constants (if x has signed type, the sign bit cannot be set
8588
         in c).  This folds extension into the BIT_AND_EXPR.
8589
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8590
         very likely don't have maximal range for their precision and this
8591
         transformation effectively doesn't preserve non-maximal ranges.  */
8592
      if (TREE_CODE (type) == INTEGER_TYPE
8593
          && TREE_CODE (op0) == BIT_AND_EXPR
8594
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8595
        {
8596
          tree and_expr = op0;
8597
          tree and0 = TREE_OPERAND (and_expr, 0);
8598
          tree and1 = TREE_OPERAND (and_expr, 1);
8599
          int change = 0;
8600
 
8601
          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8602
              || (TYPE_PRECISION (type)
8603
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8604
            change = 1;
8605
          else if (TYPE_PRECISION (TREE_TYPE (and1))
8606
                   <= HOST_BITS_PER_WIDE_INT
8607
                   && host_integerp (and1, 1))
8608
            {
8609
              unsigned HOST_WIDE_INT cst;
8610
 
8611
              cst = tree_low_cst (and1, 1);
8612
              cst &= (HOST_WIDE_INT) -1
8613
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8614
              change = (cst == 0);
8615
#ifdef LOAD_EXTEND_OP
8616
              if (change
8617
                  && !flag_syntax_only
8618
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8619
                      == ZERO_EXTEND))
8620
                {
8621
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
8622
                  and0 = fold_convert_loc (loc, uns, and0);
8623
                  and1 = fold_convert_loc (loc, uns, and1);
8624
                }
8625
#endif
8626
            }
8627
          if (change)
8628
            {
8629
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8630
                                           TREE_INT_CST_HIGH (and1), 0,
8631
                                           TREE_OVERFLOW (and1));
8632
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
8633
                                  fold_convert_loc (loc, type, and0), tem);
8634
            }
8635
        }
8636
 
8637
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8638
         when one of the new casts will fold away. Conservatively we assume
8639
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8640
      if (POINTER_TYPE_P (type)
8641
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8642
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8643
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8644
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8645
        {
8646
          tree arg00 = TREE_OPERAND (arg0, 0);
8647
          tree arg01 = TREE_OPERAND (arg0, 1);
8648
 
8649
          return fold_build2_loc (loc,
8650
                              TREE_CODE (arg0), type,
8651
                              fold_convert_loc (loc, type, arg00),
8652
                              fold_convert_loc (loc, sizetype, arg01));
8653
        }
8654
 
8655
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8656
         of the same precision, and X is an integer type not narrower than
8657
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8658
      if (INTEGRAL_TYPE_P (type)
8659
          && TREE_CODE (op0) == BIT_NOT_EXPR
8660
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8661
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8662
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8663
        {
8664
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8665
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8666
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8667
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8668
                                fold_convert_loc (loc, type, tem));
8669
        }
8670
 
8671
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8672
         type of X and Y (integer types only).  */
8673
      if (INTEGRAL_TYPE_P (type)
8674
          && TREE_CODE (op0) == MULT_EXPR
8675
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8676
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8677
        {
8678
          /* Be careful not to introduce new overflows.  */
8679
          tree mult_type;
8680
          if (TYPE_OVERFLOW_WRAPS (type))
8681
            mult_type = type;
8682
          else
8683
            mult_type = unsigned_type_for (type);
8684
 
8685
          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8686
            {
8687
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8688
                                 fold_convert_loc (loc, mult_type,
8689
                                                   TREE_OPERAND (op0, 0)),
8690
                                 fold_convert_loc (loc, mult_type,
8691
                                                   TREE_OPERAND (op0, 1)));
8692
              return fold_convert_loc (loc, type, tem);
8693
            }
8694
        }
8695
 
8696
      tem = fold_convert_const (code, type, op0);
8697
      return tem ? tem : NULL_TREE;
8698
 
8699
    case ADDR_SPACE_CONVERT_EXPR:
8700
      if (integer_zerop (arg0))
8701
        return fold_convert_const (code, type, arg0);
8702
      return NULL_TREE;
8703
 
8704
    case FIXED_CONVERT_EXPR:
8705
      tem = fold_convert_const (code, type, arg0);
8706
      return tem ? tem : NULL_TREE;
8707
 
8708
    case VIEW_CONVERT_EXPR:
8709
      if (TREE_TYPE (op0) == type)
8710
        return op0;
8711
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8712
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8713
                            type, TREE_OPERAND (op0, 0));
8714
 
8715
      /* For integral conversions with the same precision or pointer
8716
         conversions use a NOP_EXPR instead.  */
8717
      if ((INTEGRAL_TYPE_P (type)
8718
           || POINTER_TYPE_P (type))
8719
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8720
              || POINTER_TYPE_P (TREE_TYPE (op0)))
8721
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8722
        return fold_convert_loc (loc, type, op0);
8723
 
8724
      /* Strip inner integral conversions that do not change the precision.  */
8725
      if (CONVERT_EXPR_P (op0)
8726
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8727
              || POINTER_TYPE_P (TREE_TYPE (op0)))
8728
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8729
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8730
          && (TYPE_PRECISION (TREE_TYPE (op0))
8731
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8732
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8733
                            type, TREE_OPERAND (op0, 0));
8734
 
8735
      return fold_view_convert_expr (type, op0);
8736
 
8737
    case NEGATE_EXPR:
8738
      tem = fold_negate_expr (loc, arg0);
8739
      if (tem)
8740
        return fold_convert_loc (loc, type, tem);
8741
      return NULL_TREE;
8742
 
8743
    case ABS_EXPR:
8744
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8745
        return fold_abs_const (arg0, type);
8746
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
8747
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8748
      /* Convert fabs((double)float) into (double)fabsf(float).  */
8749
      else if (TREE_CODE (arg0) == NOP_EXPR
8750
               && TREE_CODE (type) == REAL_TYPE)
8751
        {
8752
          tree targ0 = strip_float_extensions (arg0);
8753
          if (targ0 != arg0)
8754
            return fold_convert_loc (loc, type,
8755
                                     fold_build1_loc (loc, ABS_EXPR,
8756
                                                  TREE_TYPE (targ0),
8757
                                                  targ0));
8758
        }
8759
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8760
      else if (TREE_CODE (arg0) == ABS_EXPR)
8761
        return arg0;
8762
      else if (tree_expr_nonnegative_p (arg0))
8763
        return arg0;
8764
 
8765
      /* Strip sign ops from argument.  */
8766
      if (TREE_CODE (type) == REAL_TYPE)
8767
        {
8768
          tem = fold_strip_sign_ops (arg0);
8769
          if (tem)
8770
            return fold_build1_loc (loc, ABS_EXPR, type,
8771
                                fold_convert_loc (loc, type, tem));
8772
        }
8773
      return NULL_TREE;
8774
 
8775
    case CONJ_EXPR:
8776
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8777
        return fold_convert_loc (loc, type, arg0);
8778
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
8779
        {
8780
          tree itype = TREE_TYPE (type);
8781
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8782
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8783
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8784
                              negate_expr (ipart));
8785
        }
8786
      if (TREE_CODE (arg0) == COMPLEX_CST)
8787
        {
8788
          tree itype = TREE_TYPE (type);
8789
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8790
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8791
          return build_complex (type, rpart, negate_expr (ipart));
8792
        }
8793
      if (TREE_CODE (arg0) == CONJ_EXPR)
8794
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8795
      return NULL_TREE;
8796
 
8797
    case BIT_NOT_EXPR:
8798
      if (TREE_CODE (arg0) == INTEGER_CST)
8799
        return fold_not_const (arg0, type);
8800
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8801
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8802
      /* Convert ~ (-A) to A - 1.  */
8803
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8804
        return fold_build2_loc (loc, MINUS_EXPR, type,
8805
                            fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8806
                            build_int_cst (type, 1));
8807
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8808
      else if (INTEGRAL_TYPE_P (type)
8809
               && ((TREE_CODE (arg0) == MINUS_EXPR
8810
                    && integer_onep (TREE_OPERAND (arg0, 1)))
8811
                   || (TREE_CODE (arg0) == PLUS_EXPR
8812
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8813
        return fold_build1_loc (loc, NEGATE_EXPR, type,
8814
                            fold_convert_loc (loc, type,
8815
                                              TREE_OPERAND (arg0, 0)));
8816
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8817
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8818
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8819
                                     fold_convert_loc (loc, type,
8820
                                                       TREE_OPERAND (arg0, 0)))))
8821
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8822
                            fold_convert_loc (loc, type,
8823
                                              TREE_OPERAND (arg0, 1)));
8824
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8825
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8826
                                     fold_convert_loc (loc, type,
8827
                                                       TREE_OPERAND (arg0, 1)))))
8828
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8829
                            fold_convert_loc (loc, type,
8830
                                              TREE_OPERAND (arg0, 0)), tem);
8831
      /* Perform BIT_NOT_EXPR on each element individually.  */
8832
      else if (TREE_CODE (arg0) == VECTOR_CST)
8833
        {
8834
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8835
          int count = TYPE_VECTOR_SUBPARTS (type), i;
8836
 
8837
          for (i = 0; i < count; i++)
8838
            {
8839
              if (elements)
8840
                {
8841
                  elem = TREE_VALUE (elements);
8842
                  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8843
                  if (elem == NULL_TREE)
8844
                    break;
8845
                  elements = TREE_CHAIN (elements);
8846
                }
8847
              else
8848
                elem = build_int_cst (TREE_TYPE (type), -1);
8849
              list = tree_cons (NULL_TREE, elem, list);
8850
            }
8851
          if (i == count)
8852
            return build_vector (type, nreverse (list));
8853
        }
8854
 
8855
      return NULL_TREE;
8856
 
8857
    case TRUTH_NOT_EXPR:
8858
      /* The argument to invert_truthvalue must have Boolean type.  */
8859
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8860
          arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8861
 
8862
      /* Note that the operand of this must be an int
8863
         and its values must be 0 or 1.
8864
         ("true" is a fixed value perhaps depending on the language,
8865
         but we don't handle values other than 1 correctly yet.)  */
8866
      tem = fold_truth_not_expr (loc, arg0);
8867
      if (!tem)
8868
        return NULL_TREE;
8869
      return fold_convert_loc (loc, type, tem);
8870
 
8871
    case REALPART_EXPR:
8872
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8873
        return fold_convert_loc (loc, type, arg0);
8874
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
8875
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8876
                                 TREE_OPERAND (arg0, 1));
8877
      if (TREE_CODE (arg0) == COMPLEX_CST)
8878
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8879
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8880
        {
8881
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
8882
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8883
                             fold_build1_loc (loc, REALPART_EXPR, itype,
8884
                                          TREE_OPERAND (arg0, 0)),
8885
                             fold_build1_loc (loc, REALPART_EXPR, itype,
8886
                                          TREE_OPERAND (arg0, 1)));
8887
          return fold_convert_loc (loc, type, tem);
8888
        }
8889
      if (TREE_CODE (arg0) == CONJ_EXPR)
8890
        {
8891
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
8892
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8893
                             TREE_OPERAND (arg0, 0));
8894
          return fold_convert_loc (loc, type, tem);
8895
        }
8896
      if (TREE_CODE (arg0) == CALL_EXPR)
8897
        {
8898
          tree fn = get_callee_fndecl (arg0);
8899
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8900
            switch (DECL_FUNCTION_CODE (fn))
8901
              {
8902
              CASE_FLT_FN (BUILT_IN_CEXPI):
8903
                fn = mathfn_built_in (type, BUILT_IN_COS);
8904
                if (fn)
8905
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8906
                break;
8907
 
8908
              default:
8909
                break;
8910
              }
8911
        }
8912
      return NULL_TREE;
8913
 
8914
    case IMAGPART_EXPR:
8915
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8916
        return fold_convert_loc (loc, type, integer_zero_node);
8917
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
8918
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8919
                                 TREE_OPERAND (arg0, 0));
8920
      if (TREE_CODE (arg0) == COMPLEX_CST)
8921
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8922
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8923
        {
8924
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
8925
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8926
                             fold_build1_loc (loc, IMAGPART_EXPR, itype,
8927
                                          TREE_OPERAND (arg0, 0)),
8928
                             fold_build1_loc (loc, IMAGPART_EXPR, itype,
8929
                                          TREE_OPERAND (arg0, 1)));
8930
          return fold_convert_loc (loc, type, tem);
8931
        }
8932
      if (TREE_CODE (arg0) == CONJ_EXPR)
8933
        {
8934
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
8935
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8936
          return fold_convert_loc (loc, type, negate_expr (tem));
8937
        }
8938
      if (TREE_CODE (arg0) == CALL_EXPR)
8939
        {
8940
          tree fn = get_callee_fndecl (arg0);
8941
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8942
            switch (DECL_FUNCTION_CODE (fn))
8943
              {
8944
              CASE_FLT_FN (BUILT_IN_CEXPI):
8945
                fn = mathfn_built_in (type, BUILT_IN_SIN);
8946
                if (fn)
8947
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8948
                break;
8949
 
8950
              default:
8951
                break;
8952
              }
8953
        }
8954
      return NULL_TREE;
8955
 
8956
    case INDIRECT_REF:
8957
      /* Fold *&X to X if X is an lvalue.  */
8958
      if (TREE_CODE (op0) == ADDR_EXPR)
8959
        {
8960
          tree op00 = TREE_OPERAND (op0, 0);
8961
          if ((TREE_CODE (op00) == VAR_DECL
8962
               || TREE_CODE (op00) == PARM_DECL
8963
               || TREE_CODE (op00) == RESULT_DECL)
8964
              && !TREE_READONLY (op00))
8965
            return op00;
8966
        }
8967
      return NULL_TREE;
8968
 
8969
    default:
8970
      return NULL_TREE;
8971
    } /* switch (code) */
8972
}
8973
 
8974
 
8975
/* If the operation was a conversion do _not_ mark a resulting constant
8976
   with TREE_OVERFLOW if the original constant was not.  These conversions
8977
   have implementation defined behavior and retaining the TREE_OVERFLOW
8978
   flag here would confuse later passes such as VRP.  */
8979
tree
8980
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8981
                                tree type, tree op0)
8982
{
8983
  tree res = fold_unary_loc (loc, code, type, op0);
8984
  if (res
8985
      && TREE_CODE (res) == INTEGER_CST
8986
      && TREE_CODE (op0) == INTEGER_CST
8987
      && CONVERT_EXPR_CODE_P (code))
8988
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8989
 
8990
  return res;
8991
}
8992
 
8993
/* Fold a binary expression of code CODE and type TYPE with operands
8994
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8995
   Return the folded expression if folding is successful.  Otherwise,
8996
   return NULL_TREE.  */
8997
 
8998
static tree
8999
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
9000
{
9001
  enum tree_code compl_code;
9002
 
9003
  if (code == MIN_EXPR)
9004
    compl_code = MAX_EXPR;
9005
  else if (code == MAX_EXPR)
9006
    compl_code = MIN_EXPR;
9007
  else
9008
    gcc_unreachable ();
9009
 
9010
  /* MIN (MAX (a, b), b) == b.  */
9011
  if (TREE_CODE (op0) == compl_code
9012
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
9013
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
9014
 
9015
  /* MIN (MAX (b, a), b) == b.  */
9016
  if (TREE_CODE (op0) == compl_code
9017
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9018
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9019
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9020
 
9021
  /* MIN (a, MAX (a, b)) == a.  */
9022
  if (TREE_CODE (op1) == compl_code
9023
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9024
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9025
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9026
 
9027
  /* MIN (a, MAX (b, a)) == a.  */
9028
  if (TREE_CODE (op1) == compl_code
9029
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9030
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9031
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9032
 
9033
  return NULL_TREE;
9034
}
9035
 
9036
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9037
   by changing CODE to reduce the magnitude of constants involved in
9038
   ARG0 of the comparison.
9039
   Returns a canonicalized comparison tree if a simplification was
9040
   possible, otherwise returns NULL_TREE.
9041
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9042
   valid if signed overflow is undefined.  */
9043
 
9044
static tree
9045
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9046
                                 tree arg0, tree arg1,
9047
                                 bool *strict_overflow_p)
9048
{
9049
  enum tree_code code0 = TREE_CODE (arg0);
9050
  tree t, cst0 = NULL_TREE;
9051
  int sgn0;
9052
  bool swap = false;
9053
 
9054
  /* Match A +- CST code arg1 and CST code arg1.  We can change the
9055
     first form only if overflow is undefined.  */
9056
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9057
         /* In principle pointers also have undefined overflow behavior,
9058
            but that causes problems elsewhere.  */
9059
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
9060
         && (code0 == MINUS_EXPR
9061
             || code0 == PLUS_EXPR)
9062
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9063
        || code0 == INTEGER_CST))
9064
    return NULL_TREE;
9065
 
9066
  /* Identify the constant in arg0 and its sign.  */
9067
  if (code0 == INTEGER_CST)
9068
    cst0 = arg0;
9069
  else
9070
    cst0 = TREE_OPERAND (arg0, 1);
9071
  sgn0 = tree_int_cst_sgn (cst0);
9072
 
9073
  /* Overflowed constants and zero will cause problems.  */
9074
  if (integer_zerop (cst0)
9075
      || TREE_OVERFLOW (cst0))
9076
    return NULL_TREE;
9077
 
9078
  /* See if we can reduce the magnitude of the constant in
9079
     arg0 by changing the comparison code.  */
9080
  if (code0 == INTEGER_CST)
9081
    {
9082
      /* CST <= arg1  ->  CST-1 < arg1.  */
9083
      if (code == LE_EXPR && sgn0 == 1)
9084
        code = LT_EXPR;
9085
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
9086
      else if (code == LT_EXPR && sgn0 == -1)
9087
        code = LE_EXPR;
9088
      /* CST > arg1  ->  CST-1 >= arg1.  */
9089
      else if (code == GT_EXPR && sgn0 == 1)
9090
        code = GE_EXPR;
9091
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
9092
      else if (code == GE_EXPR && sgn0 == -1)
9093
        code = GT_EXPR;
9094
      else
9095
        return NULL_TREE;
9096
      /* arg1 code' CST' might be more canonical.  */
9097
      swap = true;
9098
    }
9099
  else
9100
    {
9101
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9102
      if (code == LT_EXPR
9103
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9104
        code = LE_EXPR;
9105
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9106
      else if (code == GT_EXPR
9107
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9108
        code = GE_EXPR;
9109
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9110
      else if (code == LE_EXPR
9111
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9112
        code = LT_EXPR;
9113
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9114
      else if (code == GE_EXPR
9115
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9116
        code = GT_EXPR;
9117
      else
9118
        return NULL_TREE;
9119
      *strict_overflow_p = true;
9120
    }
9121
 
9122
  /* Now build the constant reduced in magnitude.  But not if that
9123
     would produce one outside of its types range.  */
9124
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9125
      && ((sgn0 == 1
9126
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9127
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9128
          || (sgn0 == -1
9129
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9130
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9131
    /* We cannot swap the comparison here as that would cause us to
9132
       endlessly recurse.  */
9133
    return NULL_TREE;
9134
 
9135
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9136
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9137
  if (code0 != INTEGER_CST)
9138
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9139
 
9140
  /* If swapping might yield to a more canonical form, do so.  */
9141
  if (swap)
9142
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9143
  else
9144
    return fold_build2_loc (loc, code, type, t, arg1);
9145
}
9146
 
9147
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9148
   overflow further.  Try to decrease the magnitude of constants involved
9149
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9150
   and put sole constants at the second argument position.
9151
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9152
 
9153
static tree
9154
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9155
                               tree arg0, tree arg1)
9156
{
9157
  tree t;
9158
  bool strict_overflow_p;
9159
  const char * const warnmsg = G_("assuming signed overflow does not occur "
9160
                                  "when reducing constant in comparison");
9161
 
9162
  /* Try canonicalization by simplifying arg0.  */
9163
  strict_overflow_p = false;
9164
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9165
                                       &strict_overflow_p);
9166
  if (t)
9167
    {
9168
      if (strict_overflow_p)
9169
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9170
      return t;
9171
    }
9172
 
9173
  /* Try canonicalization by simplifying arg1 using the swapped
9174
     comparison.  */
9175
  code = swap_tree_comparison (code);
9176
  strict_overflow_p = false;
9177
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9178
                                       &strict_overflow_p);
9179
  if (t && strict_overflow_p)
9180
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9181
  return t;
9182
}
9183
 
9184
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
9185
   space.  This is used to avoid issuing overflow warnings for
9186
   expressions like &p->x which can not wrap.  */
9187
 
9188
static bool
9189
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9190
{
9191
  unsigned HOST_WIDE_INT offset_low, total_low;
9192
  HOST_WIDE_INT size, offset_high, total_high;
9193
 
9194
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
9195
    return true;
9196
 
9197
  if (bitpos < 0)
9198
    return true;
9199
 
9200
  if (offset == NULL_TREE)
9201
    {
9202
      offset_low = 0;
9203
      offset_high = 0;
9204
    }
9205
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9206
    return true;
9207
  else
9208
    {
9209
      offset_low = TREE_INT_CST_LOW (offset);
9210
      offset_high = TREE_INT_CST_HIGH (offset);
9211
    }
9212
 
9213
  if (add_double_with_sign (offset_low, offset_high,
9214
                            bitpos / BITS_PER_UNIT, 0,
9215
                            &total_low, &total_high,
9216
                            true))
9217
    return true;
9218
 
9219
  if (total_high != 0)
9220
    return true;
9221
 
9222
  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9223
  if (size <= 0)
9224
    return true;
9225
 
9226
  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9227
     array.  */
9228
  if (TREE_CODE (base) == ADDR_EXPR)
9229
    {
9230
      HOST_WIDE_INT base_size;
9231
 
9232
      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9233
      if (base_size > 0 && size < base_size)
9234
        size = base_size;
9235
    }
9236
 
9237
  return total_low > (unsigned HOST_WIDE_INT) size;
9238
}
9239
 
9240
/* Subroutine of fold_binary.  This routine performs all of the
9241
   transformations that are common to the equality/inequality
9242
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
9243
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
9244
   fold_binary should call fold_binary.  Fold a comparison with
9245
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
9246
   the folded comparison or NULL_TREE.  */
9247
 
9248
static tree
9249
fold_comparison (location_t loc, enum tree_code code, tree type,
9250
                 tree op0, tree op1)
9251
{
9252
  tree arg0, arg1, tem;
9253
 
9254
  arg0 = op0;
9255
  arg1 = op1;
9256
 
9257
  STRIP_SIGN_NOPS (arg0);
9258
  STRIP_SIGN_NOPS (arg1);
9259
 
9260
  tem = fold_relational_const (code, type, arg0, arg1);
9261
  if (tem != NULL_TREE)
9262
    return tem;
9263
 
9264
  /* If one arg is a real or integer constant, put it last.  */
9265
  if (tree_swap_operands_p (arg0, arg1, true))
9266
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9267
 
9268
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
9269
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9270
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9271
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9272
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9273
      && (TREE_CODE (arg1) == INTEGER_CST
9274
          && !TREE_OVERFLOW (arg1)))
9275
    {
9276
      tree const1 = TREE_OPERAND (arg0, 1);
9277
      tree const2 = arg1;
9278
      tree variable = TREE_OPERAND (arg0, 0);
9279
      tree lhs;
9280
      int lhs_add;
9281
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9282
 
9283
      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9284
                         TREE_TYPE (arg1), const2, const1);
9285
 
9286
      /* If the constant operation overflowed this can be
9287
         simplified as a comparison against INT_MAX/INT_MIN.  */
9288
      if (TREE_CODE (lhs) == INTEGER_CST
9289
          && TREE_OVERFLOW (lhs))
9290
        {
9291
          int const1_sgn = tree_int_cst_sgn (const1);
9292
          enum tree_code code2 = code;
9293
 
9294
          /* Get the sign of the constant on the lhs if the
9295
             operation were VARIABLE + CONST1.  */
9296
          if (TREE_CODE (arg0) == MINUS_EXPR)
9297
            const1_sgn = -const1_sgn;
9298
 
9299
          /* The sign of the constant determines if we overflowed
9300
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9301
             Canonicalize to the INT_MIN overflow by swapping the comparison
9302
             if necessary.  */
9303
          if (const1_sgn == -1)
9304
            code2 = swap_tree_comparison (code);
9305
 
9306
          /* We now can look at the canonicalized case
9307
               VARIABLE + 1  CODE2  INT_MIN
9308
             and decide on the result.  */
9309
          if (code2 == LT_EXPR
9310
              || code2 == LE_EXPR
9311
              || code2 == EQ_EXPR)
9312
            return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9313
          else if (code2 == NE_EXPR
9314
                   || code2 == GE_EXPR
9315
                   || code2 == GT_EXPR)
9316
            return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9317
        }
9318
 
9319
      if (TREE_CODE (lhs) == TREE_CODE (arg1)
9320
          && (TREE_CODE (lhs) != INTEGER_CST
9321
              || !TREE_OVERFLOW (lhs)))
9322
        {
9323
          fold_overflow_warning (("assuming signed overflow does not occur "
9324
                                  "when changing X +- C1 cmp C2 to "
9325
                                  "X cmp C1 +- C2"),
9326
                                 WARN_STRICT_OVERFLOW_COMPARISON);
9327
          return fold_build2_loc (loc, code, type, variable, lhs);
9328
        }
9329
    }
9330
 
9331
  /* For comparisons of pointers we can decompose it to a compile time
9332
     comparison of the base objects and the offsets into the object.
9333
     This requires at least one operand being an ADDR_EXPR or a
9334
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
9335
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
9336
      && (TREE_CODE (arg0) == ADDR_EXPR
9337
          || TREE_CODE (arg1) == ADDR_EXPR
9338
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9339
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9340
    {
9341
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9342
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9343
      enum machine_mode mode;
9344
      int volatilep, unsignedp;
9345
      bool indirect_base0 = false, indirect_base1 = false;
9346
 
9347
      /* Get base and offset for the access.  Strip ADDR_EXPR for
9348
         get_inner_reference, but put it back by stripping INDIRECT_REF
9349
         off the base object if possible.  indirect_baseN will be true
9350
         if baseN is not an address but refers to the object itself.  */
9351
      base0 = arg0;
9352
      if (TREE_CODE (arg0) == ADDR_EXPR)
9353
        {
9354
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9355
                                       &bitsize, &bitpos0, &offset0, &mode,
9356
                                       &unsignedp, &volatilep, false);
9357
          if (TREE_CODE (base0) == INDIRECT_REF)
9358
            base0 = TREE_OPERAND (base0, 0);
9359
          else
9360
            indirect_base0 = true;
9361
        }
9362
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9363
        {
9364
          base0 = TREE_OPERAND (arg0, 0);
9365
          offset0 = TREE_OPERAND (arg0, 1);
9366
        }
9367
 
9368
      base1 = arg1;
9369
      if (TREE_CODE (arg1) == ADDR_EXPR)
9370
        {
9371
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9372
                                       &bitsize, &bitpos1, &offset1, &mode,
9373
                                       &unsignedp, &volatilep, false);
9374
          if (TREE_CODE (base1) == INDIRECT_REF)
9375
            base1 = TREE_OPERAND (base1, 0);
9376
          else
9377
            indirect_base1 = true;
9378
        }
9379
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9380
        {
9381
          base1 = TREE_OPERAND (arg1, 0);
9382
          offset1 = TREE_OPERAND (arg1, 1);
9383
        }
9384
 
9385
      /* If we have equivalent bases we might be able to simplify.  */
9386
      if (indirect_base0 == indirect_base1
9387
          && operand_equal_p (base0, base1, 0))
9388
        {
9389
          /* We can fold this expression to a constant if the non-constant
9390
             offset parts are equal.  */
9391
          if ((offset0 == offset1
9392
               || (offset0 && offset1
9393
                   && operand_equal_p (offset0, offset1, 0)))
9394
              && (code == EQ_EXPR
9395
                  || code == NE_EXPR
9396
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
9397
 
9398
            {
9399
              if (code != EQ_EXPR
9400
                  && code != NE_EXPR
9401
                  && bitpos0 != bitpos1
9402
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9403
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9404
                fold_overflow_warning (("assuming pointer wraparound does not "
9405
                                        "occur when comparing P +- C1 with "
9406
                                        "P +- C2"),
9407
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);
9408
 
9409
              switch (code)
9410
                {
9411
                case EQ_EXPR:
9412
                  return constant_boolean_node (bitpos0 == bitpos1, type);
9413
                case NE_EXPR:
9414
                  return constant_boolean_node (bitpos0 != bitpos1, type);
9415
                case LT_EXPR:
9416
                  return constant_boolean_node (bitpos0 < bitpos1, type);
9417
                case LE_EXPR:
9418
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
9419
                case GE_EXPR:
9420
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
9421
                case GT_EXPR:
9422
                  return constant_boolean_node (bitpos0 > bitpos1, type);
9423
                default:;
9424
                }
9425
            }
9426
          /* We can simplify the comparison to a comparison of the variable
9427
             offset parts if the constant offset parts are equal.
9428
             Be careful to use signed size type here because otherwise we
9429
             mess with array offsets in the wrong way.  This is possible
9430
             because pointer arithmetic is restricted to retain within an
9431
             object and overflow on pointer differences is undefined as of
9432
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9433
          else if (bitpos0 == bitpos1
9434
                   && ((code == EQ_EXPR || code == NE_EXPR)
9435
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9436
            {
9437
              tree signed_size_type_node;
9438
              signed_size_type_node = signed_type_for (size_type_node);
9439
 
9440
              /* By converting to signed size type we cover middle-end pointer
9441
                 arithmetic which operates on unsigned pointer types of size
9442
                 type size and ARRAY_REF offsets which are properly sign or
9443
                 zero extended from their type in case it is narrower than
9444
                 size type.  */
9445
              if (offset0 == NULL_TREE)
9446
                offset0 = build_int_cst (signed_size_type_node, 0);
9447
              else
9448
                offset0 = fold_convert_loc (loc, signed_size_type_node,
9449
                                            offset0);
9450
              if (offset1 == NULL_TREE)
9451
                offset1 = build_int_cst (signed_size_type_node, 0);
9452
              else
9453
                offset1 = fold_convert_loc (loc, signed_size_type_node,
9454
                                            offset1);
9455
 
9456
              if (code != EQ_EXPR
9457
                  && code != NE_EXPR
9458
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9459
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9460
                fold_overflow_warning (("assuming pointer wraparound does not "
9461
                                        "occur when comparing P +- C1 with "
9462
                                        "P +- C2"),
9463
                                       WARN_STRICT_OVERFLOW_COMPARISON);
9464
 
9465
              return fold_build2_loc (loc, code, type, offset0, offset1);
9466
            }
9467
        }
9468
      /* For non-equal bases we can simplify if they are addresses
9469
         of local binding decls or constants.  */
9470
      else if (indirect_base0 && indirect_base1
9471
               /* We know that !operand_equal_p (base0, base1, 0)
9472
                  because the if condition was false.  But make
9473
                  sure two decls are not the same.  */
9474
               && base0 != base1
9475
               && TREE_CODE (arg0) == ADDR_EXPR
9476
               && TREE_CODE (arg1) == ADDR_EXPR
9477
               && (((TREE_CODE (base0) == VAR_DECL
9478
                     || TREE_CODE (base0) == PARM_DECL)
9479
                    && (targetm.binds_local_p (base0)
9480
                        || CONSTANT_CLASS_P (base1)))
9481
                   || CONSTANT_CLASS_P (base0))
9482
               && (((TREE_CODE (base1) == VAR_DECL
9483
                     || TREE_CODE (base1) == PARM_DECL)
9484
                    && (targetm.binds_local_p (base1)
9485
                        || CONSTANT_CLASS_P (base0)))
9486
                   || CONSTANT_CLASS_P (base1)))
9487
        {
9488
          if (code == EQ_EXPR)
9489
            return omit_two_operands_loc (loc, type, boolean_false_node,
9490
                                      arg0, arg1);
9491
          else if (code == NE_EXPR)
9492
            return omit_two_operands_loc (loc, type, boolean_true_node,
9493
                                      arg0, arg1);
9494
        }
9495
      /* For equal offsets we can simplify to a comparison of the
9496
         base addresses.  */
9497
      else if (bitpos0 == bitpos1
9498
               && (indirect_base0
9499
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9500
               && (indirect_base1
9501
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9502
               && ((offset0 == offset1)
9503
                   || (offset0 && offset1
9504
                       && operand_equal_p (offset0, offset1, 0))))
9505
        {
9506
          if (indirect_base0)
9507
            base0 = build_fold_addr_expr_loc (loc, base0);
9508
          if (indirect_base1)
9509
            base1 = build_fold_addr_expr_loc (loc, base1);
9510
          return fold_build2_loc (loc, code, type, base0, base1);
9511
        }
9512
    }
9513
 
9514
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9515
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9516
     the resulting offset is smaller in absolute value than the
9517
     original one.  */
9518
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9519
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9520
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9521
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9522
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9523
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9524
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9525
    {
9526
      tree const1 = TREE_OPERAND (arg0, 1);
9527
      tree const2 = TREE_OPERAND (arg1, 1);
9528
      tree variable1 = TREE_OPERAND (arg0, 0);
9529
      tree variable2 = TREE_OPERAND (arg1, 0);
9530
      tree cst;
9531
      const char * const warnmsg = G_("assuming signed overflow does not "
9532
                                      "occur when combining constants around "
9533
                                      "a comparison");
9534
 
9535
      /* Put the constant on the side where it doesn't overflow and is
9536
         of lower absolute value than before.  */
9537
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9538
                             ? MINUS_EXPR : PLUS_EXPR,
9539
                             const2, const1, 0);
9540
      if (!TREE_OVERFLOW (cst)
9541
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9542
        {
9543
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9544
          return fold_build2_loc (loc, code, type,
9545
                              variable1,
9546
                              fold_build2_loc (loc,
9547
                                           TREE_CODE (arg1), TREE_TYPE (arg1),
9548
                                           variable2, cst));
9549
        }
9550
 
9551
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9552
                             ? MINUS_EXPR : PLUS_EXPR,
9553
                             const1, const2, 0);
9554
      if (!TREE_OVERFLOW (cst)
9555
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9556
        {
9557
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9558
          return fold_build2_loc (loc, code, type,
9559
                              fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9560
                                           variable1, cst),
9561
                              variable2);
9562
        }
9563
    }
9564
 
9565
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9566
     signed arithmetic case.  That form is created by the compiler
9567
     often enough for folding it to be of value.  One example is in
9568
     computing loop trip counts after Operator Strength Reduction.  */
9569
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9570
      && TREE_CODE (arg0) == MULT_EXPR
9571
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9572
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9573
      && integer_zerop (arg1))
9574
    {
9575
      tree const1 = TREE_OPERAND (arg0, 1);
9576
      tree const2 = arg1;                       /* zero */
9577
      tree variable1 = TREE_OPERAND (arg0, 0);
9578
      enum tree_code cmp_code = code;
9579
 
9580
      /* Handle unfolded multiplication by zero.  */
9581
      if (integer_zerop (const1))
9582
        return fold_build2_loc (loc, cmp_code, type, const1, const2);
9583
 
9584
      fold_overflow_warning (("assuming signed overflow does not occur when "
9585
                              "eliminating multiplication in comparison "
9586
                              "with zero"),
9587
                             WARN_STRICT_OVERFLOW_COMPARISON);
9588
 
9589
      /* If const1 is negative we swap the sense of the comparison.  */
9590
      if (tree_int_cst_sgn (const1) < 0)
9591
        cmp_code = swap_tree_comparison (cmp_code);
9592
 
9593
      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9594
    }
9595
 
9596
  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9597
  if (tem)
9598
    return tem;
9599
 
9600
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9601
    {
9602
      tree targ0 = strip_float_extensions (arg0);
9603
      tree targ1 = strip_float_extensions (arg1);
9604
      tree newtype = TREE_TYPE (targ0);
9605
 
9606
      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9607
        newtype = TREE_TYPE (targ1);
9608
 
9609
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9610
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9611
        return fold_build2_loc (loc, code, type,
9612
                            fold_convert_loc (loc, newtype, targ0),
9613
                            fold_convert_loc (loc, newtype, targ1));
9614
 
9615
      /* (-a) CMP (-b) -> b CMP a  */
9616
      if (TREE_CODE (arg0) == NEGATE_EXPR
9617
          && TREE_CODE (arg1) == NEGATE_EXPR)
9618
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9619
                            TREE_OPERAND (arg0, 0));
9620
 
9621
      if (TREE_CODE (arg1) == REAL_CST)
9622
        {
9623
          REAL_VALUE_TYPE cst;
9624
          cst = TREE_REAL_CST (arg1);
9625
 
9626
          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9627
          if (TREE_CODE (arg0) == NEGATE_EXPR)
9628
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
9629
                                TREE_OPERAND (arg0, 0),
9630
                                build_real (TREE_TYPE (arg1),
9631
                                            REAL_VALUE_NEGATE (cst)));
9632
 
9633
          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9634
          /* a CMP (-0) -> a CMP 0  */
9635
          if (REAL_VALUE_MINUS_ZERO (cst))
9636
            return fold_build2_loc (loc, code, type, arg0,
9637
                                build_real (TREE_TYPE (arg1), dconst0));
9638
 
9639
          /* x != NaN is always true, other ops are always false.  */
9640
          if (REAL_VALUE_ISNAN (cst)
9641
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9642
            {
9643
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9644
              return omit_one_operand_loc (loc, type, tem, arg0);
9645
            }
9646
 
9647
          /* Fold comparisons against infinity.  */
9648
          if (REAL_VALUE_ISINF (cst)
9649
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9650
            {
9651
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
9652
              if (tem != NULL_TREE)
9653
                return tem;
9654
            }
9655
        }
9656
 
9657
      /* If this is a comparison of a real constant with a PLUS_EXPR
9658
         or a MINUS_EXPR of a real constant, we can convert it into a
9659
         comparison with a revised real constant as long as no overflow
9660
         occurs when unsafe_math_optimizations are enabled.  */
9661
      if (flag_unsafe_math_optimizations
9662
          && TREE_CODE (arg1) == REAL_CST
9663
          && (TREE_CODE (arg0) == PLUS_EXPR
9664
              || TREE_CODE (arg0) == MINUS_EXPR)
9665
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9666
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9667
                                      ? MINUS_EXPR : PLUS_EXPR,
9668
                                      arg1, TREE_OPERAND (arg0, 1), 0))
9669
          && !TREE_OVERFLOW (tem))
9670
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9671
 
9672
      /* Likewise, we can simplify a comparison of a real constant with
9673
         a MINUS_EXPR whose first operand is also a real constant, i.e.
9674
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9675
         floating-point types only if -fassociative-math is set.  */
9676
      if (flag_associative_math
9677
          && TREE_CODE (arg1) == REAL_CST
9678
          && TREE_CODE (arg0) == MINUS_EXPR
9679
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9680
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9681
                                      arg1, 0))
9682
          && !TREE_OVERFLOW (tem))
9683
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
9684
                            TREE_OPERAND (arg0, 1), tem);
9685
 
9686
      /* Fold comparisons against built-in math functions.  */
9687
      if (TREE_CODE (arg1) == REAL_CST
9688
          && flag_unsafe_math_optimizations
9689
          && ! flag_errno_math)
9690
        {
9691
          enum built_in_function fcode = builtin_mathfn_code (arg0);
9692
 
9693
          if (fcode != END_BUILTINS)
9694
            {
9695
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9696
              if (tem != NULL_TREE)
9697
                return tem;
9698
            }
9699
        }
9700
    }
9701
 
9702
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9703
      && CONVERT_EXPR_P (arg0))
9704
    {
9705
      /* If we are widening one operand of an integer comparison,
9706
         see if the other operand is similarly being widened.  Perhaps we
9707
         can do the comparison in the narrower type.  */
9708
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9709
      if (tem)
9710
        return tem;
9711
 
9712
      /* Or if we are changing signedness.  */
9713
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9714
      if (tem)
9715
        return tem;
9716
    }
9717
 
9718
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9719
     constant, we can simplify it.  */
9720
  if (TREE_CODE (arg1) == INTEGER_CST
9721
      && (TREE_CODE (arg0) == MIN_EXPR
9722
          || TREE_CODE (arg0) == MAX_EXPR)
9723
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9724
    {
9725
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9726
      if (tem)
9727
        return tem;
9728
    }
9729
 
9730
  /* Simplify comparison of something with itself.  (For IEEE
9731
     floating-point, we can only do some of these simplifications.)  */
9732
  if (operand_equal_p (arg0, arg1, 0))
9733
    {
9734
      switch (code)
9735
        {
9736
        case EQ_EXPR:
9737
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9738
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9739
            return constant_boolean_node (1, type);
9740
          break;
9741
 
9742
        case GE_EXPR:
9743
        case LE_EXPR:
9744
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9745
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9746
            return constant_boolean_node (1, type);
9747
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9748
 
9749
        case NE_EXPR:
9750
          /* For NE, we can only do this simplification if integer
9751
             or we don't honor IEEE floating point NaNs.  */
9752
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9753
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9754
            break;
9755
          /* ... fall through ...  */
9756
        case GT_EXPR:
9757
        case LT_EXPR:
9758
          return constant_boolean_node (0, type);
9759
        default:
9760
          gcc_unreachable ();
9761
        }
9762
    }
9763
 
9764
  /* If we are comparing an expression that just has comparisons
9765
     of two integer values, arithmetic expressions of those comparisons,
9766
     and constants, we can simplify it.  There are only three cases
9767
     to check: the two values can either be equal, the first can be
9768
     greater, or the second can be greater.  Fold the expression for
9769
     those three values.  Since each value must be 0 or 1, we have
9770
     eight possibilities, each of which corresponds to the constant 0
9771
     or 1 or one of the six possible comparisons.
9772
 
9773
     This handles common cases like (a > b) == 0 but also handles
9774
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
9775
     occur in macroized code.  */
9776
 
9777
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9778
    {
9779
      tree cval1 = 0, cval2 = 0;
9780
      int save_p = 0;
9781
 
9782
      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9783
          /* Don't handle degenerate cases here; they should already
9784
             have been handled anyway.  */
9785
          && cval1 != 0 && cval2 != 0
9786
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9787
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9788
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9789
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9790
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9791
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9792
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9793
        {
9794
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9795
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9796
 
9797
          /* We can't just pass T to eval_subst in case cval1 or cval2
9798
             was the same as ARG1.  */
9799
 
9800
          tree high_result
9801
                = fold_build2_loc (loc, code, type,
9802
                               eval_subst (loc, arg0, cval1, maxval,
9803
                                           cval2, minval),
9804
                               arg1);
9805
          tree equal_result
9806
                = fold_build2_loc (loc, code, type,
9807
                               eval_subst (loc, arg0, cval1, maxval,
9808
                                           cval2, maxval),
9809
                               arg1);
9810
          tree low_result
9811
                = fold_build2_loc (loc, code, type,
9812
                               eval_subst (loc, arg0, cval1, minval,
9813
                                           cval2, maxval),
9814
                               arg1);
9815
 
9816
          /* All three of these results should be 0 or 1.  Confirm they are.
9817
             Then use those values to select the proper code to use.  */
9818
 
9819
          if (TREE_CODE (high_result) == INTEGER_CST
9820
              && TREE_CODE (equal_result) == INTEGER_CST
9821
              && TREE_CODE (low_result) == INTEGER_CST)
9822
            {
9823
              /* Make a 3-bit mask with the high-order bit being the
9824
                 value for `>', the next for '=', and the low for '<'.  */
9825
              switch ((integer_onep (high_result) * 4)
9826
                      + (integer_onep (equal_result) * 2)
9827
                      + integer_onep (low_result))
9828
                {
9829
                case 0:
9830
                  /* Always false.  */
9831
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9832
                case 1:
9833
                  code = LT_EXPR;
9834
                  break;
9835
                case 2:
9836
                  code = EQ_EXPR;
9837
                  break;
9838
                case 3:
9839
                  code = LE_EXPR;
9840
                  break;
9841
                case 4:
9842
                  code = GT_EXPR;
9843
                  break;
9844
                case 5:
9845
                  code = NE_EXPR;
9846
                  break;
9847
                case 6:
9848
                  code = GE_EXPR;
9849
                  break;
9850
                case 7:
9851
                  /* Always true.  */
9852
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9853
                }
9854
 
9855
              if (save_p)
9856
                {
9857
                  tem = save_expr (build2 (code, type, cval1, cval2));
9858
                  SET_EXPR_LOCATION (tem, loc);
9859
                  return tem;
9860
                }
9861
              return fold_build2_loc (loc, code, type, cval1, cval2);
9862
            }
9863
        }
9864
    }
9865
 
9866
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9867
     into a single range test.  */
9868
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9869
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9870
      && TREE_CODE (arg1) == INTEGER_CST
9871
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9872
      && !integer_zerop (TREE_OPERAND (arg0, 1))
9873
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9874
      && !TREE_OVERFLOW (arg1))
9875
    {
9876
      tem = fold_div_compare (loc, code, type, arg0, arg1);
9877
      if (tem != NULL_TREE)
9878
        return tem;
9879
    }
9880
 
9881
  /* Fold ~X op ~Y as Y op X.  */
9882
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9883
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
9884
    {
9885
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9886
      return fold_build2_loc (loc, code, type,
9887
                          fold_convert_loc (loc, cmp_type,
9888
                                            TREE_OPERAND (arg1, 0)),
9889
                          TREE_OPERAND (arg0, 0));
9890
    }
9891
 
9892
  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9893
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9894
      && TREE_CODE (arg1) == INTEGER_CST)
9895
    {
9896
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9897
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
9898
                          TREE_OPERAND (arg0, 0),
9899
                          fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9900
                                       fold_convert_loc (loc, cmp_type, arg1)));
9901
    }
9902
 
9903
  return NULL_TREE;
9904
}
9905
 
9906
 
9907
/* Subroutine of fold_binary.  Optimize complex multiplications of the
9908
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9909
   argument EXPR represents the expression "z" of type TYPE.  */
9910
 
9911
static tree
9912
fold_mult_zconjz (location_t loc, tree type, tree expr)
9913
{
9914
  tree itype = TREE_TYPE (type);
9915
  tree rpart, ipart, tem;
9916
 
9917
  if (TREE_CODE (expr) == COMPLEX_EXPR)
9918
    {
9919
      rpart = TREE_OPERAND (expr, 0);
9920
      ipart = TREE_OPERAND (expr, 1);
9921
    }
9922
  else if (TREE_CODE (expr) == COMPLEX_CST)
9923
    {
9924
      rpart = TREE_REALPART (expr);
9925
      ipart = TREE_IMAGPART (expr);
9926
    }
9927
  else
9928
    {
9929
      expr = save_expr (expr);
9930
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9931
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9932
    }
9933
 
9934
  rpart = save_expr (rpart);
9935
  ipart = save_expr (ipart);
9936
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9937
                     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9938
                     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9939
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9940
                      fold_convert_loc (loc, itype, integer_zero_node));
9941
}
9942
 
9943
 
9944
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
9945
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9946
   guarantees that P and N have the same least significant log2(M) bits.
9947
   N is not otherwise constrained.  In particular, N is not normalized to
9948
 
9949
   M is chosen as large as possible such that constant N can be determined.
9950
 
9951
   Returns M and sets *RESIDUE to N.
9952
 
9953
   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9954
   account.  This is not always possible due to PR 35705.
9955
 */
9956
 
9957
static unsigned HOST_WIDE_INT
9958
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9959
                                 bool allow_func_align)
9960
{
9961
  enum tree_code code;
9962
 
9963
  *residue = 0;
9964
 
9965
  code = TREE_CODE (expr);
9966
  if (code == ADDR_EXPR)
9967
    {
9968
      expr = TREE_OPERAND (expr, 0);
9969
      if (handled_component_p (expr))
9970
        {
9971
          HOST_WIDE_INT bitsize, bitpos;
9972
          tree offset;
9973
          enum machine_mode mode;
9974
          int unsignedp, volatilep;
9975
 
9976
          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9977
                                      &mode, &unsignedp, &volatilep, false);
9978
          *residue = bitpos / BITS_PER_UNIT;
9979
          if (offset)
9980
            {
9981
              if (TREE_CODE (offset) == INTEGER_CST)
9982
                *residue += TREE_INT_CST_LOW (offset);
9983
              else
9984
                /* We don't handle more complicated offset expressions.  */
9985
                return 1;
9986
            }
9987
        }
9988
 
9989
      if (DECL_P (expr)
9990
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9991
        return DECL_ALIGN_UNIT (expr);
9992
    }
9993
  else if (code == POINTER_PLUS_EXPR)
9994
    {
9995
      tree op0, op1;
9996
      unsigned HOST_WIDE_INT modulus;
9997
      enum tree_code inner_code;
9998
 
9999
      op0 = TREE_OPERAND (expr, 0);
10000
      STRIP_NOPS (op0);
10001
      modulus = get_pointer_modulus_and_residue (op0, residue,
10002
                                                 allow_func_align);
10003
 
10004
      op1 = TREE_OPERAND (expr, 1);
10005
      STRIP_NOPS (op1);
10006
      inner_code = TREE_CODE (op1);
10007
      if (inner_code == INTEGER_CST)
10008
        {
10009
          *residue += TREE_INT_CST_LOW (op1);
10010
          return modulus;
10011
        }
10012
      else if (inner_code == MULT_EXPR)
10013
        {
10014
          op1 = TREE_OPERAND (op1, 1);
10015
          if (TREE_CODE (op1) == INTEGER_CST)
10016
            {
10017
              unsigned HOST_WIDE_INT align;
10018
 
10019
              /* Compute the greatest power-of-2 divisor of op1.  */
10020
              align = TREE_INT_CST_LOW (op1);
10021
              align &= -align;
10022
 
10023
              /* If align is non-zero and less than *modulus, replace
10024
                 *modulus with align., If align is 0, then either op1 is 0
10025
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
10026
                 unsigned HOST_WIDE_INT.  In either case, no additional
10027
                 constraint is imposed.  */
10028
              if (align)
10029
                modulus = MIN (modulus, align);
10030
 
10031
              return modulus;
10032
            }
10033
        }
10034
    }
10035
 
10036
    /* If we get here, we were unable to determine anything useful about the
10037
       expression.  */
10038
    return 1;
10039
}
10040
 
10041
 
10042
/* Fold a binary expression of code CODE and type TYPE with operands
10043
   OP0 and OP1.  LOC is the location of the resulting expression.
10044
   Return the folded expression if folding is successful.  Otherwise,
10045
   return NULL_TREE.  */
10046
 
10047
tree
10048
fold_binary_loc (location_t loc,
10049
             enum tree_code code, tree type, tree op0, tree op1)
10050
{
10051
  enum tree_code_class kind = TREE_CODE_CLASS (code);
10052
  tree arg0, arg1, tem;
10053
  tree t1 = NULL_TREE;
10054
  bool strict_overflow_p;
10055
 
10056
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
10057
              && TREE_CODE_LENGTH (code) == 2
10058
              && op0 != NULL_TREE
10059
              && op1 != NULL_TREE);
10060
 
10061
  arg0 = op0;
10062
  arg1 = op1;
10063
 
10064
  /* Strip any conversions that don't change the mode.  This is
10065
     safe for every expression, except for a comparison expression
10066
     because its signedness is derived from its operands.  So, in
10067
     the latter case, only strip conversions that don't change the
10068
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10069
     preserved.
10070
 
10071
     Note that this is done as an internal manipulation within the
10072
     constant folder, in order to find the simplest representation
10073
     of the arguments so that their form can be studied.  In any
10074
     cases, the appropriate type conversions should be put back in
10075
     the tree that will get out of the constant folder.  */
10076
 
10077
  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10078
    {
10079
      STRIP_SIGN_NOPS (arg0);
10080
      STRIP_SIGN_NOPS (arg1);
10081
    }
10082
  else
10083
    {
10084
      STRIP_NOPS (arg0);
10085
      STRIP_NOPS (arg1);
10086
    }
10087
 
10088
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
10089
     constant but we can't do arithmetic on them.  */
10090
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10091
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10092
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10093
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10094
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10095
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10096
    {
10097
      if (kind == tcc_binary)
10098
        {
10099
          /* Make sure type and arg0 have the same saturating flag.  */
10100
          gcc_assert (TYPE_SATURATING (type)
10101
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
10102
          tem = const_binop (code, arg0, arg1, 0);
10103
        }
10104
      else if (kind == tcc_comparison)
10105
        tem = fold_relational_const (code, type, arg0, arg1);
10106
      else
10107
        tem = NULL_TREE;
10108
 
10109
      if (tem != NULL_TREE)
10110
        {
10111
          if (TREE_TYPE (tem) != type)
10112
            tem = fold_convert_loc (loc, type, tem);
10113
          return tem;
10114
        }
10115
    }
10116
 
10117
  /* If this is a commutative operation, and ARG0 is a constant, move it
10118
     to ARG1 to reduce the number of tests below.  */
10119
  if (commutative_tree_code (code)
10120
      && tree_swap_operands_p (arg0, arg1, true))
10121
    return fold_build2_loc (loc, code, type, op1, op0);
10122
 
10123
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10124
 
10125
     First check for cases where an arithmetic operation is applied to a
10126
     compound, conditional, or comparison operation.  Push the arithmetic
10127
     operation inside the compound or conditional to see if any folding
10128
     can then be done.  Convert comparison to conditional for this purpose.
10129
     The also optimizes non-constant cases that used to be done in
10130
     expand_expr.
10131
 
10132
     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10133
     one of the operands is a comparison and the other is a comparison, a
10134
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10135
     code below would make the expression more complex.  Change it to a
10136
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10137
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10138
 
10139
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10140
       || code == EQ_EXPR || code == NE_EXPR)
10141
      && ((truth_value_p (TREE_CODE (arg0))
10142
           && (truth_value_p (TREE_CODE (arg1))
10143
               || (TREE_CODE (arg1) == BIT_AND_EXPR
10144
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
10145
          || (truth_value_p (TREE_CODE (arg1))
10146
              && (truth_value_p (TREE_CODE (arg0))
10147
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
10148
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10149
    {
10150
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10151
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10152
                         : TRUTH_XOR_EXPR,
10153
                         boolean_type_node,
10154
                         fold_convert_loc (loc, boolean_type_node, arg0),
10155
                         fold_convert_loc (loc, boolean_type_node, arg1));
10156
 
10157
      if (code == EQ_EXPR)
10158
        tem = invert_truthvalue_loc (loc, tem);
10159
 
10160
      return fold_convert_loc (loc, type, tem);
10161
    }
10162
 
10163
  if (TREE_CODE_CLASS (code) == tcc_binary
10164
      || TREE_CODE_CLASS (code) == tcc_comparison)
10165
    {
10166
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
10167
        {
10168
          tem = fold_build2_loc (loc, code, type,
10169
                             fold_convert_loc (loc, TREE_TYPE (op0),
10170
                                               TREE_OPERAND (arg0, 1)), op1);
10171
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10172
          goto fold_binary_exit;
10173
        }
10174
      if (TREE_CODE (arg1) == COMPOUND_EXPR
10175
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10176
        {
10177
          tem = fold_build2_loc (loc, code, type, op0,
10178
                             fold_convert_loc (loc, TREE_TYPE (op1),
10179
                                               TREE_OPERAND (arg1, 1)));
10180
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10181
          goto fold_binary_exit;
10182
        }
10183
 
10184
      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10185
        {
10186
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10187
                                                     arg0, arg1,
10188
                                                     /*cond_first_p=*/1);
10189
          if (tem != NULL_TREE)
10190
            return tem;
10191
        }
10192
 
10193
      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10194
        {
10195
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10196
                                                     arg1, arg0,
10197
                                                     /*cond_first_p=*/0);
10198
          if (tem != NULL_TREE)
10199
            return tem;
10200
        }
10201
    }
10202
 
10203
  switch (code)
10204
    {
10205
    case POINTER_PLUS_EXPR:
10206
      /* 0 +p index -> (type)index */
10207
      if (integer_zerop (arg0))
10208
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10209
 
10210
      /* PTR +p 0 -> PTR */
10211
      if (integer_zerop (arg1))
10212
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10213
 
10214
      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10215
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10216
           && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10217
        return fold_convert_loc (loc, type,
10218
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10219
                                              fold_convert_loc (loc, sizetype,
10220
                                                                arg1),
10221
                                              fold_convert_loc (loc, sizetype,
10222
                                                                arg0)));
10223
 
10224
      /* index +p PTR -> PTR +p index */
10225
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
10226
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10227
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10228
                            fold_convert_loc (loc, type, arg1),
10229
                            fold_convert_loc (loc, sizetype, arg0));
10230
 
10231
      /* (PTR +p B) +p A -> PTR +p (B + A) */
10232
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10233
        {
10234
          tree inner;
10235
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10236
          tree arg00 = TREE_OPERAND (arg0, 0);
10237
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10238
                               arg01, fold_convert_loc (loc, sizetype, arg1));
10239
          return fold_convert_loc (loc, type,
10240
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
10241
                                                TREE_TYPE (arg00),
10242
                                                arg00, inner));
10243
        }
10244
 
10245
      /* PTR_CST +p CST -> CST1 */
10246
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10247
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10248
                            fold_convert_loc (loc, type, arg1));
10249
 
10250
     /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10251
        of the array.  Loop optimizer sometimes produce this type of
10252
        expressions.  */
10253
      if (TREE_CODE (arg0) == ADDR_EXPR)
10254
        {
10255
          tem = try_move_mult_to_index (loc, arg0,
10256
                                        fold_convert_loc (loc, sizetype, arg1));
10257
          if (tem)
10258
            return fold_convert_loc (loc, type, tem);
10259
        }
10260
 
10261
      return NULL_TREE;
10262
 
10263
    case PLUS_EXPR:
10264
      /* A + (-B) -> A - B */
10265
      if (TREE_CODE (arg1) == NEGATE_EXPR)
10266
        return fold_build2_loc (loc, MINUS_EXPR, type,
10267
                            fold_convert_loc (loc, type, arg0),
10268
                            fold_convert_loc (loc, type,
10269
                                              TREE_OPERAND (arg1, 0)));
10270
      /* (-A) + B -> B - A */
10271
      if (TREE_CODE (arg0) == NEGATE_EXPR
10272
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10273
        return fold_build2_loc (loc, MINUS_EXPR, type,
10274
                            fold_convert_loc (loc, type, arg1),
10275
                            fold_convert_loc (loc, type,
10276
                                              TREE_OPERAND (arg0, 0)));
10277
 
10278
      if (INTEGRAL_TYPE_P (type))
10279
        {
10280
          /* Convert ~A + 1 to -A.  */
10281
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
10282
              && integer_onep (arg1))
10283
            return fold_build1_loc (loc, NEGATE_EXPR, type,
10284
                                fold_convert_loc (loc, type,
10285
                                                  TREE_OPERAND (arg0, 0)));
10286
 
10287
          /* ~X + X is -1.  */
10288
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
10289
              && !TYPE_OVERFLOW_TRAPS (type))
10290
            {
10291
              tree tem = TREE_OPERAND (arg0, 0);
10292
 
10293
              STRIP_NOPS (tem);
10294
              if (operand_equal_p (tem, arg1, 0))
10295
                {
10296
                  t1 = build_int_cst_type (type, -1);
10297
                  return omit_one_operand_loc (loc, type, t1, arg1);
10298
                }
10299
            }
10300
 
10301
          /* X + ~X is -1.  */
10302
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
10303
              && !TYPE_OVERFLOW_TRAPS (type))
10304
            {
10305
              tree tem = TREE_OPERAND (arg1, 0);
10306
 
10307
              STRIP_NOPS (tem);
10308
              if (operand_equal_p (arg0, tem, 0))
10309
                {
10310
                  t1 = build_int_cst_type (type, -1);
10311
                  return omit_one_operand_loc (loc, type, t1, arg0);
10312
                }
10313
            }
10314
 
10315
          /* X + (X / CST) * -CST is X % CST.  */
10316
          if (TREE_CODE (arg1) == MULT_EXPR
10317
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10318
              && operand_equal_p (arg0,
10319
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10320
            {
10321
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10322
              tree cst1 = TREE_OPERAND (arg1, 1);
10323
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10324
                                      cst1, cst0);
10325
              if (sum && integer_zerop (sum))
10326
                return fold_convert_loc (loc, type,
10327
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
10328
                                                      TREE_TYPE (arg0), arg0,
10329
                                                      cst0));
10330
            }
10331
        }
10332
 
10333
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10334
         same or one.  Make sure type is not saturating.
10335
         fold_plusminus_mult_expr will re-associate.  */
10336
      if ((TREE_CODE (arg0) == MULT_EXPR
10337
           || TREE_CODE (arg1) == MULT_EXPR)
10338
          && !TYPE_SATURATING (type)
10339
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
10340
        {
10341
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10342
          if (tem)
10343
            return tem;
10344
        }
10345
 
10346
      if (! FLOAT_TYPE_P (type))
10347
        {
10348
          if (integer_zerop (arg1))
10349
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10350
 
10351
          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10352
             with a constant, and the two constants have no bits in common,
10353
             we should treat this as a BIT_IOR_EXPR since this may produce more
10354
             simplifications.  */
10355
          if (TREE_CODE (arg0) == BIT_AND_EXPR
10356
              && TREE_CODE (arg1) == BIT_AND_EXPR
10357
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10358
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10359
              && integer_zerop (const_binop (BIT_AND_EXPR,
10360
                                             TREE_OPERAND (arg0, 1),
10361
                                             TREE_OPERAND (arg1, 1), 0)))
10362
            {
10363
              code = BIT_IOR_EXPR;
10364
              goto bit_ior;
10365
            }
10366
 
10367
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10368
             (plus (plus (mult) (mult)) (foo)) so that we can
10369
             take advantage of the factoring cases below.  */
10370
          if (((TREE_CODE (arg0) == PLUS_EXPR
10371
                || TREE_CODE (arg0) == MINUS_EXPR)
10372
               && TREE_CODE (arg1) == MULT_EXPR)
10373
              || ((TREE_CODE (arg1) == PLUS_EXPR
10374
                   || TREE_CODE (arg1) == MINUS_EXPR)
10375
                  && TREE_CODE (arg0) == MULT_EXPR))
10376
            {
10377
              tree parg0, parg1, parg, marg;
10378
              enum tree_code pcode;
10379
 
10380
              if (TREE_CODE (arg1) == MULT_EXPR)
10381
                parg = arg0, marg = arg1;
10382
              else
10383
                parg = arg1, marg = arg0;
10384
              pcode = TREE_CODE (parg);
10385
              parg0 = TREE_OPERAND (parg, 0);
10386
              parg1 = TREE_OPERAND (parg, 1);
10387
              STRIP_NOPS (parg0);
10388
              STRIP_NOPS (parg1);
10389
 
10390
              if (TREE_CODE (parg0) == MULT_EXPR
10391
                  && TREE_CODE (parg1) != MULT_EXPR)
10392
                return fold_build2_loc (loc, pcode, type,
10393
                                    fold_build2_loc (loc, PLUS_EXPR, type,
10394
                                                 fold_convert_loc (loc, type,
10395
                                                                   parg0),
10396
                                                 fold_convert_loc (loc, type,
10397
                                                                   marg)),
10398
                                    fold_convert_loc (loc, type, parg1));
10399
              if (TREE_CODE (parg0) != MULT_EXPR
10400
                  && TREE_CODE (parg1) == MULT_EXPR)
10401
                return
10402
                  fold_build2_loc (loc, PLUS_EXPR, type,
10403
                               fold_convert_loc (loc, type, parg0),
10404
                               fold_build2_loc (loc, pcode, type,
10405
                                            fold_convert_loc (loc, type, marg),
10406
                                            fold_convert_loc (loc, type,
10407
                                                              parg1)));
10408
            }
10409
        }
10410
      else
10411
        {
10412
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10413
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10414
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10415
 
10416
          /* Likewise if the operands are reversed.  */
10417
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10418
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10419
 
10420
          /* Convert X + -C into X - C.  */
10421
          if (TREE_CODE (arg1) == REAL_CST
10422
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10423
            {
10424
              tem = fold_negate_const (arg1, type);
10425
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10426
                return fold_build2_loc (loc, MINUS_EXPR, type,
10427
                                    fold_convert_loc (loc, type, arg0),
10428
                                    fold_convert_loc (loc, type, tem));
10429
            }
10430
 
10431
          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10432
             to __complex__ ( x, y ).  This is not the same for SNaNs or
10433
             if signed zeros are involved.  */
10434
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10435
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10436
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10437
            {
10438
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10439
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10440
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10441
              bool arg0rz = false, arg0iz = false;
10442
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
10443
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
10444
                {
10445
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10446
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10447
                  if (arg0rz && arg1i && real_zerop (arg1i))
10448
                    {
10449
                      tree rp = arg1r ? arg1r
10450
                                  : build1 (REALPART_EXPR, rtype, arg1);
10451
                      tree ip = arg0i ? arg0i
10452
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
10453
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10454
                    }
10455
                  else if (arg0iz && arg1r && real_zerop (arg1r))
10456
                    {
10457
                      tree rp = arg0r ? arg0r
10458
                                  : build1 (REALPART_EXPR, rtype, arg0);
10459
                      tree ip = arg1i ? arg1i
10460
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
10461
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10462
                    }
10463
                }
10464
            }
10465
 
10466
          if (flag_unsafe_math_optimizations
10467
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10468
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10469
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10470
            return tem;
10471
 
10472
          /* Convert x+x into x*2.0.  */
10473
          if (operand_equal_p (arg0, arg1, 0)
10474
              && SCALAR_FLOAT_TYPE_P (type))
10475
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10476
                                build_real (type, dconst2));
10477
 
10478
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10479
             We associate floats only if the user has specified
10480
             -fassociative-math.  */
10481
          if (flag_associative_math
10482
              && TREE_CODE (arg1) == PLUS_EXPR
10483
              && TREE_CODE (arg0) != MULT_EXPR)
10484
            {
10485
              tree tree10 = TREE_OPERAND (arg1, 0);
10486
              tree tree11 = TREE_OPERAND (arg1, 1);
10487
              if (TREE_CODE (tree11) == MULT_EXPR
10488
                  && TREE_CODE (tree10) == MULT_EXPR)
10489
                {
10490
                  tree tree0;
10491
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10492
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10493
                }
10494
            }
10495
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10496
             We associate floats only if the user has specified
10497
             -fassociative-math.  */
10498
          if (flag_associative_math
10499
              && TREE_CODE (arg0) == PLUS_EXPR
10500
              && TREE_CODE (arg1) != MULT_EXPR)
10501
            {
10502
              tree tree00 = TREE_OPERAND (arg0, 0);
10503
              tree tree01 = TREE_OPERAND (arg0, 1);
10504
              if (TREE_CODE (tree01) == MULT_EXPR
10505
                  && TREE_CODE (tree00) == MULT_EXPR)
10506
                {
10507
                  tree tree0;
10508
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10509
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10510
                }
10511
            }
10512
        }
10513
 
10514
     bit_rotate:
10515
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10516
         is a rotate of A by C1 bits.  */
10517
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10518
         is a rotate of A by B bits.  */
10519
      {
10520
        enum tree_code code0, code1;
10521
        tree rtype;
10522
        code0 = TREE_CODE (arg0);
10523
        code1 = TREE_CODE (arg1);
10524
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10525
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10526
            && operand_equal_p (TREE_OPERAND (arg0, 0),
10527
                                TREE_OPERAND (arg1, 0), 0)
10528
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10529
                TYPE_UNSIGNED (rtype))
10530
            /* Only create rotates in complete modes.  Other cases are not
10531
               expanded properly.  */
10532
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10533
          {
10534
            tree tree01, tree11;
10535
            enum tree_code code01, code11;
10536
 
10537
            tree01 = TREE_OPERAND (arg0, 1);
10538
            tree11 = TREE_OPERAND (arg1, 1);
10539
            STRIP_NOPS (tree01);
10540
            STRIP_NOPS (tree11);
10541
            code01 = TREE_CODE (tree01);
10542
            code11 = TREE_CODE (tree11);
10543
            if (code01 == INTEGER_CST
10544
                && code11 == INTEGER_CST
10545
                && TREE_INT_CST_HIGH (tree01) == 0
10546
                && TREE_INT_CST_HIGH (tree11) == 0
10547
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10548
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10549
              {
10550
                tem = build2 (LROTATE_EXPR,
10551
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
10552
                              TREE_OPERAND (arg0, 0),
10553
                              code0 == LSHIFT_EXPR
10554
                              ? tree01 : tree11);
10555
                SET_EXPR_LOCATION (tem, loc);
10556
                return fold_convert_loc (loc, type, tem);
10557
              }
10558
            else if (code11 == MINUS_EXPR)
10559
              {
10560
                tree tree110, tree111;
10561
                tree110 = TREE_OPERAND (tree11, 0);
10562
                tree111 = TREE_OPERAND (tree11, 1);
10563
                STRIP_NOPS (tree110);
10564
                STRIP_NOPS (tree111);
10565
                if (TREE_CODE (tree110) == INTEGER_CST
10566
                    && 0 == compare_tree_int (tree110,
10567
                                              TYPE_PRECISION
10568
                                              (TREE_TYPE (TREE_OPERAND
10569
                                                          (arg0, 0))))
10570
                    && operand_equal_p (tree01, tree111, 0))
10571
                  return
10572
                    fold_convert_loc (loc, type,
10573
                                      build2 ((code0 == LSHIFT_EXPR
10574
                                               ? LROTATE_EXPR
10575
                                               : RROTATE_EXPR),
10576
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
10577
                                              TREE_OPERAND (arg0, 0), tree01));
10578
              }
10579
            else if (code01 == MINUS_EXPR)
10580
              {
10581
                tree tree010, tree011;
10582
                tree010 = TREE_OPERAND (tree01, 0);
10583
                tree011 = TREE_OPERAND (tree01, 1);
10584
                STRIP_NOPS (tree010);
10585
                STRIP_NOPS (tree011);
10586
                if (TREE_CODE (tree010) == INTEGER_CST
10587
                    && 0 == compare_tree_int (tree010,
10588
                                              TYPE_PRECISION
10589
                                              (TREE_TYPE (TREE_OPERAND
10590
                                                          (arg0, 0))))
10591
                    && operand_equal_p (tree11, tree011, 0))
10592
                    return fold_convert_loc
10593
                      (loc, type,
10594
                       build2 ((code0 != LSHIFT_EXPR
10595
                                ? LROTATE_EXPR
10596
                                : RROTATE_EXPR),
10597
                               TREE_TYPE (TREE_OPERAND (arg0, 0)),
10598
                               TREE_OPERAND (arg0, 0), tree11));
10599
              }
10600
          }
10601
      }
10602
 
10603
    associate:
10604
      /* In most languages, can't associate operations on floats through
10605
         parentheses.  Rather than remember where the parentheses were, we
10606
         don't associate floats at all, unless the user has specified
10607
         -fassociative-math.
10608
         And, we need to make sure type is not saturating.  */
10609
 
10610
      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10611
          && !TYPE_SATURATING (type))
10612
        {
10613
          tree var0, con0, lit0, minus_lit0;
10614
          tree var1, con1, lit1, minus_lit1;
10615
          bool ok = true;
10616
 
10617
          /* Split both trees into variables, constants, and literals.  Then
10618
             associate each group together, the constants with literals,
10619
             then the result with variables.  This increases the chances of
10620
             literals being recombined later and of generating relocatable
10621
             expressions for the sum of a constant and literal.  */
10622
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10623
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10624
                             code == MINUS_EXPR);
10625
 
10626
          /* With undefined overflow we can only associate constants
10627
             with one variable.  */
10628
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10629
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10630
              && var0 && var1)
10631
            {
10632
              tree tmp0 = var0;
10633
              tree tmp1 = var1;
10634
 
10635
              if (TREE_CODE (tmp0) == NEGATE_EXPR)
10636
                tmp0 = TREE_OPERAND (tmp0, 0);
10637
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
10638
                tmp1 = TREE_OPERAND (tmp1, 0);
10639
              /* The only case we can still associate with two variables
10640
                 is if they are the same, modulo negation.  */
10641
              if (!operand_equal_p (tmp0, tmp1, 0))
10642
                ok = false;
10643
            }
10644
 
10645
          /* Only do something if we found more than two objects.  Otherwise,
10646
             nothing has changed and we risk infinite recursion.  */
10647
          if (ok
10648
              && (2 < ((var0 != 0) + (var1 != 0)
10649
                       + (con0 != 0) + (con1 != 0)
10650
                       + (lit0 != 0) + (lit1 != 0)
10651
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10652
            {
10653
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10654
              if (code == MINUS_EXPR)
10655
                code = PLUS_EXPR;
10656
 
10657
              var0 = associate_trees (loc, var0, var1, code, type);
10658
              con0 = associate_trees (loc, con0, con1, code, type);
10659
              lit0 = associate_trees (loc, lit0, lit1, code, type);
10660
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10661
 
10662
              /* Preserve the MINUS_EXPR if the negative part of the literal is
10663
                 greater than the positive part.  Otherwise, the multiplicative
10664
                 folding code (i.e extract_muldiv) may be fooled in case
10665
                 unsigned constants are subtracted, like in the following
10666
                 example: ((X*2 + 4) - 8U)/2.  */
10667
              if (minus_lit0 && lit0)
10668
                {
10669
                  if (TREE_CODE (lit0) == INTEGER_CST
10670
                      && TREE_CODE (minus_lit0) == INTEGER_CST
10671
                      && tree_int_cst_lt (lit0, minus_lit0))
10672
                    {
10673
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10674
                                                    MINUS_EXPR, type);
10675
                      lit0 = 0;
10676
                    }
10677
                  else
10678
                    {
10679
                      lit0 = associate_trees (loc, lit0, minus_lit0,
10680
                                              MINUS_EXPR, type);
10681
                      minus_lit0 = 0;
10682
                    }
10683
                }
10684
              if (minus_lit0)
10685
                {
10686
                  if (con0 == 0)
10687
                    return
10688
                      fold_convert_loc (loc, type,
10689
                                        associate_trees (loc, var0, minus_lit0,
10690
                                                         MINUS_EXPR, type));
10691
                  else
10692
                    {
10693
                      con0 = associate_trees (loc, con0, minus_lit0,
10694
                                              MINUS_EXPR, type);
10695
                      return
10696
                        fold_convert_loc (loc, type,
10697
                                          associate_trees (loc, var0, con0,
10698
                                                           PLUS_EXPR, type));
10699
                    }
10700
                }
10701
 
10702
              con0 = associate_trees (loc, con0, lit0, code, type);
10703
              return
10704
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10705
                                                              code, type));
10706
            }
10707
        }
10708
 
10709
      return NULL_TREE;
10710
 
10711
    case MINUS_EXPR:
10712
      /* Pointer simplifications for subtraction, simple reassociations. */
10713
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10714
        {
10715
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10716
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10717
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10718
            {
10719
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10720
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10721
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10722
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10723
              return fold_build2_loc (loc, PLUS_EXPR, type,
10724
                                  fold_build2_loc (loc, MINUS_EXPR, type,
10725
                                               arg00, arg10),
10726
                                  fold_build2_loc (loc, MINUS_EXPR, type,
10727
                                               arg01, arg11));
10728
            }
10729
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10730
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10731
            {
10732
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10733
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10734
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10735
                                      fold_convert_loc (loc, type, arg1));
10736
              if (tmp)
10737
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10738
            }
10739
        }
10740
      /* A - (-B) -> A + B */
10741
      if (TREE_CODE (arg1) == NEGATE_EXPR)
10742
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10743
                            fold_convert_loc (loc, type,
10744
                                              TREE_OPERAND (arg1, 0)));
10745
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10746
      if (TREE_CODE (arg0) == NEGATE_EXPR
10747
          && (FLOAT_TYPE_P (type)
10748
              || INTEGRAL_TYPE_P (type))
10749
          && negate_expr_p (arg1)
10750
          && reorder_operands_p (arg0, arg1))
10751
        return fold_build2_loc (loc, MINUS_EXPR, type,
10752
                            fold_convert_loc (loc, type,
10753
                                              negate_expr (arg1)),
10754
                            fold_convert_loc (loc, type,
10755
                                              TREE_OPERAND (arg0, 0)));
10756
      /* Convert -A - 1 to ~A.  */
10757
      if (INTEGRAL_TYPE_P (type)
10758
          && TREE_CODE (arg0) == NEGATE_EXPR
10759
          && integer_onep (arg1)
10760
          && !TYPE_OVERFLOW_TRAPS (type))
10761
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10762
                            fold_convert_loc (loc, type,
10763
                                              TREE_OPERAND (arg0, 0)));
10764
 
10765
      /* Convert -1 - A to ~A.  */
10766
      if (INTEGRAL_TYPE_P (type)
10767
          && integer_all_onesp (arg0))
10768
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10769
 
10770
 
10771
      /* X - (X / CST) * CST is X % CST.  */
10772
      if (INTEGRAL_TYPE_P (type)
10773
          && TREE_CODE (arg1) == MULT_EXPR
10774
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10775
          && operand_equal_p (arg0,
10776
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10777
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10778
                              TREE_OPERAND (arg1, 1), 0))
10779
        return
10780
          fold_convert_loc (loc, type,
10781
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10782
                                         arg0, TREE_OPERAND (arg1, 1)));
10783
 
10784
      if (! FLOAT_TYPE_P (type))
10785
        {
10786
          if (integer_zerop (arg0))
10787
            return negate_expr (fold_convert_loc (loc, type, arg1));
10788
          if (integer_zerop (arg1))
10789
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10790
 
10791
          /* Fold A - (A & B) into ~B & A.  */
10792
          if (!TREE_SIDE_EFFECTS (arg0)
10793
              && TREE_CODE (arg1) == BIT_AND_EXPR)
10794
            {
10795
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10796
                {
10797
                  tree arg10 = fold_convert_loc (loc, type,
10798
                                                 TREE_OPERAND (arg1, 0));
10799
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10800
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
10801
                                                   type, arg10),
10802
                                      fold_convert_loc (loc, type, arg0));
10803
                }
10804
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10805
                {
10806
                  tree arg11 = fold_convert_loc (loc,
10807
                                                 type, TREE_OPERAND (arg1, 1));
10808
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10809
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
10810
                                                   type, arg11),
10811
                                      fold_convert_loc (loc, type, arg0));
10812
                }
10813
            }
10814
 
10815
          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10816
             any power of 2 minus 1.  */
10817
          if (TREE_CODE (arg0) == BIT_AND_EXPR
10818
              && TREE_CODE (arg1) == BIT_AND_EXPR
10819
              && operand_equal_p (TREE_OPERAND (arg0, 0),
10820
                                  TREE_OPERAND (arg1, 0), 0))
10821
            {
10822
              tree mask0 = TREE_OPERAND (arg0, 1);
10823
              tree mask1 = TREE_OPERAND (arg1, 1);
10824
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10825
 
10826
              if (operand_equal_p (tem, mask1, 0))
10827
                {
10828
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10829
                                     TREE_OPERAND (arg0, 0), mask1);
10830
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10831
                }
10832
            }
10833
        }
10834
 
10835
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10836
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10837
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10838
 
10839
      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10840
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
10841
         (-ARG1 + ARG0) reduces to -ARG1.  */
10842
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10843
        return negate_expr (fold_convert_loc (loc, type, arg1));
10844
 
10845
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10846
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
10847
         signed zeros are involved.  */
10848
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10849
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10850
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10851
        {
10852
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10853
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10854
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10855
          bool arg0rz = false, arg0iz = false;
10856
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
10857
              || (arg0i && (arg0iz = real_zerop (arg0i))))
10858
            {
10859
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10860
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10861
              if (arg0rz && arg1i && real_zerop (arg1i))
10862
                {
10863
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10864
                                         arg1r ? arg1r
10865
                                         : build1 (REALPART_EXPR, rtype, arg1));
10866
                  tree ip = arg0i ? arg0i
10867
                    : build1 (IMAGPART_EXPR, rtype, arg0);
10868
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10869
                }
10870
              else if (arg0iz && arg1r && real_zerop (arg1r))
10871
                {
10872
                  tree rp = arg0r ? arg0r
10873
                    : build1 (REALPART_EXPR, rtype, arg0);
10874
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10875
                                         arg1i ? arg1i
10876
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
10877
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10878
                }
10879
            }
10880
        }
10881
 
10882
      /* Fold &x - &x.  This can happen from &x.foo - &x.
10883
         This is unsafe for certain floats even in non-IEEE formats.
10884
         In IEEE, it is unsafe because it does wrong for NaNs.
10885
         Also note that operand_equal_p is always false if an operand
10886
         is volatile.  */
10887
 
10888
      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10889
          && operand_equal_p (arg0, arg1, 0))
10890
        return fold_convert_loc (loc, type, integer_zero_node);
10891
 
10892
      /* A - B -> A + (-B) if B is easily negatable.  */
10893
      if (negate_expr_p (arg1)
10894
          && ((FLOAT_TYPE_P (type)
10895
               /* Avoid this transformation if B is a positive REAL_CST.  */
10896
               && (TREE_CODE (arg1) != REAL_CST
10897
                   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10898
              || INTEGRAL_TYPE_P (type)))
10899
        return fold_build2_loc (loc, PLUS_EXPR, type,
10900
                            fold_convert_loc (loc, type, arg0),
10901
                            fold_convert_loc (loc, type,
10902
                                              negate_expr (arg1)));
10903
 
10904
      /* Try folding difference of addresses.  */
10905
      {
10906
        HOST_WIDE_INT diff;
10907
 
10908
        if ((TREE_CODE (arg0) == ADDR_EXPR
10909
             || TREE_CODE (arg1) == ADDR_EXPR)
10910
            && ptr_difference_const (arg0, arg1, &diff))
10911
          return build_int_cst_type (type, diff);
10912
      }
10913
 
10914
      /* Fold &a[i] - &a[j] to i-j.  */
10915
      if (TREE_CODE (arg0) == ADDR_EXPR
10916
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10917
          && TREE_CODE (arg1) == ADDR_EXPR
10918
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10919
        {
10920
          tree aref0 = TREE_OPERAND (arg0, 0);
10921
          tree aref1 = TREE_OPERAND (arg1, 0);
10922
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
10923
                               TREE_OPERAND (aref1, 0), 0))
10924
            {
10925
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10926
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10927
              tree esz = array_ref_element_size (aref0);
10928
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
10929
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
10930
                                  fold_convert_loc (loc, type, esz));
10931
 
10932
            }
10933
        }
10934
 
10935
      if (FLOAT_TYPE_P (type)
10936
          && flag_unsafe_math_optimizations
10937
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10938
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10939
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10940
        return tem;
10941
 
10942
      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10943
         same or one.  Make sure type is not saturating.
10944
         fold_plusminus_mult_expr will re-associate.  */
10945
      if ((TREE_CODE (arg0) == MULT_EXPR
10946
           || TREE_CODE (arg1) == MULT_EXPR)
10947
          && !TYPE_SATURATING (type)
10948
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
10949
        {
10950
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10951
          if (tem)
10952
            return tem;
10953
        }
10954
 
10955
      goto associate;
10956
 
10957
    case MULT_EXPR:
10958
      /* (-A) * (-B) -> A * B  */
10959
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10960
        return fold_build2_loc (loc, MULT_EXPR, type,
10961
                            fold_convert_loc (loc, type,
10962
                                              TREE_OPERAND (arg0, 0)),
10963
                            fold_convert_loc (loc, type,
10964
                                              negate_expr (arg1)));
10965
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10966
        return fold_build2_loc (loc, MULT_EXPR, type,
10967
                            fold_convert_loc (loc, type,
10968
                                              negate_expr (arg0)),
10969
                            fold_convert_loc (loc, type,
10970
                                              TREE_OPERAND (arg1, 0)));
10971
 
10972
      if (! FLOAT_TYPE_P (type))
10973
        {
10974
          if (integer_zerop (arg1))
10975
            return omit_one_operand_loc (loc, type, arg1, arg0);
10976
          if (integer_onep (arg1))
10977
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10978
          /* Transform x * -1 into -x.  Make sure to do the negation
10979
             on the original operand with conversions not stripped
10980
             because we can only strip non-sign-changing conversions.  */
10981
          if (integer_all_onesp (arg1))
10982
            return fold_convert_loc (loc, type, negate_expr (op0));
10983
          /* Transform x * -C into -x * C if x is easily negatable.  */
10984
          if (TREE_CODE (arg1) == INTEGER_CST
10985
              && tree_int_cst_sgn (arg1) == -1
10986
              && negate_expr_p (arg0)
10987
              && (tem = negate_expr (arg1)) != arg1
10988
              && !TREE_OVERFLOW (tem))
10989
            return fold_build2_loc (loc, MULT_EXPR, type,
10990
                                fold_convert_loc (loc, type,
10991
                                                  negate_expr (arg0)),
10992
                                tem);
10993
 
10994
          /* (a * (1 << b)) is (a << b)  */
10995
          if (TREE_CODE (arg1) == LSHIFT_EXPR
10996
              && integer_onep (TREE_OPERAND (arg1, 0)))
10997
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10998
                                TREE_OPERAND (arg1, 1));
10999
          if (TREE_CODE (arg0) == LSHIFT_EXPR
11000
              && integer_onep (TREE_OPERAND (arg0, 0)))
11001
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11002
                                TREE_OPERAND (arg0, 1));
11003
 
11004
          /* (A + A) * C -> A * 2 * C  */
11005
          if (TREE_CODE (arg0) == PLUS_EXPR
11006
              && TREE_CODE (arg1) == INTEGER_CST
11007
              && operand_equal_p (TREE_OPERAND (arg0, 0),
11008
                                  TREE_OPERAND (arg0, 1), 0))
11009
            return fold_build2_loc (loc, MULT_EXPR, type,
11010
                                omit_one_operand_loc (loc, type,
11011
                                                  TREE_OPERAND (arg0, 0),
11012
                                                  TREE_OPERAND (arg0, 1)),
11013
                                fold_build2_loc (loc, MULT_EXPR, type,
11014
                                             build_int_cst (type, 2) , arg1));
11015
 
11016
          strict_overflow_p = false;
11017
          if (TREE_CODE (arg1) == INTEGER_CST
11018
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11019
                                             &strict_overflow_p)))
11020
            {
11021
              if (strict_overflow_p)
11022
                fold_overflow_warning (("assuming signed overflow does not "
11023
                                        "occur when simplifying "
11024
                                        "multiplication"),
11025
                                       WARN_STRICT_OVERFLOW_MISC);
11026
              return fold_convert_loc (loc, type, tem);
11027
            }
11028
 
11029
          /* Optimize z * conj(z) for integer complex numbers.  */
11030
          if (TREE_CODE (arg0) == CONJ_EXPR
11031
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11032
            return fold_mult_zconjz (loc, type, arg1);
11033
          if (TREE_CODE (arg1) == CONJ_EXPR
11034
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11035
            return fold_mult_zconjz (loc, type, arg0);
11036
        }
11037
      else
11038
        {
11039
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
11040
             when x is NaN, since x * 0 is also NaN.  Nor are they the
11041
             same in modes with signed zeros, since multiplying a
11042
             negative value by 0 gives -0, not +0.  */
11043
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11044
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11045
              && real_zerop (arg1))
11046
            return omit_one_operand_loc (loc, type, arg1, arg0);
11047
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
11048
             Likewise for complex arithmetic with signed zeros.  */
11049
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11050
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11051
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11052
              && real_onep (arg1))
11053
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11054
 
11055
          /* Transform x * -1.0 into -x.  */
11056
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11057
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11058
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11059
              && real_minus_onep (arg1))
11060
            return fold_convert_loc (loc, type, negate_expr (arg0));
11061
 
11062
          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
11063
             the result for floating point types due to rounding so it is applied
11064
             only if -fassociative-math was specify.  */
11065
          if (flag_associative_math
11066
              && TREE_CODE (arg0) == RDIV_EXPR
11067
              && TREE_CODE (arg1) == REAL_CST
11068
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11069
            {
11070
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11071
                                      arg1, 0);
11072
              if (tem)
11073
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11074
                                    TREE_OPERAND (arg0, 1));
11075
            }
11076
 
11077
          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
11078
          if (operand_equal_p (arg0, arg1, 0))
11079
            {
11080
              tree tem = fold_strip_sign_ops (arg0);
11081
              if (tem != NULL_TREE)
11082
                {
11083
                  tem = fold_convert_loc (loc, type, tem);
11084
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11085
                }
11086
            }
11087
 
11088
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11089
             This is not the same for NaNs or if signed zeros are
11090
             involved.  */
11091
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11092
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11093
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11094
              && TREE_CODE (arg1) == COMPLEX_CST
11095
              && real_zerop (TREE_REALPART (arg1)))
11096
            {
11097
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11098
              if (real_onep (TREE_IMAGPART (arg1)))
11099
                return
11100
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
11101
                               negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11102
                                                             rtype, arg0)),
11103
                               fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11104
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
11105
                return
11106
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
11107
                               fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11108
                               negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11109
                                                             rtype, arg0)));
11110
            }
11111
 
11112
          /* Optimize z * conj(z) for floating point complex numbers.
11113
             Guarded by flag_unsafe_math_optimizations as non-finite
11114
             imaginary components don't produce scalar results.  */
11115
          if (flag_unsafe_math_optimizations
11116
              && TREE_CODE (arg0) == CONJ_EXPR
11117
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11118
            return fold_mult_zconjz (loc, type, arg1);
11119
          if (flag_unsafe_math_optimizations
11120
              && TREE_CODE (arg1) == CONJ_EXPR
11121
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11122
            return fold_mult_zconjz (loc, type, arg0);
11123
 
11124
          if (flag_unsafe_math_optimizations)
11125
            {
11126
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11127
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11128
 
11129
              /* Optimizations of root(...)*root(...).  */
11130
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11131
                {
11132
                  tree rootfn, arg;
11133
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11134
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11135
 
11136
                  /* Optimize sqrt(x)*sqrt(x) as x.  */
11137
                  if (BUILTIN_SQRT_P (fcode0)
11138
                      && operand_equal_p (arg00, arg10, 0)
11139
                      && ! HONOR_SNANS (TYPE_MODE (type)))
11140
                    return arg00;
11141
 
11142
                  /* Optimize root(x)*root(y) as root(x*y).  */
11143
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11144
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11145
                  return build_call_expr_loc (loc, rootfn, 1, arg);
11146
                }
11147
 
11148
              /* Optimize expN(x)*expN(y) as expN(x+y).  */
11149
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11150
                {
11151
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11152
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11153
                                          CALL_EXPR_ARG (arg0, 0),
11154
                                          CALL_EXPR_ARG (arg1, 0));
11155
                  return build_call_expr_loc (loc, expfn, 1, arg);
11156
                }
11157
 
11158
              /* Optimizations of pow(...)*pow(...).  */
11159
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11160
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11161
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11162
                {
11163
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11164
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11165
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11166
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11167
 
11168
                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
11169
                  if (operand_equal_p (arg01, arg11, 0))
11170
                    {
11171
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11172
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11173
                                              arg00, arg10);
11174
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11175
                    }
11176
 
11177
                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
11178
                  if (operand_equal_p (arg00, arg10, 0))
11179
                    {
11180
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11181
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11182
                                              arg01, arg11);
11183
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11184
                    }
11185
                }
11186
 
11187
              /* Optimize tan(x)*cos(x) as sin(x).  */
11188
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11189
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11190
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11191
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11192
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11193
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11194
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11195
                                      CALL_EXPR_ARG (arg1, 0), 0))
11196
                {
11197
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11198
 
11199
                  if (sinfn != NULL_TREE)
11200
                    return build_call_expr_loc (loc, sinfn, 1,
11201
                                            CALL_EXPR_ARG (arg0, 0));
11202
                }
11203
 
11204
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
11205
              if (fcode1 == BUILT_IN_POW
11206
                  || fcode1 == BUILT_IN_POWF
11207
                  || fcode1 == BUILT_IN_POWL)
11208
                {
11209
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11210
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11211
                  if (TREE_CODE (arg11) == REAL_CST
11212
                      && !TREE_OVERFLOW (arg11)
11213
                      && operand_equal_p (arg0, arg10, 0))
11214
                    {
11215
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11216
                      REAL_VALUE_TYPE c;
11217
                      tree arg;
11218
 
11219
                      c = TREE_REAL_CST (arg11);
11220
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11221
                      arg = build_real (type, c);
11222
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11223
                    }
11224
                }
11225
 
11226
              /* Optimize pow(x,c)*x as pow(x,c+1).  */
11227
              if (fcode0 == BUILT_IN_POW
11228
                  || fcode0 == BUILT_IN_POWF
11229
                  || fcode0 == BUILT_IN_POWL)
11230
                {
11231
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11232
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11233
                  if (TREE_CODE (arg01) == REAL_CST
11234
                      && !TREE_OVERFLOW (arg01)
11235
                      && operand_equal_p (arg1, arg00, 0))
11236
                    {
11237
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11238
                      REAL_VALUE_TYPE c;
11239
                      tree arg;
11240
 
11241
                      c = TREE_REAL_CST (arg01);
11242
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11243
                      arg = build_real (type, c);
11244
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11245
                    }
11246
                }
11247
 
11248
              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
11249
              if (optimize_function_for_speed_p (cfun)
11250
                  && operand_equal_p (arg0, arg1, 0))
11251
                {
11252
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11253
 
11254
                  if (powfn)
11255
                    {
11256
                      tree arg = build_real (type, dconst2);
11257
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11258
                    }
11259
                }
11260
            }
11261
        }
11262
      goto associate;
11263
 
11264
    case BIT_IOR_EXPR:
11265
    bit_ior:
11266
      if (integer_all_onesp (arg1))
11267
        return omit_one_operand_loc (loc, type, arg1, arg0);
11268
      if (integer_zerop (arg1))
11269
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11270
      if (operand_equal_p (arg0, arg1, 0))
11271
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11272
 
11273
      /* ~X | X is -1.  */
11274
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11275
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11276
        {
11277
          t1 = fold_convert_loc (loc, type, integer_zero_node);
11278
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11279
          return omit_one_operand_loc (loc, type, t1, arg1);
11280
        }
11281
 
11282
      /* X | ~X is -1.  */
11283
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11284
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11285
        {
11286
          t1 = fold_convert_loc (loc, type, integer_zero_node);
11287
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11288
          return omit_one_operand_loc (loc, type, t1, arg0);
11289
        }
11290
 
11291
      /* Canonicalize (X & C1) | C2.  */
11292
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11293
          && TREE_CODE (arg1) == INTEGER_CST
11294
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11295
        {
11296
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11297
          int width = TYPE_PRECISION (type), w;
11298
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11299
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11300
          hi2 = TREE_INT_CST_HIGH (arg1);
11301
          lo2 = TREE_INT_CST_LOW (arg1);
11302
 
11303
          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11304
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11305
            return omit_one_operand_loc (loc, type, arg1,
11306
                                     TREE_OPERAND (arg0, 0));
11307
 
11308
          if (width > HOST_BITS_PER_WIDE_INT)
11309
            {
11310
              mhi = (unsigned HOST_WIDE_INT) -1
11311
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
11312
              mlo = -1;
11313
            }
11314
          else
11315
            {
11316
              mhi = 0;
11317
              mlo = (unsigned HOST_WIDE_INT) -1
11318
                    >> (HOST_BITS_PER_WIDE_INT - width);
11319
            }
11320
 
11321
          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11322
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11323
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11324
                                TREE_OPERAND (arg0, 0), arg1);
11325
 
11326
          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11327
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11328
             mode which allows further optimizations.  */
11329
          hi1 &= mhi;
11330
          lo1 &= mlo;
11331
          hi2 &= mhi;
11332
          lo2 &= mlo;
11333
          hi3 = hi1 & ~hi2;
11334
          lo3 = lo1 & ~lo2;
11335
          for (w = BITS_PER_UNIT;
11336
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
11337
               w <<= 1)
11338
            {
11339
              unsigned HOST_WIDE_INT mask
11340
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11341
              if (((lo1 | lo2) & mask) == mask
11342
                  && (lo1 & ~mask) == 0 && hi1 == 0)
11343
                {
11344
                  hi3 = 0;
11345
                  lo3 = mask;
11346
                  break;
11347
                }
11348
            }
11349
          if (hi3 != hi1 || lo3 != lo1)
11350
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11351
                                fold_build2_loc (loc, BIT_AND_EXPR, type,
11352
                                             TREE_OPERAND (arg0, 0),
11353
                                             build_int_cst_wide (type,
11354
                                                                 lo3, hi3)),
11355
                                arg1);
11356
        }
11357
 
11358
      /* (X & Y) | Y is (X, Y).  */
11359
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11360
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11361
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11362
      /* (X & Y) | X is (Y, X).  */
11363
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11364
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11365
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11366
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11367
      /* X | (X & Y) is (Y, X).  */
11368
      if (TREE_CODE (arg1) == BIT_AND_EXPR
11369
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11370
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11371
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11372
      /* X | (Y & X) is (Y, X).  */
11373
      if (TREE_CODE (arg1) == BIT_AND_EXPR
11374
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11375
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11376
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11377
 
11378
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11379
      if (t1 != NULL_TREE)
11380
        return t1;
11381
 
11382
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11383
 
11384
         This results in more efficient code for machines without a NAND
11385
         instruction.  Combine will canonicalize to the first form
11386
         which will allow use of NAND instructions provided by the
11387
         backend if they exist.  */
11388
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11389
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
11390
        {
11391
          return
11392
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
11393
                         build2 (BIT_AND_EXPR, type,
11394
                                 fold_convert_loc (loc, type,
11395
                                                   TREE_OPERAND (arg0, 0)),
11396
                                 fold_convert_loc (loc, type,
11397
                                                   TREE_OPERAND (arg1, 0))));
11398
        }
11399
 
11400
      /* See if this can be simplified into a rotate first.  If that
11401
         is unsuccessful continue in the association code.  */
11402
      goto bit_rotate;
11403
 
11404
    case BIT_XOR_EXPR:
11405
      if (integer_zerop (arg1))
11406
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11407
      if (integer_all_onesp (arg1))
11408
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11409
      if (operand_equal_p (arg0, arg1, 0))
11410
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11411
 
11412
      /* ~X ^ X is -1.  */
11413
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11414
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11415
        {
11416
          t1 = fold_convert_loc (loc, type, integer_zero_node);
11417
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11418
          return omit_one_operand_loc (loc, type, t1, arg1);
11419
        }
11420
 
11421
      /* X ^ ~X is -1.  */
11422
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11423
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11424
        {
11425
          t1 = fold_convert_loc (loc, type, integer_zero_node);
11426
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11427
          return omit_one_operand_loc (loc, type, t1, arg0);
11428
        }
11429
 
11430
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11431
         with a constant, and the two constants have no bits in common,
11432
         we should treat this as a BIT_IOR_EXPR since this may produce more
11433
         simplifications.  */
11434
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11435
          && TREE_CODE (arg1) == BIT_AND_EXPR
11436
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11437
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11438
          && integer_zerop (const_binop (BIT_AND_EXPR,
11439
                                         TREE_OPERAND (arg0, 1),
11440
                                         TREE_OPERAND (arg1, 1), 0)))
11441
        {
11442
          code = BIT_IOR_EXPR;
11443
          goto bit_ior;
11444
        }
11445
 
11446
      /* (X | Y) ^ X -> Y & ~ X*/
11447
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11448
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11449
        {
11450
          tree t2 = TREE_OPERAND (arg0, 1);
11451
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11452
                            arg1);
11453
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11454
                            fold_convert_loc (loc, type, t2),
11455
                            fold_convert_loc (loc, type, t1));
11456
          return t1;
11457
        }
11458
 
11459
      /* (Y | X) ^ X -> Y & ~ X*/
11460
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11461
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11462
        {
11463
          tree t2 = TREE_OPERAND (arg0, 0);
11464
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11465
                            arg1);
11466
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11467
                            fold_convert_loc (loc, type, t2),
11468
                            fold_convert_loc (loc, type, t1));
11469
          return t1;
11470
        }
11471
 
11472
      /* X ^ (X | Y) -> Y & ~ X*/
11473
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11474
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11475
        {
11476
          tree t2 = TREE_OPERAND (arg1, 1);
11477
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11478
                            arg0);
11479
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11480
                            fold_convert_loc (loc, type, t2),
11481
                            fold_convert_loc (loc, type, t1));
11482
          return t1;
11483
        }
11484
 
11485
      /* X ^ (Y | X) -> Y & ~ X*/
11486
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11487
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11488
        {
11489
          tree t2 = TREE_OPERAND (arg1, 0);
11490
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11491
                            arg0);
11492
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11493
                            fold_convert_loc (loc, type, t2),
11494
                            fold_convert_loc (loc, type, t1));
11495
          return t1;
11496
        }
11497
 
11498
      /* Convert ~X ^ ~Y to X ^ Y.  */
11499
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11500
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
11501
        return fold_build2_loc (loc, code, type,
11502
                            fold_convert_loc (loc, type,
11503
                                              TREE_OPERAND (arg0, 0)),
11504
                            fold_convert_loc (loc, type,
11505
                                              TREE_OPERAND (arg1, 0)));
11506
 
11507
      /* Convert ~X ^ C to X ^ ~C.  */
11508
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11509
          && TREE_CODE (arg1) == INTEGER_CST)
11510
        return fold_build2_loc (loc, code, type,
11511
                            fold_convert_loc (loc, type,
11512
                                              TREE_OPERAND (arg0, 0)),
11513
                            fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11514
 
11515
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11516
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11517
          && integer_onep (TREE_OPERAND (arg0, 1))
11518
          && integer_onep (arg1))
11519
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11520
                            build_int_cst (TREE_TYPE (arg0), 0));
11521
 
11522
      /* Fold (X & Y) ^ Y as ~X & Y.  */
11523
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11524
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11525
        {
11526
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11527
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11528
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11529
                              fold_convert_loc (loc, type, arg1));
11530
        }
11531
      /* Fold (X & Y) ^ X as ~Y & X.  */
11532
      if (TREE_CODE (arg0) == BIT_AND_EXPR
11533
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11534
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11535
        {
11536
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11537
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11539
                              fold_convert_loc (loc, type, arg1));
11540
        }
11541
      /* Fold X ^ (X & Y) as X & ~Y.  */
11542
      if (TREE_CODE (arg1) == BIT_AND_EXPR
11543
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11544
        {
11545
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11546
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11547
                              fold_convert_loc (loc, type, arg0),
11548
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11549
        }
11550
      /* Fold X ^ (Y & X) as ~Y & X.  */
11551
      if (TREE_CODE (arg1) == BIT_AND_EXPR
11552
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11553
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11554
        {
11555
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11556
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11557
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11558
                              fold_convert_loc (loc, type, arg0));
11559
        }
11560
 
11561
      /* See if this can be simplified into a rotate first.  If that
11562
         is unsuccessful continue in the association code.  */
11563
      goto bit_rotate;
11564
 
11565
    case BIT_AND_EXPR:
11566
      if (integer_all_onesp (arg1))
11567
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11568
      if (integer_zerop (arg1))
11569
        return omit_one_operand_loc (loc, type, arg1, arg0);
11570
      if (operand_equal_p (arg0, arg1, 0))
11571
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11572
 
11573
      /* ~X & X is always zero.  */
11574
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11575
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11576
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11577
 
11578
      /* X & ~X is always zero.  */
11579
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11580
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11581
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11582
 
11583
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
11584
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11585
          && TREE_CODE (arg1) == INTEGER_CST
11586
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11587
        {
11588
          tree tmp1 = fold_convert_loc (loc, type, arg1);
11589
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11590
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11591
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11592
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11593
          return
11594
            fold_convert_loc (loc, type,
11595
                              fold_build2_loc (loc, BIT_IOR_EXPR,
11596
                                           type, tmp2, tmp3));
11597
        }
11598
 
11599
      /* (X | Y) & Y is (X, Y).  */
11600
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11601
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11602
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11603
      /* (X | Y) & X is (Y, X).  */
11604
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11605
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11606
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11607
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11608
      /* X & (X | Y) is (Y, X).  */
11609
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11610
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11611
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11612
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11613
      /* X & (Y | X) is (Y, X).  */
11614
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11615
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11616
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11617
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11618
 
11619
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11620
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11621
          && integer_onep (TREE_OPERAND (arg0, 1))
11622
          && integer_onep (arg1))
11623
        {
11624
          tem = TREE_OPERAND (arg0, 0);
11625
          return fold_build2_loc (loc, EQ_EXPR, type,
11626
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11627
                                           build_int_cst (TREE_TYPE (tem), 1)),
11628
                              build_int_cst (TREE_TYPE (tem), 0));
11629
        }
11630
      /* Fold ~X & 1 as (X & 1) == 0.  */
11631
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11632
          && integer_onep (arg1))
11633
        {
11634
          tem = TREE_OPERAND (arg0, 0);
11635
          return fold_build2_loc (loc, EQ_EXPR, type,
11636
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11637
                                           build_int_cst (TREE_TYPE (tem), 1)),
11638
                              build_int_cst (TREE_TYPE (tem), 0));
11639
        }
11640
 
11641
      /* Fold (X ^ Y) & Y as ~X & Y.  */
11642
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11643
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11644
        {
11645
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11646
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11647
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11648
                              fold_convert_loc (loc, type, arg1));
11649
        }
11650
      /* Fold (X ^ Y) & X as ~Y & X.  */
11651
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11652
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11653
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11654
        {
11655
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11656
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11657
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11658
                              fold_convert_loc (loc, type, arg1));
11659
        }
11660
      /* Fold X & (X ^ Y) as X & ~Y.  */
11661
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
11662
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11663
        {
11664
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11665
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11666
                              fold_convert_loc (loc, type, arg0),
11667
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11668
        }
11669
      /* Fold X & (Y ^ X) as ~Y & X.  */
11670
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
11671
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11672
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11673
        {
11674
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11675
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
11676
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11677
                              fold_convert_loc (loc, type, arg0));
11678
        }
11679
 
11680
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11681
      if (t1 != NULL_TREE)
11682
        return t1;
11683
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11684
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11685
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11686
        {
11687
          unsigned int prec
11688
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11689
 
11690
          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11691
              && (~TREE_INT_CST_LOW (arg1)
11692
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11693
            return
11694
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11695
        }
11696
 
11697
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11698
 
11699
         This results in more efficient code for machines without a NOR
11700
         instruction.  Combine will canonicalize to the first form
11701
         which will allow use of NOR instructions provided by the
11702
         backend if they exist.  */
11703
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11704
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
11705
        {
11706
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11707
                              build2 (BIT_IOR_EXPR, type,
11708
                                      fold_convert_loc (loc, type,
11709
                                                        TREE_OPERAND (arg0, 0)),
11710
                                      fold_convert_loc (loc, type,
11711
                                                        TREE_OPERAND (arg1, 0))));
11712
        }
11713
 
11714
      /* If arg0 is derived from the address of an object or function, we may
11715
         be able to fold this expression using the object or function's
11716
         alignment.  */
11717
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11718
        {
11719
          unsigned HOST_WIDE_INT modulus, residue;
11720
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11721
 
11722
          modulus = get_pointer_modulus_and_residue (arg0, &residue,
11723
                                                     integer_onep (arg1));
11724
 
11725
          /* This works because modulus is a power of 2.  If this weren't the
11726
             case, we'd have to replace it by its greatest power-of-2
11727
             divisor: modulus & -modulus.  */
11728
          if (low < modulus)
11729
            return build_int_cst (type, residue & low);
11730
        }
11731
 
11732
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11733
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11734
         if the new mask might be further optimized.  */
11735
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
11736
           || TREE_CODE (arg0) == RSHIFT_EXPR)
11737
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
11738
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11739
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11740
             < TYPE_PRECISION (TREE_TYPE (arg0))
11741
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11742
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11743
        {
11744
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11745
          unsigned HOST_WIDE_INT mask
11746
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11747
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
11748
          tree shift_type = TREE_TYPE (arg0);
11749
 
11750
          if (TREE_CODE (arg0) == LSHIFT_EXPR)
11751
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11752
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
11753
                   && TYPE_PRECISION (TREE_TYPE (arg0))
11754
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11755
            {
11756
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11757
              tree arg00 = TREE_OPERAND (arg0, 0);
11758
              /* See if more bits can be proven as zero because of
11759
                 zero extension.  */
11760
              if (TREE_CODE (arg00) == NOP_EXPR
11761
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11762
                {
11763
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11764
                  if (TYPE_PRECISION (inner_type)
11765
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11766
                      && TYPE_PRECISION (inner_type) < prec)
11767
                    {
11768
                      prec = TYPE_PRECISION (inner_type);
11769
                      /* See if we can shorten the right shift.  */
11770
                      if (shiftc < prec)
11771
                        shift_type = inner_type;
11772
                    }
11773
                }
11774
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
11775
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11776
              zerobits <<= prec - shiftc;
11777
              /* For arithmetic shift if sign bit could be set, zerobits
11778
                 can contain actually sign bits, so no transformation is
11779
                 possible, unless MASK masks them all away.  In that
11780
                 case the shift needs to be converted into logical shift.  */
11781
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11782
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11783
                {
11784
                  if ((mask & zerobits) == 0)
11785
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11786
                  else
11787
                    zerobits = 0;
11788
                }
11789
            }
11790
 
11791
          /* ((X << 16) & 0xff00) is (X, 0).  */
11792
          if ((mask & zerobits) == mask)
11793
            return omit_one_operand_loc (loc, type,
11794
                                     build_int_cst (type, 0), arg0);
11795
 
11796
          newmask = mask | zerobits;
11797
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
11798
            {
11799
              unsigned int prec;
11800
 
11801
              /* Only do the transformation if NEWMASK is some integer
11802
                 mode's mask.  */
11803
              for (prec = BITS_PER_UNIT;
11804
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11805
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11806
                  break;
11807
              if (prec < HOST_BITS_PER_WIDE_INT
11808
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11809
                {
11810
                  tree newmaskt;
11811
 
11812
                  if (shift_type != TREE_TYPE (arg0))
11813
                    {
11814
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11815
                                         fold_convert_loc (loc, shift_type,
11816
                                                           TREE_OPERAND (arg0, 0)),
11817
                                         TREE_OPERAND (arg0, 1));
11818
                      tem = fold_convert_loc (loc, type, tem);
11819
                    }
11820
                  else
11821
                    tem = op0;
11822
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11823
                  if (!tree_int_cst_equal (newmaskt, arg1))
11824
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11825
                }
11826
            }
11827
        }
11828
 
11829
      goto associate;
11830
 
11831
    case RDIV_EXPR:
11832
      /* Don't touch a floating-point divide by zero unless the mode
11833
         of the constant can represent infinity.  */
11834
      if (TREE_CODE (arg1) == REAL_CST
11835
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11836
          && real_zerop (arg1))
11837
        return NULL_TREE;
11838
 
11839
      /* Optimize A / A to 1.0 if we don't care about
11840
         NaNs or Infinities.  Skip the transformation
11841
         for non-real operands.  */
11842
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11843
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11844
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11845
          && operand_equal_p (arg0, arg1, 0))
11846
        {
11847
          tree r = build_real (TREE_TYPE (arg0), dconst1);
11848
 
11849
          return omit_two_operands_loc (loc, type, r, arg0, arg1);
11850
        }
11851
 
11852
      /* The complex version of the above A / A optimization.  */
11853
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11854
          && operand_equal_p (arg0, arg1, 0))
11855
        {
11856
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11857
          if (! HONOR_NANS (TYPE_MODE (elem_type))
11858
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11859
            {
11860
              tree r = build_real (elem_type, dconst1);
11861
              /* omit_two_operands will call fold_convert for us.  */
11862
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
11863
            }
11864
        }
11865
 
11866
      /* (-A) / (-B) -> A / B  */
11867
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11868
        return fold_build2_loc (loc, RDIV_EXPR, type,
11869
                            TREE_OPERAND (arg0, 0),
11870
                            negate_expr (arg1));
11871
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11872
        return fold_build2_loc (loc, RDIV_EXPR, type,
11873
                            negate_expr (arg0),
11874
                            TREE_OPERAND (arg1, 0));
11875
 
11876
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
11877
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11878
          && real_onep (arg1))
11879
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11880
 
11881
      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
11882
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11883
          && real_minus_onep (arg1))
11884
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11885
                                                  negate_expr (arg0)));
11886
 
11887
      /* If ARG1 is a constant, we can convert this to a multiply by the
11888
         reciprocal.  This does not have the same rounding properties,
11889
         so only do this if -freciprocal-math.  We can actually
11890
         always safely do it if ARG1 is a power of two, but it's hard to
11891
         tell if it is or not in a portable manner.  */
11892
      if (TREE_CODE (arg1) == REAL_CST)
11893
        {
11894
          if (flag_reciprocal_math
11895
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
11896
                                          arg1, 0)))
11897
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11898
          /* Find the reciprocal if optimizing and the result is exact.  */
11899
          if (optimize)
11900
            {
11901
              REAL_VALUE_TYPE r;
11902
              r = TREE_REAL_CST (arg1);
11903
              if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11904
                {
11905
                  tem = build_real (type, r);
11906
                  return fold_build2_loc (loc, MULT_EXPR, type,
11907
                                      fold_convert_loc (loc, type, arg0), tem);
11908
                }
11909
            }
11910
        }
11911
      /* Convert A/B/C to A/(B*C).  */
11912
      if (flag_reciprocal_math
11913
          && TREE_CODE (arg0) == RDIV_EXPR)
11914
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11915
                            fold_build2_loc (loc, MULT_EXPR, type,
11916
                                         TREE_OPERAND (arg0, 1), arg1));
11917
 
11918
      /* Convert A/(B/C) to (A/B)*C.  */
11919
      if (flag_reciprocal_math
11920
          && TREE_CODE (arg1) == RDIV_EXPR)
11921
        return fold_build2_loc (loc, MULT_EXPR, type,
11922
                            fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11923
                                         TREE_OPERAND (arg1, 0)),
11924
                            TREE_OPERAND (arg1, 1));
11925
 
11926
      /* Convert C1/(X*C2) into (C1/C2)/X.  */
11927
      if (flag_reciprocal_math
11928
          && TREE_CODE (arg1) == MULT_EXPR
11929
          && TREE_CODE (arg0) == REAL_CST
11930
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11931
        {
11932
          tree tem = const_binop (RDIV_EXPR, arg0,
11933
                                  TREE_OPERAND (arg1, 1), 0);
11934
          if (tem)
11935
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11936
                                TREE_OPERAND (arg1, 0));
11937
        }
11938
 
11939
      if (flag_unsafe_math_optimizations)
11940
        {
11941
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11942
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11943
 
11944
          /* Optimize sin(x)/cos(x) as tan(x).  */
11945
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11946
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11947
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11948
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11949
                                  CALL_EXPR_ARG (arg1, 0), 0))
11950
            {
11951
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11952
 
11953
              if (tanfn != NULL_TREE)
11954
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11955
            }
11956
 
11957
          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
11958
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11959
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11960
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11961
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11962
                                  CALL_EXPR_ARG (arg1, 0), 0))
11963
            {
11964
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11965
 
11966
              if (tanfn != NULL_TREE)
11967
                {
11968
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
11969
                                              CALL_EXPR_ARG (arg0, 0));
11970
                  return fold_build2_loc (loc, RDIV_EXPR, type,
11971
                                      build_real (type, dconst1), tmp);
11972
                }
11973
            }
11974
 
11975
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11976
             NaNs or Infinities.  */
11977
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11978
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11979
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11980
            {
11981
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
11982
              tree arg01 = CALL_EXPR_ARG (arg1, 0);
11983
 
11984
              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11985
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11986
                  && operand_equal_p (arg00, arg01, 0))
11987
                {
11988
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11989
 
11990
                  if (cosfn != NULL_TREE)
11991
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
11992
                }
11993
            }
11994
 
11995
          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11996
             NaNs or Infinities.  */
11997
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11998
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11999
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12000
            {
12001
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
12002
              tree arg01 = CALL_EXPR_ARG (arg1, 0);
12003
 
12004
              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12005
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12006
                  && operand_equal_p (arg00, arg01, 0))
12007
                {
12008
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12009
 
12010
                  if (cosfn != NULL_TREE)
12011
                    {
12012
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12013
                      return fold_build2_loc (loc, RDIV_EXPR, type,
12014
                                          build_real (type, dconst1),
12015
                                          tmp);
12016
                    }
12017
                }
12018
            }
12019
 
12020
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
12021
          if (fcode0 == BUILT_IN_POW
12022
              || fcode0 == BUILT_IN_POWF
12023
              || fcode0 == BUILT_IN_POWL)
12024
            {
12025
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
12026
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
12027
              if (TREE_CODE (arg01) == REAL_CST
12028
                  && !TREE_OVERFLOW (arg01)
12029
                  && operand_equal_p (arg1, arg00, 0))
12030
                {
12031
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12032
                  REAL_VALUE_TYPE c;
12033
                  tree arg;
12034
 
12035
                  c = TREE_REAL_CST (arg01);
12036
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12037
                  arg = build_real (type, c);
12038
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12039
                }
12040
            }
12041
 
12042
          /* Optimize a/root(b/c) into a*root(c/b).  */
12043
          if (BUILTIN_ROOT_P (fcode1))
12044
            {
12045
              tree rootarg = CALL_EXPR_ARG (arg1, 0);
12046
 
12047
              if (TREE_CODE (rootarg) == RDIV_EXPR)
12048
                {
12049
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12050
                  tree b = TREE_OPERAND (rootarg, 0);
12051
                  tree c = TREE_OPERAND (rootarg, 1);
12052
 
12053
                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12054
 
12055
                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12056
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12057
                }
12058
            }
12059
 
12060
          /* Optimize x/expN(y) into x*expN(-y).  */
12061
          if (BUILTIN_EXPONENT_P (fcode1))
12062
            {
12063
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12064
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12065
              arg1 = build_call_expr_loc (loc,
12066
                                      expfn, 1,
12067
                                      fold_convert_loc (loc, type, arg));
12068
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12069
            }
12070
 
12071
          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
12072
          if (fcode1 == BUILT_IN_POW
12073
              || fcode1 == BUILT_IN_POWF
12074
              || fcode1 == BUILT_IN_POWL)
12075
            {
12076
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12077
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
12078
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
12079
              tree neg11 = fold_convert_loc (loc, type,
12080
                                             negate_expr (arg11));
12081
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12082
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12083
            }
12084
        }
12085
      return NULL_TREE;
12086
 
12087
    case TRUNC_DIV_EXPR:
12088
    case FLOOR_DIV_EXPR:
12089
      /* Simplify A / (B << N) where A and B are positive and B is
12090
         a power of 2, to A >> (N + log2(B)).  */
12091
      strict_overflow_p = false;
12092
      if (TREE_CODE (arg1) == LSHIFT_EXPR
12093
          && (TYPE_UNSIGNED (type)
12094
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12095
        {
12096
          tree sval = TREE_OPERAND (arg1, 0);
12097
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12098
            {
12099
              tree sh_cnt = TREE_OPERAND (arg1, 1);
12100
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12101
 
12102
              if (strict_overflow_p)
12103
                fold_overflow_warning (("assuming signed overflow does not "
12104
                                        "occur when simplifying A / (B << N)"),
12105
                                       WARN_STRICT_OVERFLOW_MISC);
12106
 
12107
              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12108
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
12109
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
12110
                                  fold_convert_loc (loc, type, arg0), sh_cnt);
12111
            }
12112
        }
12113
 
12114
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12115
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
12116
      if (INTEGRAL_TYPE_P (type)
12117
          && TYPE_UNSIGNED (type)
12118
          && code == FLOOR_DIV_EXPR)
12119
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12120
 
12121
      /* Fall thru */
12122
 
12123
    case ROUND_DIV_EXPR:
12124
    case CEIL_DIV_EXPR:
12125
    case EXACT_DIV_EXPR:
12126
      if (integer_onep (arg1))
12127
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12128
      if (integer_zerop (arg1))
12129
        return NULL_TREE;
12130
      /* X / -1 is -X.  */
12131
      if (!TYPE_UNSIGNED (type)
12132
          && TREE_CODE (arg1) == INTEGER_CST
12133
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12134
          && TREE_INT_CST_HIGH (arg1) == -1)
12135
        return fold_convert_loc (loc, type, negate_expr (arg0));
12136
 
12137
      /* Convert -A / -B to A / B when the type is signed and overflow is
12138
         undefined.  */
12139
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12140
          && TREE_CODE (arg0) == NEGATE_EXPR
12141
          && negate_expr_p (arg1))
12142
        {
12143
          if (INTEGRAL_TYPE_P (type))
12144
            fold_overflow_warning (("assuming signed overflow does not occur "
12145
                                    "when distributing negation across "
12146
                                    "division"),
12147
                                   WARN_STRICT_OVERFLOW_MISC);
12148
          return fold_build2_loc (loc, code, type,
12149
                              fold_convert_loc (loc, type,
12150
                                                TREE_OPERAND (arg0, 0)),
12151
                              fold_convert_loc (loc, type,
12152
                                                negate_expr (arg1)));
12153
        }
12154
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12155
          && TREE_CODE (arg1) == NEGATE_EXPR
12156
          && negate_expr_p (arg0))
12157
        {
12158
          if (INTEGRAL_TYPE_P (type))
12159
            fold_overflow_warning (("assuming signed overflow does not occur "
12160
                                    "when distributing negation across "
12161
                                    "division"),
12162
                                   WARN_STRICT_OVERFLOW_MISC);
12163
          return fold_build2_loc (loc, code, type,
12164
                              fold_convert_loc (loc, type,
12165
                                                negate_expr (arg0)),
12166
                              fold_convert_loc (loc, type,
12167
                                                TREE_OPERAND (arg1, 0)));
12168
        }
12169
 
12170
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12171
         operation, EXACT_DIV_EXPR.
12172
 
12173
         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12174
         At one time others generated faster code, it's not clear if they do
12175
         after the last round to changes to the DIV code in expmed.c.  */
12176
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12177
          && multiple_of_p (type, arg0, arg1))
12178
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12179
 
12180
      strict_overflow_p = false;
12181
      if (TREE_CODE (arg1) == INTEGER_CST
12182
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12183
                                         &strict_overflow_p)))
12184
        {
12185
          if (strict_overflow_p)
12186
            fold_overflow_warning (("assuming signed overflow does not occur "
12187
                                    "when simplifying division"),
12188
                                   WARN_STRICT_OVERFLOW_MISC);
12189
          return fold_convert_loc (loc, type, tem);
12190
        }
12191
 
12192
      return NULL_TREE;
12193
 
12194
    case CEIL_MOD_EXPR:
12195
    case FLOOR_MOD_EXPR:
12196
    case ROUND_MOD_EXPR:
12197
    case TRUNC_MOD_EXPR:
12198
      /* X % 1 is always zero, but be sure to preserve any side
12199
         effects in X.  */
12200
      if (integer_onep (arg1))
12201
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12202
 
12203
      /* X % 0, return X % 0 unchanged so that we can get the
12204
         proper warnings and errors.  */
12205
      if (integer_zerop (arg1))
12206
        return NULL_TREE;
12207
 
12208
      /* 0 % X is always zero, but be sure to preserve any side
12209
         effects in X.  Place this after checking for X == 0.  */
12210
      if (integer_zerop (arg0))
12211
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12212
 
12213
      /* X % -1 is zero.  */
12214
      if (!TYPE_UNSIGNED (type)
12215
          && TREE_CODE (arg1) == INTEGER_CST
12216
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12217
          && TREE_INT_CST_HIGH (arg1) == -1)
12218
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12219
 
12220
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12221
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
12222
      strict_overflow_p = false;
12223
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12224
          && (TYPE_UNSIGNED (type)
12225
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12226
        {
12227
          tree c = arg1;
12228
          /* Also optimize A % (C << N)  where C is a power of 2,
12229
             to A & ((C << N) - 1).  */
12230
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
12231
            c = TREE_OPERAND (arg1, 0);
12232
 
12233
          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12234
            {
12235
              tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12236
                                       build_int_cst (TREE_TYPE (arg1), 1));
12237
              if (strict_overflow_p)
12238
                fold_overflow_warning (("assuming signed overflow does not "
12239
                                        "occur when simplifying "
12240
                                        "X % (power of two)"),
12241
                                       WARN_STRICT_OVERFLOW_MISC);
12242
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
12243
                                  fold_convert_loc (loc, type, arg0),
12244
                                  fold_convert_loc (loc, type, mask));
12245
            }
12246
        }
12247
 
12248
      /* X % -C is the same as X % C.  */
12249
      if (code == TRUNC_MOD_EXPR
12250
          && !TYPE_UNSIGNED (type)
12251
          && TREE_CODE (arg1) == INTEGER_CST
12252
          && !TREE_OVERFLOW (arg1)
12253
          && TREE_INT_CST_HIGH (arg1) < 0
12254
          && !TYPE_OVERFLOW_TRAPS (type)
12255
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
12256
          && !sign_bit_p (arg1, arg1))
12257
        return fold_build2_loc (loc, code, type,
12258
                            fold_convert_loc (loc, type, arg0),
12259
                            fold_convert_loc (loc, type,
12260
                                              negate_expr (arg1)));
12261
 
12262
      /* X % -Y is the same as X % Y.  */
12263
      if (code == TRUNC_MOD_EXPR
12264
          && !TYPE_UNSIGNED (type)
12265
          && TREE_CODE (arg1) == NEGATE_EXPR
12266
          && !TYPE_OVERFLOW_TRAPS (type))
12267
        return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12268
                            fold_convert_loc (loc, type,
12269
                                              TREE_OPERAND (arg1, 0)));
12270
 
12271
      if (TREE_CODE (arg1) == INTEGER_CST
12272
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12273
                                         &strict_overflow_p)))
12274
        {
12275
          if (strict_overflow_p)
12276
            fold_overflow_warning (("assuming signed overflow does not occur "
12277
                                    "when simplifying modulus"),
12278
                                   WARN_STRICT_OVERFLOW_MISC);
12279
          return fold_convert_loc (loc, type, tem);
12280
        }
12281
 
12282
      return NULL_TREE;
12283
 
12284
    case LROTATE_EXPR:
12285
    case RROTATE_EXPR:
12286
      if (integer_all_onesp (arg0))
12287
        return omit_one_operand_loc (loc, type, arg0, arg1);
12288
      goto shift;
12289
 
12290
    case RSHIFT_EXPR:
12291
      /* Optimize -1 >> x for arithmetic right shifts.  */
12292
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12293
          && tree_expr_nonnegative_p (arg1))
12294
        return omit_one_operand_loc (loc, type, arg0, arg1);
12295
      /* ... fall through ...  */
12296
 
12297
    case LSHIFT_EXPR:
12298
    shift:
12299
      if (integer_zerop (arg1))
12300
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12301
      if (integer_zerop (arg0))
12302
        return omit_one_operand_loc (loc, type, arg0, arg1);
12303
 
12304
      /* Since negative shift count is not well-defined,
12305
         don't try to compute it in the compiler.  */
12306
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12307
        return NULL_TREE;
12308
 
12309
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
12310
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12311
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12312
          && host_integerp (TREE_OPERAND (arg0, 1), false)
12313
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12314
        {
12315
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12316
                               + TREE_INT_CST_LOW (arg1));
12317
 
12318
          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12319
             being well defined.  */
12320
          if (low >= TYPE_PRECISION (type))
12321
            {
12322
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12323
                low = low % TYPE_PRECISION (type);
12324
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12325
                return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12326
                                         TREE_OPERAND (arg0, 0));
12327
              else
12328
                low = TYPE_PRECISION (type) - 1;
12329
            }
12330
 
12331
          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12332
                              build_int_cst (type, low));
12333
        }
12334
 
12335
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12336
         into x & ((unsigned)-1 >> c) for unsigned types.  */
12337
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12338
           || (TYPE_UNSIGNED (type)
12339
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12340
          && host_integerp (arg1, false)
12341
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12342
          && host_integerp (TREE_OPERAND (arg0, 1), false)
12343
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12344
        {
12345
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12346
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12347
          tree lshift;
12348
          tree arg00;
12349
 
12350
          if (low0 == low1)
12351
            {
12352
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12353
 
12354
              lshift = build_int_cst (type, -1);
12355
              lshift = int_const_binop (code, lshift, arg1, 0);
12356
 
12357
              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12358
            }
12359
        }
12360
 
12361
      /* Rewrite an LROTATE_EXPR by a constant into an
12362
         RROTATE_EXPR by a new constant.  */
12363
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12364
        {
12365
          tree tem = build_int_cst (TREE_TYPE (arg1),
12366
                                    TYPE_PRECISION (type));
12367
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12368
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12369
        }
12370
 
12371
      /* If we have a rotate of a bit operation with the rotate count and
12372
         the second operand of the bit operation both constant,
12373
         permute the two operations.  */
12374
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12375
          && (TREE_CODE (arg0) == BIT_AND_EXPR
12376
              || TREE_CODE (arg0) == BIT_IOR_EXPR
12377
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
12378
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12379
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
12380
                            fold_build2_loc (loc, code, type,
12381
                                         TREE_OPERAND (arg0, 0), arg1),
12382
                            fold_build2_loc (loc, code, type,
12383
                                         TREE_OPERAND (arg0, 1), arg1));
12384
 
12385
      /* Two consecutive rotates adding up to the precision of the
12386
         type can be ignored.  */
12387
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12388
          && TREE_CODE (arg0) == RROTATE_EXPR
12389
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12390
          && TREE_INT_CST_HIGH (arg1) == 0
12391
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12392
          && ((TREE_INT_CST_LOW (arg1)
12393
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12394
              == (unsigned int) TYPE_PRECISION (type)))
12395
        return TREE_OPERAND (arg0, 0);
12396
 
12397
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12398
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12399
         if the latter can be further optimized.  */
12400
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12401
          && TREE_CODE (arg0) == BIT_AND_EXPR
12402
          && TREE_CODE (arg1) == INTEGER_CST
12403
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12404
        {
12405
          tree mask = fold_build2_loc (loc, code, type,
12406
                                   fold_convert_loc (loc, type,
12407
                                                     TREE_OPERAND (arg0, 1)),
12408
                                   arg1);
12409
          tree shift = fold_build2_loc (loc, code, type,
12410
                                    fold_convert_loc (loc, type,
12411
                                                      TREE_OPERAND (arg0, 0)),
12412
                                    arg1);
12413
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12414
          if (tem)
12415
            return tem;
12416
        }
12417
 
12418
      return NULL_TREE;
12419
 
12420
    case MIN_EXPR:
12421
      if (operand_equal_p (arg0, arg1, 0))
12422
        return omit_one_operand_loc (loc, type, arg0, arg1);
12423
      if (INTEGRAL_TYPE_P (type)
12424
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12425
        return omit_one_operand_loc (loc, type, arg1, arg0);
12426
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12427
      if (tem)
12428
        return tem;
12429
      goto associate;
12430
 
12431
    case MAX_EXPR:
12432
      if (operand_equal_p (arg0, arg1, 0))
12433
        return omit_one_operand_loc (loc, type, arg0, arg1);
12434
      if (INTEGRAL_TYPE_P (type)
12435
          && TYPE_MAX_VALUE (type)
12436
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12437
        return omit_one_operand_loc (loc, type, arg1, arg0);
12438
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12439
      if (tem)
12440
        return tem;
12441
      goto associate;
12442
 
12443
    case TRUTH_ANDIF_EXPR:
12444
      /* Note that the operands of this must be ints
12445
         and their values must be 0 or 1.
12446
         ("true" is a fixed value perhaps depending on the language.)  */
12447
      /* If first arg is constant zero, return it.  */
12448
      if (integer_zerop (arg0))
12449
        return fold_convert_loc (loc, type, arg0);
12450
    case TRUTH_AND_EXPR:
12451
      /* If either arg is constant true, drop it.  */
12452
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12453
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12454
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12455
          /* Preserve sequence points.  */
12456
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12457
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12458
      /* If second arg is constant zero, result is zero, but first arg
12459
         must be evaluated.  */
12460
      if (integer_zerop (arg1))
12461
        return omit_one_operand_loc (loc, type, arg1, arg0);
12462
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12463
         case will be handled here.  */
12464
      if (integer_zerop (arg0))
12465
        return omit_one_operand_loc (loc, type, arg0, arg1);
12466
 
12467
      /* !X && X is always false.  */
12468
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12469
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12470
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12471
      /* X && !X is always false.  */
12472
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12473
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12474
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12475
 
12476
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12477
         means A >= Y && A != MAX, but in this case we know that
12478
         A < X <= MAX.  */
12479
 
12480
      if (!TREE_SIDE_EFFECTS (arg0)
12481
          && !TREE_SIDE_EFFECTS (arg1))
12482
        {
12483
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12484
          if (tem && !operand_equal_p (tem, arg0, 0))
12485
            return fold_build2_loc (loc, code, type, tem, arg1);
12486
 
12487
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12488
          if (tem && !operand_equal_p (tem, arg1, 0))
12489
            return fold_build2_loc (loc, code, type, arg0, tem);
12490
        }
12491
 
12492
    truth_andor:
12493
      /* We only do these simplifications if we are optimizing.  */
12494
      if (!optimize)
12495
        return NULL_TREE;
12496
 
12497
      /* Check for things like (A || B) && (A || C).  We can convert this
12498
         to A || (B && C).  Note that either operator can be any of the four
12499
         truth and/or operations and the transformation will still be
12500
         valid.   Also note that we only care about order for the
12501
         ANDIF and ORIF operators.  If B contains side effects, this
12502
         might change the truth-value of A.  */
12503
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
12504
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12505
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12506
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
12507
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12508
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12509
        {
12510
          tree a00 = TREE_OPERAND (arg0, 0);
12511
          tree a01 = TREE_OPERAND (arg0, 1);
12512
          tree a10 = TREE_OPERAND (arg1, 0);
12513
          tree a11 = TREE_OPERAND (arg1, 1);
12514
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12515
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12516
                             && (code == TRUTH_AND_EXPR
12517
                                 || code == TRUTH_OR_EXPR));
12518
 
12519
          if (operand_equal_p (a00, a10, 0))
12520
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12521
                                fold_build2_loc (loc, code, type, a01, a11));
12522
          else if (commutative && operand_equal_p (a00, a11, 0))
12523
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12524
                                fold_build2_loc (loc, code, type, a01, a10));
12525
          else if (commutative && operand_equal_p (a01, a10, 0))
12526
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12527
                                fold_build2_loc (loc, code, type, a00, a11));
12528
 
12529
          /* This case if tricky because we must either have commutative
12530
             operators or else A10 must not have side-effects.  */
12531
 
12532
          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12533
                   && operand_equal_p (a01, a11, 0))
12534
            return fold_build2_loc (loc, TREE_CODE (arg0), type,
12535
                                fold_build2_loc (loc, code, type, a00, a10),
12536
                                a01);
12537
        }
12538
 
12539
      /* See if we can build a range comparison.  */
12540
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12541
        return tem;
12542
 
12543
      /* Check for the possibility of merging component references.  If our
12544
         lhs is another similar operation, try to merge its rhs with our
12545
         rhs.  Then try to merge our lhs and rhs.  */
12546
      if (TREE_CODE (arg0) == code
12547
          && 0 != (tem = fold_truthop (loc, code, type,
12548
                                       TREE_OPERAND (arg0, 1), arg1)))
12549
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12550
 
12551
      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12552
        return tem;
12553
 
12554
      return NULL_TREE;
12555
 
12556
    case TRUTH_ORIF_EXPR:
12557
      /* Note that the operands of this must be ints
12558
         and their values must be 0 or true.
12559
         ("true" is a fixed value perhaps depending on the language.)  */
12560
      /* If first arg is constant true, return it.  */
12561
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12562
        return fold_convert_loc (loc, type, arg0);
12563
    case TRUTH_OR_EXPR:
12564
      /* If either arg is constant zero, drop it.  */
12565
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12566
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12567
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12568
          /* Preserve sequence points.  */
12569
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12570
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12571
      /* If second arg is constant true, result is true, but we must
12572
         evaluate first arg.  */
12573
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12574
        return omit_one_operand_loc (loc, type, arg1, arg0);
12575
      /* Likewise for first arg, but note this only occurs here for
12576
         TRUTH_OR_EXPR.  */
12577
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12578
        return omit_one_operand_loc (loc, type, arg0, arg1);
12579
 
12580
      /* !X || X is always true.  */
12581
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12582
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12583
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12584
      /* X || !X is always true.  */
12585
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12586
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12587
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12588
 
12589
      goto truth_andor;
12590
 
12591
    case TRUTH_XOR_EXPR:
12592
      /* If the second arg is constant zero, drop it.  */
12593
      if (integer_zerop (arg1))
12594
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12595
      /* If the second arg is constant true, this is a logical inversion.  */
12596
      if (integer_onep (arg1))
12597
        {
12598
          /* Only call invert_truthvalue if operand is a truth value.  */
12599
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12600
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12601
          else
12602
            tem = invert_truthvalue_loc (loc, arg0);
12603
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12604
        }
12605
      /* Identical arguments cancel to zero.  */
12606
      if (operand_equal_p (arg0, arg1, 0))
12607
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12608
 
12609
      /* !X ^ X is always true.  */
12610
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12611
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12612
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12613
 
12614
      /* X ^ !X is always true.  */
12615
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12616
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12617
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12618
 
12619
      return NULL_TREE;
12620
 
12621
    case EQ_EXPR:
12622
    case NE_EXPR:
12623
      tem = fold_comparison (loc, code, type, op0, op1);
12624
      if (tem != NULL_TREE)
12625
        return tem;
12626
 
12627
      /* bool_var != 0 becomes bool_var. */
12628
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12629
          && code == NE_EXPR)
12630
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12631
 
12632
      /* bool_var == 1 becomes bool_var. */
12633
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12634
          && code == EQ_EXPR)
12635
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12636
 
12637
      /* bool_var != 1 becomes !bool_var. */
12638
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12639
          && code == NE_EXPR)
12640
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12641
                            fold_convert_loc (loc, type, arg0));
12642
 
12643
      /* bool_var == 0 becomes !bool_var. */
12644
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12645
          && code == EQ_EXPR)
12646
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12647
                            fold_convert_loc (loc, type, arg0));
12648
 
12649
      /* !exp != 0 becomes !exp */
12650
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12651
          && code == NE_EXPR)
12652
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12653
 
12654
      /* If this is an equality comparison of the address of two non-weak,
12655
         unaliased symbols neither of which are extern (since we do not
12656
         have access to attributes for externs), then we know the result.  */
12657
      if (TREE_CODE (arg0) == ADDR_EXPR
12658
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12659
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12660
          && ! lookup_attribute ("alias",
12661
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12662
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12663
          && TREE_CODE (arg1) == ADDR_EXPR
12664
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12665
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12666
          && ! lookup_attribute ("alias",
12667
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12668
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12669
        {
12670
          /* We know that we're looking at the address of two
12671
             non-weak, unaliased, static _DECL nodes.
12672
 
12673
             It is both wasteful and incorrect to call operand_equal_p
12674
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
12675
             all we need to do is test pointer equality for the arguments
12676
             to the two ADDR_EXPR nodes.  It is incorrect to use
12677
             operand_equal_p as that function is NOT equivalent to a
12678
             C equality test.  It can in fact return false for two
12679
             objects which would test as equal using the C equality
12680
             operator.  */
12681
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12682
          return constant_boolean_node (equal
12683
                                        ? code == EQ_EXPR : code != EQ_EXPR,
12684
                                        type);
12685
        }
12686
 
12687
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12688
         a MINUS_EXPR of a constant, we can convert it into a comparison with
12689
         a revised constant as long as no overflow occurs.  */
12690
      if (TREE_CODE (arg1) == INTEGER_CST
12691
          && (TREE_CODE (arg0) == PLUS_EXPR
12692
              || TREE_CODE (arg0) == MINUS_EXPR)
12693
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12694
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12695
                                      ? MINUS_EXPR : PLUS_EXPR,
12696
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
12697
                                                        arg1),
12698
                                      TREE_OPERAND (arg0, 1), 0))
12699
          && !TREE_OVERFLOW (tem))
12700
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12701
 
12702
      /* Similarly for a NEGATE_EXPR.  */
12703
      if (TREE_CODE (arg0) == NEGATE_EXPR
12704
          && TREE_CODE (arg1) == INTEGER_CST
12705
          && 0 != (tem = negate_expr (arg1))
12706
          && TREE_CODE (tem) == INTEGER_CST
12707
          && !TREE_OVERFLOW (tem))
12708
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12709
 
12710
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
12711
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12712
          && TREE_CODE (arg1) == INTEGER_CST
12713
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12714
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12715
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12716
                                         fold_convert_loc (loc,
12717
                                                           TREE_TYPE (arg0),
12718
                                                           arg1),
12719
                                         TREE_OPERAND (arg0, 1)));
12720
 
12721
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
12722
      if ((TREE_CODE (arg0) == PLUS_EXPR
12723
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12724
           || TREE_CODE (arg0) == MINUS_EXPR)
12725
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12726
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12727
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
12728
        {
12729
          tree val = TREE_OPERAND (arg0, 1);
12730
          return omit_two_operands_loc (loc, type,
12731
                                    fold_build2_loc (loc, code, type,
12732
                                                 val,
12733
                                                 build_int_cst (TREE_TYPE (val),
12734
                                                                0)),
12735
                                    TREE_OPERAND (arg0, 0), arg1);
12736
        }
12737
 
12738
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
12739
      if (TREE_CODE (arg0) == MINUS_EXPR
12740
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12741
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12742
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12743
        {
12744
          return omit_two_operands_loc (loc, type,
12745
                                    code == NE_EXPR
12746
                                    ? boolean_true_node : boolean_false_node,
12747
                                    TREE_OPERAND (arg0, 1), arg1);
12748
        }
12749
 
12750
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
12751
         for !=.  Don't do this for ordered comparisons due to overflow.  */
12752
      if (TREE_CODE (arg0) == MINUS_EXPR
12753
          && integer_zerop (arg1))
12754
        return fold_build2_loc (loc, code, type,
12755
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12756
 
12757
      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12758
      if (TREE_CODE (arg0) == ABS_EXPR
12759
          && (integer_zerop (arg1) || real_zerop (arg1)))
12760
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12761
 
12762
      /* If this is an EQ or NE comparison with zero and ARG0 is
12763
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12764
         two operations, but the latter can be done in one less insn
12765
         on machines that have only two-operand insns or on which a
12766
         constant cannot be the first operand.  */
12767
      if (TREE_CODE (arg0) == BIT_AND_EXPR
12768
          && integer_zerop (arg1))
12769
        {
12770
          tree arg00 = TREE_OPERAND (arg0, 0);
12771
          tree arg01 = TREE_OPERAND (arg0, 1);
12772
          if (TREE_CODE (arg00) == LSHIFT_EXPR
12773
              && integer_onep (TREE_OPERAND (arg00, 0)))
12774
            {
12775
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12776
                                      arg01, TREE_OPERAND (arg00, 1));
12777
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12778
                                 build_int_cst (TREE_TYPE (arg0), 1));
12779
              return fold_build2_loc (loc, code, type,
12780
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12781
                                  arg1);
12782
            }
12783
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
12784
                   && integer_onep (TREE_OPERAND (arg01, 0)))
12785
            {
12786
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12787
                                      arg00, TREE_OPERAND (arg01, 1));
12788
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12789
                                 build_int_cst (TREE_TYPE (arg0), 1));
12790
              return fold_build2_loc (loc, code, type,
12791
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12792
                                  arg1);
12793
            }
12794
        }
12795
 
12796
      /* If this is an NE or EQ comparison of zero against the result of a
12797
         signed MOD operation whose second operand is a power of 2, make
12798
         the MOD operation unsigned since it is simpler and equivalent.  */
12799
      if (integer_zerop (arg1)
12800
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12801
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12802
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
12803
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12804
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12805
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
12806
        {
12807
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12808
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12809
                                     fold_convert_loc (loc, newtype,
12810
                                                       TREE_OPERAND (arg0, 0)),
12811
                                     fold_convert_loc (loc, newtype,
12812
                                                       TREE_OPERAND (arg0, 1)));
12813
 
12814
          return fold_build2_loc (loc, code, type, newmod,
12815
                              fold_convert_loc (loc, newtype, arg1));
12816
        }
12817
 
12818
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12819
         C1 is a valid shift constant, and C2 is a power of two, i.e.
12820
         a single bit.  */
12821
      if (TREE_CODE (arg0) == BIT_AND_EXPR
12822
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12823
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12824
             == INTEGER_CST
12825
          && integer_pow2p (TREE_OPERAND (arg0, 1))
12826
          && integer_zerop (arg1))
12827
        {
12828
          tree itype = TREE_TYPE (arg0);
12829
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12830
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12831
 
12832
          /* Check for a valid shift count.  */
12833
          if (TREE_INT_CST_HIGH (arg001) == 0
12834
              && TREE_INT_CST_LOW (arg001) < prec)
12835
            {
12836
              tree arg01 = TREE_OPERAND (arg0, 1);
12837
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12838
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12839
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12840
                 can be rewritten as (X & (C2 << C1)) != 0.  */
12841
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12842
                {
12843
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12844
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12845
                  return fold_build2_loc (loc, code, type, tem, arg1);
12846
                }
12847
              /* Otherwise, for signed (arithmetic) shifts,
12848
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12849
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12850
              else if (!TYPE_UNSIGNED (itype))
12851
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12852
                                    arg000, build_int_cst (itype, 0));
12853
              /* Otherwise, of unsigned (logical) shifts,
12854
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12855
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12856
              else
12857
                return omit_one_operand_loc (loc, type,
12858
                                         code == EQ_EXPR ? integer_one_node
12859
                                                         : integer_zero_node,
12860
                                         arg000);
12861
            }
12862
        }
12863
 
12864
      /* If this is an NE comparison of zero with an AND of one, remove the
12865
         comparison since the AND will give the correct value.  */
12866
      if (code == NE_EXPR
12867
          && integer_zerop (arg1)
12868
          && TREE_CODE (arg0) == BIT_AND_EXPR
12869
          && integer_onep (TREE_OPERAND (arg0, 1)))
12870
        return fold_convert_loc (loc, type, arg0);
12871
 
12872
      /* If we have (A & C) == C where C is a power of 2, convert this into
12873
         (A & C) != 0.  Similarly for NE_EXPR.  */
12874
      if (TREE_CODE (arg0) == BIT_AND_EXPR
12875
          && integer_pow2p (TREE_OPERAND (arg0, 1))
12876
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12877
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12878
                            arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12879
                                                    integer_zero_node));
12880
 
12881
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12882
         bit, then fold the expression into A < 0 or A >= 0.  */
12883
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12884
      if (tem)
12885
        return tem;
12886
 
12887
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12888
         Similarly for NE_EXPR.  */
12889
      if (TREE_CODE (arg0) == BIT_AND_EXPR
12890
          && TREE_CODE (arg1) == INTEGER_CST
12891
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12892
        {
12893
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12894
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
12895
                                   TREE_OPERAND (arg0, 1));
12896
          tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12897
                                       arg1, notc);
12898
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12899
          if (integer_nonzerop (dandnotc))
12900
            return omit_one_operand_loc (loc, type, rslt, arg0);
12901
        }
12902
 
12903
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12904
         Similarly for NE_EXPR.  */
12905
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
12906
          && TREE_CODE (arg1) == INTEGER_CST
12907
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12908
        {
12909
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12910
          tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12911
                                       TREE_OPERAND (arg0, 1), notd);
12912
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12913
          if (integer_nonzerop (candnotd))
12914
            return omit_one_operand_loc (loc, type, rslt, arg0);
12915
        }
12916
 
12917
      /* If this is a comparison of a field, we may be able to simplify it.  */
12918
      if ((TREE_CODE (arg0) == COMPONENT_REF
12919
           || TREE_CODE (arg0) == BIT_FIELD_REF)
12920
          /* Handle the constant case even without -O
12921
             to make sure the warnings are given.  */
12922
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12923
        {
12924
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12925
          if (t1)
12926
            return t1;
12927
        }
12928
 
12929
      /* Optimize comparisons of strlen vs zero to a compare of the
12930
         first character of the string vs zero.  To wit,
12931
                strlen(ptr) == 0   =>  *ptr == 0
12932
                strlen(ptr) != 0   =>  *ptr != 0
12933
         Other cases should reduce to one of these two (or a constant)
12934
         due to the return value of strlen being unsigned.  */
12935
      if (TREE_CODE (arg0) == CALL_EXPR
12936
          && integer_zerop (arg1))
12937
        {
12938
          tree fndecl = get_callee_fndecl (arg0);
12939
 
12940
          if (fndecl
12941
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12942
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12943
              && call_expr_nargs (arg0) == 1
12944
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12945
            {
12946
              tree iref = build_fold_indirect_ref_loc (loc,
12947
                                                   CALL_EXPR_ARG (arg0, 0));
12948
              return fold_build2_loc (loc, code, type, iref,
12949
                                  build_int_cst (TREE_TYPE (iref), 0));
12950
            }
12951
        }
12952
 
12953
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12954
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
12955
      if (TREE_CODE (arg0) == RSHIFT_EXPR
12956
          && integer_zerop (arg1)
12957
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12958
        {
12959
          tree arg00 = TREE_OPERAND (arg0, 0);
12960
          tree arg01 = TREE_OPERAND (arg0, 1);
12961
          tree itype = TREE_TYPE (arg00);
12962
          if (TREE_INT_CST_HIGH (arg01) == 0
12963
              && TREE_INT_CST_LOW (arg01)
12964
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12965
            {
12966
              if (TYPE_UNSIGNED (itype))
12967
                {
12968
                  itype = signed_type_for (itype);
12969
                  arg00 = fold_convert_loc (loc, itype, arg00);
12970
                }
12971
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12972
                                  type, arg00, build_int_cst (itype, 0));
12973
            }
12974
        }
12975
 
12976
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
12977
      if (integer_zerop (arg1)
12978
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
12979
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12980
                            TREE_OPERAND (arg0, 1));
12981
 
12982
      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
12983
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12984
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12985
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12986
                            build_int_cst (TREE_TYPE (arg1), 0));
12987
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
12988
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12989
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12990
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12991
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12992
                            build_int_cst (TREE_TYPE (arg1), 0));
12993
 
12994
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
12995
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12996
          && TREE_CODE (arg1) == INTEGER_CST
12997
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12998
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12999
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13000
                                         TREE_OPERAND (arg0, 1), arg1));
13001
 
13002
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13003
         (X & C) == 0 when C is a single bit.  */
13004
      if (TREE_CODE (arg0) == BIT_AND_EXPR
13005
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13006
          && integer_zerop (arg1)
13007
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
13008
        {
13009
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13010
                             TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13011
                             TREE_OPERAND (arg0, 1));
13012
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13013
                              type, tem, arg1);
13014
        }
13015
 
13016
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13017
         constant C is a power of two, i.e. a single bit.  */
13018
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13019
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13020
          && integer_zerop (arg1)
13021
          && integer_pow2p (TREE_OPERAND (arg0, 1))
13022
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13023
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13024
        {
13025
          tree arg00 = TREE_OPERAND (arg0, 0);
13026
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13027
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
13028
        }
13029
 
13030
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13031
         when is C is a power of two, i.e. a single bit.  */
13032
      if (TREE_CODE (arg0) == BIT_AND_EXPR
13033
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13034
          && integer_zerop (arg1)
13035
          && integer_pow2p (TREE_OPERAND (arg0, 1))
13036
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13037
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13038
        {
13039
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13040
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13041
                             arg000, TREE_OPERAND (arg0, 1));
13042
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13043
                              tem, build_int_cst (TREE_TYPE (tem), 0));
13044
        }
13045
 
13046
      if (integer_zerop (arg1)
13047
          && tree_expr_nonzero_p (arg0))
13048
        {
13049
          tree res = constant_boolean_node (code==NE_EXPR, type);
13050
          return omit_one_operand_loc (loc, type, res, arg0);
13051
        }
13052
 
13053
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
13054
      if (TREE_CODE (arg0) == NEGATE_EXPR
13055
          && TREE_CODE (arg1) == NEGATE_EXPR)
13056
        return fold_build2_loc (loc, code, type,
13057
                            TREE_OPERAND (arg0, 0),
13058
                            TREE_OPERAND (arg1, 0));
13059
 
13060
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
13061
      if (TREE_CODE (arg0) == BIT_AND_EXPR
13062
          && TREE_CODE (arg1) == BIT_AND_EXPR)
13063
        {
13064
          tree arg00 = TREE_OPERAND (arg0, 0);
13065
          tree arg01 = TREE_OPERAND (arg0, 1);
13066
          tree arg10 = TREE_OPERAND (arg1, 0);
13067
          tree arg11 = TREE_OPERAND (arg1, 1);
13068
          tree itype = TREE_TYPE (arg0);
13069
 
13070
          if (operand_equal_p (arg01, arg11, 0))
13071
            return fold_build2_loc (loc, code, type,
13072
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
13073
                                             fold_build2_loc (loc,
13074
                                                          BIT_XOR_EXPR, itype,
13075
                                                          arg00, arg10),
13076
                                             arg01),
13077
                                build_int_cst (itype, 0));
13078
 
13079
          if (operand_equal_p (arg01, arg10, 0))
13080
            return fold_build2_loc (loc, code, type,
13081
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
13082
                                             fold_build2_loc (loc,
13083
                                                          BIT_XOR_EXPR, itype,
13084
                                                          arg00, arg11),
13085
                                             arg01),
13086
                                build_int_cst (itype, 0));
13087
 
13088
          if (operand_equal_p (arg00, arg11, 0))
13089
            return fold_build2_loc (loc, code, type,
13090
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
13091
                                             fold_build2_loc (loc,
13092
                                                          BIT_XOR_EXPR, itype,
13093
                                                          arg01, arg10),
13094
                                             arg00),
13095
                                build_int_cst (itype, 0));
13096
 
13097
          if (operand_equal_p (arg00, arg10, 0))
13098
            return fold_build2_loc (loc, code, type,
13099
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
13100
                                             fold_build2_loc (loc,
13101
                                                          BIT_XOR_EXPR, itype,
13102
                                                          arg01, arg11),
13103
                                             arg00),
13104
                                build_int_cst (itype, 0));
13105
        }
13106
 
13107
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13108
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
13109
        {
13110
          tree arg00 = TREE_OPERAND (arg0, 0);
13111
          tree arg01 = TREE_OPERAND (arg0, 1);
13112
          tree arg10 = TREE_OPERAND (arg1, 0);
13113
          tree arg11 = TREE_OPERAND (arg1, 1);
13114
          tree itype = TREE_TYPE (arg0);
13115
 
13116
          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13117
             operand_equal_p guarantees no side-effects so we don't need
13118
             to use omit_one_operand on Z.  */
13119
          if (operand_equal_p (arg01, arg11, 0))
13120
            return fold_build2_loc (loc, code, type, arg00, arg10);
13121
          if (operand_equal_p (arg01, arg10, 0))
13122
            return fold_build2_loc (loc, code, type, arg00, arg11);
13123
          if (operand_equal_p (arg00, arg11, 0))
13124
            return fold_build2_loc (loc, code, type, arg01, arg10);
13125
          if (operand_equal_p (arg00, arg10, 0))
13126
            return fold_build2_loc (loc, code, type, arg01, arg11);
13127
 
13128
          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
13129
          if (TREE_CODE (arg01) == INTEGER_CST
13130
              && TREE_CODE (arg11) == INTEGER_CST)
13131
            return fold_build2_loc (loc, code, type,
13132
                                fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13133
                                             fold_build2_loc (loc,
13134
                                                          BIT_XOR_EXPR, itype,
13135
                                                          arg01, arg11)),
13136
                                arg10);
13137
        }
13138
 
13139
      /* Attempt to simplify equality/inequality comparisons of complex
13140
         values.  Only lower the comparison if the result is known or
13141
         can be simplified to a single scalar comparison.  */
13142
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
13143
           || TREE_CODE (arg0) == COMPLEX_CST)
13144
          && (TREE_CODE (arg1) == COMPLEX_EXPR
13145
              || TREE_CODE (arg1) == COMPLEX_CST))
13146
        {
13147
          tree real0, imag0, real1, imag1;
13148
          tree rcond, icond;
13149
 
13150
          if (TREE_CODE (arg0) == COMPLEX_EXPR)
13151
            {
13152
              real0 = TREE_OPERAND (arg0, 0);
13153
              imag0 = TREE_OPERAND (arg0, 1);
13154
            }
13155
          else
13156
            {
13157
              real0 = TREE_REALPART (arg0);
13158
              imag0 = TREE_IMAGPART (arg0);
13159
            }
13160
 
13161
          if (TREE_CODE (arg1) == COMPLEX_EXPR)
13162
            {
13163
              real1 = TREE_OPERAND (arg1, 0);
13164
              imag1 = TREE_OPERAND (arg1, 1);
13165
            }
13166
          else
13167
            {
13168
              real1 = TREE_REALPART (arg1);
13169
              imag1 = TREE_IMAGPART (arg1);
13170
            }
13171
 
13172
          rcond = fold_binary_loc (loc, code, type, real0, real1);
13173
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13174
            {
13175
              if (integer_zerop (rcond))
13176
                {
13177
                  if (code == EQ_EXPR)
13178
                    return omit_two_operands_loc (loc, type, boolean_false_node,
13179
                                              imag0, imag1);
13180
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13181
                }
13182
              else
13183
                {
13184
                  if (code == NE_EXPR)
13185
                    return omit_two_operands_loc (loc, type, boolean_true_node,
13186
                                              imag0, imag1);
13187
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13188
                }
13189
            }
13190
 
13191
          icond = fold_binary_loc (loc, code, type, imag0, imag1);
13192
          if (icond && TREE_CODE (icond) == INTEGER_CST)
13193
            {
13194
              if (integer_zerop (icond))
13195
                {
13196
                  if (code == EQ_EXPR)
13197
                    return omit_two_operands_loc (loc, type, boolean_false_node,
13198
                                              real0, real1);
13199
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13200
                }
13201
              else
13202
                {
13203
                  if (code == NE_EXPR)
13204
                    return omit_two_operands_loc (loc, type, boolean_true_node,
13205
                                              real0, real1);
13206
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13207
                }
13208
            }
13209
        }
13210
 
13211
      return NULL_TREE;
13212
 
13213
    case LT_EXPR:
13214
    case GT_EXPR:
13215
    case LE_EXPR:
13216
    case GE_EXPR:
13217
      tem = fold_comparison (loc, code, type, op0, op1);
13218
      if (tem != NULL_TREE)
13219
        return tem;
13220
 
13221
      /* Transform comparisons of the form X +- C CMP X.  */
13222
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13223
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13224
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13225
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13226
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13227
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13228
        {
13229
          tree arg01 = TREE_OPERAND (arg0, 1);
13230
          enum tree_code code0 = TREE_CODE (arg0);
13231
          int is_positive;
13232
 
13233
          if (TREE_CODE (arg01) == REAL_CST)
13234
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13235
          else
13236
            is_positive = tree_int_cst_sgn (arg01);
13237
 
13238
          /* (X - c) > X becomes false.  */
13239
          if (code == GT_EXPR
13240
              && ((code0 == MINUS_EXPR && is_positive >= 0)
13241
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
13242
            {
13243
              if (TREE_CODE (arg01) == INTEGER_CST
13244
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13245
                fold_overflow_warning (("assuming signed overflow does not "
13246
                                        "occur when assuming that (X - c) > X "
13247
                                        "is always false"),
13248
                                       WARN_STRICT_OVERFLOW_ALL);
13249
              return constant_boolean_node (0, type);
13250
            }
13251
 
13252
          /* Likewise (X + c) < X becomes false.  */
13253
          if (code == LT_EXPR
13254
              && ((code0 == PLUS_EXPR && is_positive >= 0)
13255
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
13256
            {
13257
              if (TREE_CODE (arg01) == INTEGER_CST
13258
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13259
                fold_overflow_warning (("assuming signed overflow does not "
13260
                                        "occur when assuming that "
13261
                                        "(X + c) < X is always false"),
13262
                                       WARN_STRICT_OVERFLOW_ALL);
13263
              return constant_boolean_node (0, type);
13264
            }
13265
 
13266
          /* Convert (X - c) <= X to true.  */
13267
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13268
              && code == LE_EXPR
13269
              && ((code0 == MINUS_EXPR && is_positive >= 0)
13270
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
13271
            {
13272
              if (TREE_CODE (arg01) == INTEGER_CST
13273
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13274
                fold_overflow_warning (("assuming signed overflow does not "
13275
                                        "occur when assuming that "
13276
                                        "(X - c) <= X is always true"),
13277
                                       WARN_STRICT_OVERFLOW_ALL);
13278
              return constant_boolean_node (1, type);
13279
            }
13280
 
13281
          /* Convert (X + c) >= X to true.  */
13282
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13283
              && code == GE_EXPR
13284
              && ((code0 == PLUS_EXPR && is_positive >= 0)
13285
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
13286
            {
13287
              if (TREE_CODE (arg01) == INTEGER_CST
13288
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13289
                fold_overflow_warning (("assuming signed overflow does not "
13290
                                        "occur when assuming that "
13291
                                        "(X + c) >= X is always true"),
13292
                                       WARN_STRICT_OVERFLOW_ALL);
13293
              return constant_boolean_node (1, type);
13294
            }
13295
 
13296
          if (TREE_CODE (arg01) == INTEGER_CST)
13297
            {
13298
              /* Convert X + c > X and X - c < X to true for integers.  */
13299
              if (code == GT_EXPR
13300
                  && ((code0 == PLUS_EXPR && is_positive > 0)
13301
                      || (code0 == MINUS_EXPR && is_positive < 0)))
13302
                {
13303
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13304
                    fold_overflow_warning (("assuming signed overflow does "
13305
                                            "not occur when assuming that "
13306
                                            "(X + c) > X is always true"),
13307
                                           WARN_STRICT_OVERFLOW_ALL);
13308
                  return constant_boolean_node (1, type);
13309
                }
13310
 
13311
              if (code == LT_EXPR
13312
                  && ((code0 == MINUS_EXPR && is_positive > 0)
13313
                      || (code0 == PLUS_EXPR && is_positive < 0)))
13314
                {
13315
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13316
                    fold_overflow_warning (("assuming signed overflow does "
13317
                                            "not occur when assuming that "
13318
                                            "(X - c) < X is always true"),
13319
                                           WARN_STRICT_OVERFLOW_ALL);
13320
                  return constant_boolean_node (1, type);
13321
                }
13322
 
13323
              /* Convert X + c <= X and X - c >= X to false for integers.  */
13324
              if (code == LE_EXPR
13325
                  && ((code0 == PLUS_EXPR && is_positive > 0)
13326
                      || (code0 == MINUS_EXPR && is_positive < 0)))
13327
                {
13328
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13329
                    fold_overflow_warning (("assuming signed overflow does "
13330
                                            "not occur when assuming that "
13331
                                            "(X + c) <= X is always false"),
13332
                                           WARN_STRICT_OVERFLOW_ALL);
13333
                  return constant_boolean_node (0, type);
13334
                }
13335
 
13336
              if (code == GE_EXPR
13337
                  && ((code0 == MINUS_EXPR && is_positive > 0)
13338
                      || (code0 == PLUS_EXPR && is_positive < 0)))
13339
                {
13340
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13341
                    fold_overflow_warning (("assuming signed overflow does "
13342
                                            "not occur when assuming that "
13343
                                            "(X - c) >= X is always false"),
13344
                                           WARN_STRICT_OVERFLOW_ALL);
13345
                  return constant_boolean_node (0, type);
13346
                }
13347
            }
13348
        }
13349
 
13350
      /* Comparisons with the highest or lowest possible integer of
13351
         the specified precision will have known values.  */
13352
      {
13353
        tree arg1_type = TREE_TYPE (arg1);
13354
        unsigned int width = TYPE_PRECISION (arg1_type);
13355
 
13356
        if (TREE_CODE (arg1) == INTEGER_CST
13357
            && width <= 2 * HOST_BITS_PER_WIDE_INT
13358
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13359
          {
13360
            HOST_WIDE_INT signed_max_hi;
13361
            unsigned HOST_WIDE_INT signed_max_lo;
13362
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13363
 
13364
            if (width <= HOST_BITS_PER_WIDE_INT)
13365
              {
13366
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13367
                                - 1;
13368
                signed_max_hi = 0;
13369
                max_hi = 0;
13370
 
13371
                if (TYPE_UNSIGNED (arg1_type))
13372
                  {
13373
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13374
                    min_lo = 0;
13375
                    min_hi = 0;
13376
                  }
13377
                else
13378
                  {
13379
                    max_lo = signed_max_lo;
13380
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13381
                    min_hi = -1;
13382
                  }
13383
              }
13384
            else
13385
              {
13386
                width -= HOST_BITS_PER_WIDE_INT;
13387
                signed_max_lo = -1;
13388
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13389
                                - 1;
13390
                max_lo = -1;
13391
                min_lo = 0;
13392
 
13393
                if (TYPE_UNSIGNED (arg1_type))
13394
                  {
13395
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13396
                    min_hi = 0;
13397
                  }
13398
                else
13399
                  {
13400
                    max_hi = signed_max_hi;
13401
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13402
                  }
13403
              }
13404
 
13405
            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13406
                && TREE_INT_CST_LOW (arg1) == max_lo)
13407
              switch (code)
13408
                {
13409
                case GT_EXPR:
13410
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13411
 
13412
                case GE_EXPR:
13413
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13414
 
13415
                case LE_EXPR:
13416
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13417
 
13418
                case LT_EXPR:
13419
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13420
 
13421
                /* The GE_EXPR and LT_EXPR cases above are not normally
13422
                   reached because of previous transformations.  */
13423
 
13424
                default:
13425
                  break;
13426
                }
13427
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13428
                     == max_hi
13429
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13430
              switch (code)
13431
                {
13432
                case GT_EXPR:
13433
                  arg1 = const_binop (PLUS_EXPR, arg1,
13434
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
13435
                  return fold_build2_loc (loc, EQ_EXPR, type,
13436
                                      fold_convert_loc (loc,
13437
                                                        TREE_TYPE (arg1), arg0),
13438
                                      arg1);
13439
                case LE_EXPR:
13440
                  arg1 = const_binop (PLUS_EXPR, arg1,
13441
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
13442
                  return fold_build2_loc (loc, NE_EXPR, type,
13443
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
13444
                                                        arg0),
13445
                                      arg1);
13446
                default:
13447
                  break;
13448
                }
13449
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13450
                     == min_hi
13451
                     && TREE_INT_CST_LOW (arg1) == min_lo)
13452
              switch (code)
13453
                {
13454
                case LT_EXPR:
13455
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13456
 
13457
                case LE_EXPR:
13458
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13459
 
13460
                case GE_EXPR:
13461
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13462
 
13463
                case GT_EXPR:
13464
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13465
 
13466
                default:
13467
                  break;
13468
                }
13469
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13470
                     == min_hi
13471
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13472
              switch (code)
13473
                {
13474
                case GE_EXPR:
13475
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13476
                  return fold_build2_loc (loc, NE_EXPR, type,
13477
                                      fold_convert_loc (loc,
13478
                                                        TREE_TYPE (arg1), arg0),
13479
                                      arg1);
13480
                case LT_EXPR:
13481
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13482
                  return fold_build2_loc (loc, EQ_EXPR, type,
13483
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
13484
                                                        arg0),
13485
                                      arg1);
13486
                default:
13487
                  break;
13488
                }
13489
 
13490
            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13491
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
13492
                     && TYPE_UNSIGNED (arg1_type)
13493
                     /* We will flip the signedness of the comparison operator
13494
                        associated with the mode of arg1, so the sign bit is
13495
                        specified by this mode.  Check that arg1 is the signed
13496
                        max associated with this sign bit.  */
13497
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13498
                     /* signed_type does not work on pointer types.  */
13499
                     && INTEGRAL_TYPE_P (arg1_type))
13500
              {
13501
                /* The following case also applies to X < signed_max+1
13502
                   and X >= signed_max+1 because previous transformations.  */
13503
                if (code == LE_EXPR || code == GT_EXPR)
13504
                  {
13505
                    tree st;
13506
                    st = signed_type_for (TREE_TYPE (arg1));
13507
                    return fold_build2_loc (loc,
13508
                                        code == LE_EXPR ? GE_EXPR : LT_EXPR,
13509
                                        type, fold_convert_loc (loc, st, arg0),
13510
                                        build_int_cst (st, 0));
13511
                  }
13512
              }
13513
          }
13514
      }
13515
 
13516
      /* If we are comparing an ABS_EXPR with a constant, we can
13517
         convert all the cases into explicit comparisons, but they may
13518
         well not be faster than doing the ABS and one comparison.
13519
         But ABS (X) <= C is a range comparison, which becomes a subtraction
13520
         and a comparison, and is probably faster.  */
13521
      if (code == LE_EXPR
13522
          && TREE_CODE (arg1) == INTEGER_CST
13523
          && TREE_CODE (arg0) == ABS_EXPR
13524
          && ! TREE_SIDE_EFFECTS (arg0)
13525
          && (0 != (tem = negate_expr (arg1)))
13526
          && TREE_CODE (tem) == INTEGER_CST
13527
          && !TREE_OVERFLOW (tem))
13528
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13529
                            build2 (GE_EXPR, type,
13530
                                    TREE_OPERAND (arg0, 0), tem),
13531
                            build2 (LE_EXPR, type,
13532
                                    TREE_OPERAND (arg0, 0), arg1));
13533
 
13534
      /* Convert ABS_EXPR<x> >= 0 to true.  */
13535
      strict_overflow_p = false;
13536
      if (code == GE_EXPR
13537
          && (integer_zerop (arg1)
13538
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13539
                  && real_zerop (arg1)))
13540
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13541
        {
13542
          if (strict_overflow_p)
13543
            fold_overflow_warning (("assuming signed overflow does not occur "
13544
                                    "when simplifying comparison of "
13545
                                    "absolute value and zero"),
13546
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
13547
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13548
        }
13549
 
13550
      /* Convert ABS_EXPR<x> < 0 to false.  */
13551
      strict_overflow_p = false;
13552
      if (code == LT_EXPR
13553
          && (integer_zerop (arg1) || real_zerop (arg1))
13554
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13555
        {
13556
          if (strict_overflow_p)
13557
            fold_overflow_warning (("assuming signed overflow does not occur "
13558
                                    "when simplifying comparison of "
13559
                                    "absolute value and zero"),
13560
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
13561
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13562
        }
13563
 
13564
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13565
         and similarly for >= into !=.  */
13566
      if ((code == LT_EXPR || code == GE_EXPR)
13567
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
13568
          && TREE_CODE (arg1) == LSHIFT_EXPR
13569
          && integer_onep (TREE_OPERAND (arg1, 0)))
13570
        {
13571
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13572
                        build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13573
                                TREE_OPERAND (arg1, 1)),
13574
                        build_int_cst (TREE_TYPE (arg0), 0));
13575
          goto fold_binary_exit;
13576
        }
13577
 
13578
      if ((code == LT_EXPR || code == GE_EXPR)
13579
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
13580
          && CONVERT_EXPR_P (arg1)
13581
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13582
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13583
        {
13584
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13585
                        fold_convert_loc (loc, TREE_TYPE (arg0),
13586
                                          build2 (RSHIFT_EXPR,
13587
                                                  TREE_TYPE (arg0), arg0,
13588
                                                  TREE_OPERAND (TREE_OPERAND (arg1, 0),
13589
                                                                1))),
13590
                        build_int_cst (TREE_TYPE (arg0), 0));
13591
          goto fold_binary_exit;
13592
        }
13593
 
13594
      return NULL_TREE;
13595
 
13596
    case UNORDERED_EXPR:
13597
    case ORDERED_EXPR:
13598
    case UNLT_EXPR:
13599
    case UNLE_EXPR:
13600
    case UNGT_EXPR:
13601
    case UNGE_EXPR:
13602
    case UNEQ_EXPR:
13603
    case LTGT_EXPR:
13604
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13605
        {
13606
          t1 = fold_relational_const (code, type, arg0, arg1);
13607
          if (t1 != NULL_TREE)
13608
            return t1;
13609
        }
13610
 
13611
      /* If the first operand is NaN, the result is constant.  */
13612
      if (TREE_CODE (arg0) == REAL_CST
13613
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13614
          && (code != LTGT_EXPR || ! flag_trapping_math))
13615
        {
13616
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13617
               ? integer_zero_node
13618
               : integer_one_node;
13619
          return omit_one_operand_loc (loc, type, t1, arg1);
13620
        }
13621
 
13622
      /* If the second operand is NaN, the result is constant.  */
13623
      if (TREE_CODE (arg1) == REAL_CST
13624
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13625
          && (code != LTGT_EXPR || ! flag_trapping_math))
13626
        {
13627
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13628
               ? integer_zero_node
13629
               : integer_one_node;
13630
          return omit_one_operand_loc (loc, type, t1, arg0);
13631
        }
13632
 
13633
      /* Simplify unordered comparison of something with itself.  */
13634
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13635
          && operand_equal_p (arg0, arg1, 0))
13636
        return constant_boolean_node (1, type);
13637
 
13638
      if (code == LTGT_EXPR
13639
          && !flag_trapping_math
13640
          && operand_equal_p (arg0, arg1, 0))
13641
        return constant_boolean_node (0, type);
13642
 
13643
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
13644
      {
13645
        tree targ0 = strip_float_extensions (arg0);
13646
        tree targ1 = strip_float_extensions (arg1);
13647
        tree newtype = TREE_TYPE (targ0);
13648
 
13649
        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13650
          newtype = TREE_TYPE (targ1);
13651
 
13652
        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13653
          return fold_build2_loc (loc, code, type,
13654
                              fold_convert_loc (loc, newtype, targ0),
13655
                              fold_convert_loc (loc, newtype, targ1));
13656
      }
13657
 
13658
      return NULL_TREE;
13659
 
13660
    case COMPOUND_EXPR:
13661
      /* When pedantic, a compound expression can be neither an lvalue
13662
         nor an integer constant expression.  */
13663
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13664
        return NULL_TREE;
13665
      /* Don't let (0, 0) be null pointer constant.  */
13666
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13667
                                 : fold_convert_loc (loc, type, arg1);
13668
      return pedantic_non_lvalue_loc (loc, tem);
13669
 
13670
    case COMPLEX_EXPR:
13671
      if ((TREE_CODE (arg0) == REAL_CST
13672
           && TREE_CODE (arg1) == REAL_CST)
13673
          || (TREE_CODE (arg0) == INTEGER_CST
13674
              && TREE_CODE (arg1) == INTEGER_CST))
13675
        return build_complex (type, arg0, arg1);
13676
      return NULL_TREE;
13677
 
13678
    case ASSERT_EXPR:
13679
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
13680
      gcc_unreachable ();
13681
 
13682
    default:
13683
      return NULL_TREE;
13684
    } /* switch (code) */
13685
 fold_binary_exit:
13686
  protected_set_expr_location (tem, loc);
13687
  return tem;
13688
}
13689
 
13690
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13691
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13692
   of GOTO_EXPR.  */
13693
 
13694
static tree
13695
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13696
{
13697
  switch (TREE_CODE (*tp))
13698
    {
13699
    case LABEL_EXPR:
13700
      return *tp;
13701
 
13702
    case GOTO_EXPR:
13703
      *walk_subtrees = 0;
13704
 
13705
      /* ... fall through ...  */
13706
 
13707
    default:
13708
      return NULL_TREE;
13709
    }
13710
}
13711
 
13712
/* Return whether the sub-tree ST contains a label which is accessible from
13713
   outside the sub-tree.  */
13714
 
13715
static bool
13716
contains_label_p (tree st)
13717
{
13718
  return
13719
   (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13720
}
13721
 
13722
/* Fold a ternary expression of code CODE and type TYPE with operands
13723
   OP0, OP1, and OP2.  Return the folded expression if folding is
13724
   successful.  Otherwise, return NULL_TREE.  */
13725
 
13726
tree
13727
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13728
              tree op0, tree op1, tree op2)
13729
{
13730
  tree tem;
13731
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13732
  enum tree_code_class kind = TREE_CODE_CLASS (code);
13733
 
13734
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
13735
              && TREE_CODE_LENGTH (code) == 3);
13736
 
13737
  /* Strip any conversions that don't change the mode.  This is safe
13738
     for every expression, except for a comparison expression because
13739
     its signedness is derived from its operands.  So, in the latter
13740
     case, only strip conversions that don't change the signedness.
13741
 
13742
     Note that this is done as an internal manipulation within the
13743
     constant folder, in order to find the simplest representation of
13744
     the arguments so that their form can be studied.  In any cases,
13745
     the appropriate type conversions should be put back in the tree
13746
     that will get out of the constant folder.  */
13747
  if (op0)
13748
    {
13749
      arg0 = op0;
13750
      STRIP_NOPS (arg0);
13751
    }
13752
 
13753
  if (op1)
13754
    {
13755
      arg1 = op1;
13756
      STRIP_NOPS (arg1);
13757
    }
13758
 
13759
  switch (code)
13760
    {
13761
    case COMPONENT_REF:
13762
      if (TREE_CODE (arg0) == CONSTRUCTOR
13763
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13764
        {
13765
          unsigned HOST_WIDE_INT idx;
13766
          tree field, value;
13767
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13768
            if (field == arg1)
13769
              return value;
13770
        }
13771
      return NULL_TREE;
13772
 
13773
    case COND_EXPR:
13774
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13775
         so all simple results must be passed through pedantic_non_lvalue.  */
13776
      if (TREE_CODE (arg0) == INTEGER_CST)
13777
        {
13778
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
13779
          tem = integer_zerop (arg0) ? op2 : op1;
13780
          /* Only optimize constant conditions when the selected branch
13781
             has the same type as the COND_EXPR.  This avoids optimizing
13782
             away "c ? x : throw", where the throw has a void type.
13783
             Avoid throwing away that operand which contains label.  */
13784
          if ((!TREE_SIDE_EFFECTS (unused_op)
13785
               || !contains_label_p (unused_op))
13786
              && (! VOID_TYPE_P (TREE_TYPE (tem))
13787
                  || VOID_TYPE_P (type)))
13788
            return pedantic_non_lvalue_loc (loc, tem);
13789
          return NULL_TREE;
13790
        }
13791
      if (operand_equal_p (arg1, op2, 0))
13792
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13793
 
13794
      /* If we have A op B ? A : C, we may be able to convert this to a
13795
         simpler expression, depending on the operation and the values
13796
         of B and C.  Signed zeros prevent all of these transformations,
13797
         for reasons given above each one.
13798
 
13799
         Also try swapping the arguments and inverting the conditional.  */
13800
      if (COMPARISON_CLASS_P (arg0)
13801
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13802
                                             arg1, TREE_OPERAND (arg0, 1))
13803
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13804
        {
13805
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13806
          if (tem)
13807
            return tem;
13808
        }
13809
 
13810
      if (COMPARISON_CLASS_P (arg0)
13811
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13812
                                             op2,
13813
                                             TREE_OPERAND (arg0, 1))
13814
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13815
        {
13816
          tem = fold_truth_not_expr (loc, arg0);
13817
          if (tem && COMPARISON_CLASS_P (tem))
13818
            {
13819
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13820
              if (tem)
13821
                return tem;
13822
            }
13823
        }
13824
 
13825
      /* If the second operand is simpler than the third, swap them
13826
         since that produces better jump optimization results.  */
13827
      if (truth_value_p (TREE_CODE (arg0))
13828
          && tree_swap_operands_p (op1, op2, false))
13829
        {
13830
          /* See if this can be inverted.  If it can't, possibly because
13831
             it was a floating-point inequality comparison, don't do
13832
             anything.  */
13833
          tem = fold_truth_not_expr (loc, arg0);
13834
          if (tem)
13835
            return fold_build3_loc (loc, code, type, tem, op2, op1);
13836
        }
13837
 
13838
      /* Convert A ? 1 : 0 to simply A.  */
13839
      if (integer_onep (op1)
13840
          && integer_zerop (op2)
13841
          /* If we try to convert OP0 to our type, the
13842
             call to fold will try to move the conversion inside
13843
             a COND, which will recurse.  In that case, the COND_EXPR
13844
             is probably the best choice, so leave it alone.  */
13845
          && type == TREE_TYPE (arg0))
13846
        return pedantic_non_lvalue_loc (loc, arg0);
13847
 
13848
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13849
         over COND_EXPR in cases such as floating point comparisons.  */
13850
      if (integer_zerop (op1)
13851
          && integer_onep (op2)
13852
          && truth_value_p (TREE_CODE (arg0)))
13853
        return pedantic_non_lvalue_loc (loc,
13854
                                    fold_convert_loc (loc, type,
13855
                                              invert_truthvalue_loc (loc,
13856
                                                                     arg0)));
13857
 
13858
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
13859
      if (TREE_CODE (arg0) == LT_EXPR
13860
          && integer_zerop (TREE_OPERAND (arg0, 1))
13861
          && integer_zerop (op2)
13862
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13863
        {
13864
          /* sign_bit_p only checks ARG1 bits within A's precision.
13865
             If <sign bit of A> has wider type than A, bits outside
13866
             of A's precision in <sign bit of A> need to be checked.
13867
             If they are all 0, this optimization needs to be done
13868
             in unsigned A's type, if they are all 1 in signed A's type,
13869
             otherwise this can't be done.  */
13870
          if (TYPE_PRECISION (TREE_TYPE (tem))
13871
              < TYPE_PRECISION (TREE_TYPE (arg1))
13872
              && TYPE_PRECISION (TREE_TYPE (tem))
13873
                 < TYPE_PRECISION (type))
13874
            {
13875
              unsigned HOST_WIDE_INT mask_lo;
13876
              HOST_WIDE_INT mask_hi;
13877
              int inner_width, outer_width;
13878
              tree tem_type;
13879
 
13880
              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13881
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13882
              if (outer_width > TYPE_PRECISION (type))
13883
                outer_width = TYPE_PRECISION (type);
13884
 
13885
              if (outer_width > HOST_BITS_PER_WIDE_INT)
13886
                {
13887
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
13888
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13889
                  mask_lo = -1;
13890
                }
13891
              else
13892
                {
13893
                  mask_hi = 0;
13894
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
13895
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
13896
                }
13897
              if (inner_width > HOST_BITS_PER_WIDE_INT)
13898
                {
13899
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13900
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
13901
                  mask_lo = 0;
13902
                }
13903
              else
13904
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13905
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));
13906
 
13907
              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13908
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13909
                {
13910
                  tem_type = signed_type_for (TREE_TYPE (tem));
13911
                  tem = fold_convert_loc (loc, tem_type, tem);
13912
                }
13913
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13914
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13915
                {
13916
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
13917
                  tem = fold_convert_loc (loc, tem_type, tem);
13918
                }
13919
              else
13920
                tem = NULL;
13921
            }
13922
 
13923
          if (tem)
13924
            return
13925
              fold_convert_loc (loc, type,
13926
                                fold_build2_loc (loc, BIT_AND_EXPR,
13927
                                             TREE_TYPE (tem), tem,
13928
                                             fold_convert_loc (loc,
13929
                                                               TREE_TYPE (tem),
13930
                                                               arg1)));
13931
        }
13932
 
13933
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13934
         already handled above.  */
13935
      if (TREE_CODE (arg0) == BIT_AND_EXPR
13936
          && integer_onep (TREE_OPERAND (arg0, 1))
13937
          && integer_zerop (op2)
13938
          && integer_pow2p (arg1))
13939
        {
13940
          tree tem = TREE_OPERAND (arg0, 0);
13941
          STRIP_NOPS (tem);
13942
          if (TREE_CODE (tem) == RSHIFT_EXPR
13943
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13944
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13945
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13946
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
13947
                                TREE_OPERAND (tem, 0), arg1);
13948
        }
13949
 
13950
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13951
         is probably obsolete because the first operand should be a
13952
         truth value (that's why we have the two cases above), but let's
13953
         leave it in until we can confirm this for all front-ends.  */
13954
      if (integer_zerop (op2)
13955
          && TREE_CODE (arg0) == NE_EXPR
13956
          && integer_zerop (TREE_OPERAND (arg0, 1))
13957
          && integer_pow2p (arg1)
13958
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13959
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13960
                              arg1, OEP_ONLY_CONST))
13961
        return pedantic_non_lvalue_loc (loc,
13962
                                    fold_convert_loc (loc, type,
13963
                                                      TREE_OPERAND (arg0, 0)));
13964
 
13965
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13966
      if (integer_zerop (op2)
13967
          && truth_value_p (TREE_CODE (arg0))
13968
          && truth_value_p (TREE_CODE (arg1)))
13969
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13970
                            fold_convert_loc (loc, type, arg0),
13971
                            arg1);
13972
 
13973
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13974
      if (integer_onep (op2)
13975
          && truth_value_p (TREE_CODE (arg0))
13976
          && truth_value_p (TREE_CODE (arg1)))
13977
        {
13978
          /* Only perform transformation if ARG0 is easily inverted.  */
13979
          tem = fold_truth_not_expr (loc, arg0);
13980
          if (tem)
13981
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13982
                                fold_convert_loc (loc, type, tem),
13983
                                arg1);
13984
        }
13985
 
13986
      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13987
      if (integer_zerop (arg1)
13988
          && truth_value_p (TREE_CODE (arg0))
13989
          && truth_value_p (TREE_CODE (op2)))
13990
        {
13991
          /* Only perform transformation if ARG0 is easily inverted.  */
13992
          tem = fold_truth_not_expr (loc, arg0);
13993
          if (tem)
13994
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13995
                                fold_convert_loc (loc, type, tem),
13996
                                op2);
13997
        }
13998
 
13999
      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14000
      if (integer_onep (arg1)
14001
          && truth_value_p (TREE_CODE (arg0))
14002
          && truth_value_p (TREE_CODE (op2)))
14003
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14004
                            fold_convert_loc (loc, type, arg0),
14005
                            op2);
14006
 
14007
      return NULL_TREE;
14008
 
14009
    case CALL_EXPR:
14010
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
14011
         of fold_ternary on them.  */
14012
      gcc_unreachable ();
14013
 
14014
    case BIT_FIELD_REF:
14015
      if ((TREE_CODE (arg0) == VECTOR_CST
14016
           || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14017
          && type == TREE_TYPE (TREE_TYPE (arg0)))
14018
        {
14019
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14020
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14021
 
14022
          if (width != 0
14023
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14024
              && (idx % width) == 0
14025
              && (idx = idx / width)
14026
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14027
            {
14028
              tree elements = NULL_TREE;
14029
 
14030
              if (TREE_CODE (arg0) == VECTOR_CST)
14031
                elements = TREE_VECTOR_CST_ELTS (arg0);
14032
              else
14033
                {
14034
                  unsigned HOST_WIDE_INT idx;
14035
                  tree value;
14036
 
14037
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14038
                    elements = tree_cons (NULL_TREE, value, elements);
14039
                }
14040
              while (idx-- > 0 && elements)
14041
                elements = TREE_CHAIN (elements);
14042
              if (elements)
14043
                return TREE_VALUE (elements);
14044
              else
14045
                return fold_convert_loc (loc, type, integer_zero_node);
14046
            }
14047
        }
14048
 
14049
      /* A bit-field-ref that referenced the full argument can be stripped.  */
14050
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14051
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14052
          && integer_zerop (op2))
14053
        return fold_convert_loc (loc, type, arg0);
14054
 
14055
      return NULL_TREE;
14056
 
14057
    default:
14058
      return NULL_TREE;
14059
    } /* switch (code) */
14060
}
14061
 
14062
/* Perform constant folding and related simplification of EXPR.
14063
   The related simplifications include x*1 => x, x*0 => 0, etc.,
14064
   and application of the associative law.
14065
   NOP_EXPR conversions may be removed freely (as long as we
14066
   are careful not to change the type of the overall expression).
14067
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14068
   but we can constant-fold them if they have constant operands.  */
14069
 
14070
#ifdef ENABLE_FOLD_CHECKING
14071
# define fold(x) fold_1 (x)
14072
static tree fold_1 (tree);
14073
static
14074
#endif
14075
tree
14076
fold (tree expr)
14077
{
14078
  const tree t = expr;
14079
  enum tree_code code = TREE_CODE (t);
14080
  enum tree_code_class kind = TREE_CODE_CLASS (code);
14081
  tree tem;
14082
  location_t loc = EXPR_LOCATION (expr);
14083
 
14084
  /* Return right away if a constant.  */
14085
  if (kind == tcc_constant)
14086
    return t;
14087
 
14088
  /* CALL_EXPR-like objects with variable numbers of operands are
14089
     treated specially.  */
14090
  if (kind == tcc_vl_exp)
14091
    {
14092
      if (code == CALL_EXPR)
14093
        {
14094
          tem = fold_call_expr (loc, expr, false);
14095
          return tem ? tem : expr;
14096
        }
14097
      return expr;
14098
    }
14099
 
14100
  if (IS_EXPR_CODE_CLASS (kind))
14101
    {
14102
      tree type = TREE_TYPE (t);
14103
      tree op0, op1, op2;
14104
 
14105
      switch (TREE_CODE_LENGTH (code))
14106
        {
14107
        case 1:
14108
          op0 = TREE_OPERAND (t, 0);
14109
          tem = fold_unary_loc (loc, code, type, op0);
14110
          return tem ? tem : expr;
14111
        case 2:
14112
          op0 = TREE_OPERAND (t, 0);
14113
          op1 = TREE_OPERAND (t, 1);
14114
          tem = fold_binary_loc (loc, code, type, op0, op1);
14115
          return tem ? tem : expr;
14116
        case 3:
14117
          op0 = TREE_OPERAND (t, 0);
14118
          op1 = TREE_OPERAND (t, 1);
14119
          op2 = TREE_OPERAND (t, 2);
14120
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14121
          return tem ? tem : expr;
14122
        default:
14123
          break;
14124
        }
14125
    }
14126
 
14127
  switch (code)
14128
    {
14129
    case ARRAY_REF:
14130
      {
14131
        tree op0 = TREE_OPERAND (t, 0);
14132
        tree op1 = TREE_OPERAND (t, 1);
14133
 
14134
        if (TREE_CODE (op1) == INTEGER_CST
14135
            && TREE_CODE (op0) == CONSTRUCTOR
14136
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14137
          {
14138
            VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14139
            unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14140
            unsigned HOST_WIDE_INT begin = 0;
14141
 
14142
            /* Find a matching index by means of a binary search.  */
14143
            while (begin != end)
14144
              {
14145
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14146
                tree index = VEC_index (constructor_elt, elts, middle)->index;
14147
 
14148
                if (TREE_CODE (index) == INTEGER_CST
14149
                    && tree_int_cst_lt (index, op1))
14150
                  begin = middle + 1;
14151
                else if (TREE_CODE (index) == INTEGER_CST
14152
                         && tree_int_cst_lt (op1, index))
14153
                  end = middle;
14154
                else if (TREE_CODE (index) == RANGE_EXPR
14155
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14156
                  begin = middle + 1;
14157
                else if (TREE_CODE (index) == RANGE_EXPR
14158
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14159
                  end = middle;
14160
                else
14161
                  return VEC_index (constructor_elt, elts, middle)->value;
14162
              }
14163
          }
14164
 
14165
        return t;
14166
      }
14167
 
14168
    case CONST_DECL:
14169
      return fold (DECL_INITIAL (t));
14170
 
14171
    default:
14172
      return t;
14173
    } /* switch (code) */
14174
}
14175
 
14176
#ifdef ENABLE_FOLD_CHECKING
14177
#undef fold
14178
 
14179
static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14180
static void fold_check_failed (const_tree, const_tree);
14181
void print_fold_checksum (const_tree);
14182
 
14183
/* When --enable-checking=fold, compute a digest of expr before
14184
   and after actual fold call to see if fold did not accidentally
14185
   change original expr.  */
14186
 
14187
tree
14188
fold (tree expr)
14189
{
14190
  tree ret;
14191
  struct md5_ctx ctx;
14192
  unsigned char checksum_before[16], checksum_after[16];
14193
  htab_t ht;
14194
 
14195
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14196
  md5_init_ctx (&ctx);
14197
  fold_checksum_tree (expr, &ctx, ht);
14198
  md5_finish_ctx (&ctx, checksum_before);
14199
  htab_empty (ht);
14200
 
14201
  ret = fold_1 (expr);
14202
 
14203
  md5_init_ctx (&ctx);
14204
  fold_checksum_tree (expr, &ctx, ht);
14205
  md5_finish_ctx (&ctx, checksum_after);
14206
  htab_delete (ht);
14207
 
14208
  if (memcmp (checksum_before, checksum_after, 16))
14209
    fold_check_failed (expr, ret);
14210
 
14211
  return ret;
14212
}
14213
 
14214
void
14215
print_fold_checksum (const_tree expr)
14216
{
14217
  struct md5_ctx ctx;
14218
  unsigned char checksum[16], cnt;
14219
  htab_t ht;
14220
 
14221
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14222
  md5_init_ctx (&ctx);
14223
  fold_checksum_tree (expr, &ctx, ht);
14224
  md5_finish_ctx (&ctx, checksum);
14225
  htab_delete (ht);
14226
  for (cnt = 0; cnt < 16; ++cnt)
14227
    fprintf (stderr, "%02x", checksum[cnt]);
14228
  putc ('\n', stderr);
14229
}
14230
 
14231
static void
14232
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14233
{
14234
  internal_error ("fold check: original tree changed by fold");
14235
}
14236
 
14237
static void
14238
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14239
{
14240
  const void **slot;
14241
  enum tree_code code;
14242
  union tree_node buf;
14243
  int i, len;
14244
 
14245
recursive_label:
14246
 
14247
  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14248
               <= sizeof (struct tree_function_decl))
14249
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14250
  if (expr == NULL)
14251
    return;
14252
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
14253
  if (*slot != NULL)
14254
    return;
14255
  *slot = expr;
14256
  code = TREE_CODE (expr);
14257
  if (TREE_CODE_CLASS (code) == tcc_declaration
14258
      && DECL_ASSEMBLER_NAME_SET_P (expr))
14259
    {
14260
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14261
      memcpy ((char *) &buf, expr, tree_size (expr));
14262
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14263
      expr = (tree) &buf;
14264
    }
14265
  else if (TREE_CODE_CLASS (code) == tcc_type
14266
           && (TYPE_POINTER_TO (expr)
14267
               || TYPE_REFERENCE_TO (expr)
14268
               || TYPE_CACHED_VALUES_P (expr)
14269
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14270
               || TYPE_NEXT_VARIANT (expr)))
14271
    {
14272
      /* Allow these fields to be modified.  */
14273
      tree tmp;
14274
      memcpy ((char *) &buf, expr, tree_size (expr));
14275
      expr = tmp = (tree) &buf;
14276
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14277
      TYPE_POINTER_TO (tmp) = NULL;
14278
      TYPE_REFERENCE_TO (tmp) = NULL;
14279
      TYPE_NEXT_VARIANT (tmp) = NULL;
14280
      if (TYPE_CACHED_VALUES_P (tmp))
14281
        {
14282
          TYPE_CACHED_VALUES_P (tmp) = 0;
14283
          TYPE_CACHED_VALUES (tmp) = NULL;
14284
        }
14285
    }
14286
  md5_process_bytes (expr, tree_size (expr), ctx);
14287
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14288
  if (TREE_CODE_CLASS (code) != tcc_type
14289
      && TREE_CODE_CLASS (code) != tcc_declaration
14290
      && code != TREE_LIST
14291
      && code != SSA_NAME)
14292
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14293
  switch (TREE_CODE_CLASS (code))
14294
    {
14295
    case tcc_constant:
14296
      switch (code)
14297
        {
14298
        case STRING_CST:
14299
          md5_process_bytes (TREE_STRING_POINTER (expr),
14300
                             TREE_STRING_LENGTH (expr), ctx);
14301
          break;
14302
        case COMPLEX_CST:
14303
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14304
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14305
          break;
14306
        case VECTOR_CST:
14307
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14308
          break;
14309
        default:
14310
          break;
14311
        }
14312
      break;
14313
    case tcc_exceptional:
14314
      switch (code)
14315
        {
14316
        case TREE_LIST:
14317
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14318
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14319
          expr = TREE_CHAIN (expr);
14320
          goto recursive_label;
14321
          break;
14322
        case TREE_VEC:
14323
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14324
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14325
          break;
14326
        default:
14327
          break;
14328
        }
14329
      break;
14330
    case tcc_expression:
14331
    case tcc_reference:
14332
    case tcc_comparison:
14333
    case tcc_unary:
14334
    case tcc_binary:
14335
    case tcc_statement:
14336
    case tcc_vl_exp:
14337
      len = TREE_OPERAND_LENGTH (expr);
14338
      for (i = 0; i < len; ++i)
14339
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14340
      break;
14341
    case tcc_declaration:
14342
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14343
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14344
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14345
        {
14346
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14347
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14348
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14349
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14350
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14351
        }
14352
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14353
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14354
 
14355
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14356
        {
14357
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14358
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14359
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14360
        }
14361
      break;
14362
    case tcc_type:
14363
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
14364
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14365
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14366
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14367
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14368
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14369
      if (INTEGRAL_TYPE_P (expr)
14370
          || SCALAR_FLOAT_TYPE_P (expr))
14371
        {
14372
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14373
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14374
        }
14375
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14376
      if (TREE_CODE (expr) == RECORD_TYPE
14377
          || TREE_CODE (expr) == UNION_TYPE
14378
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
14379
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14380
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14381
      break;
14382
    default:
14383
      break;
14384
    }
14385
}
14386
 
14387
/* Helper function for outputting the checksum of a tree T.  When
14388
   debugging with gdb, you can "define mynext" to be "next" followed
14389
   by "call debug_fold_checksum (op0)", then just trace down till the
14390
   outputs differ.  */
14391
 
14392
void
14393
debug_fold_checksum (const_tree t)
14394
{
14395
  int i;
14396
  unsigned char checksum[16];
14397
  struct md5_ctx ctx;
14398
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14399
 
14400
  md5_init_ctx (&ctx);
14401
  fold_checksum_tree (t, &ctx, ht);
14402
  md5_finish_ctx (&ctx, checksum);
14403
  htab_empty (ht);
14404
 
14405
  for (i = 0; i < 16; i++)
14406
    fprintf (stderr, "%d ", checksum[i]);
14407
 
14408
  fprintf (stderr, "\n");
14409
}
14410
 
14411
#endif
14412
 
14413
/* Fold a unary tree expression with code CODE of type TYPE with an
14414
   operand OP0.  LOC is the location of the resulting expression.
14415
   Return a folded expression if successful.  Otherwise, return a tree
14416
   expression with code CODE of type TYPE with an operand OP0.  */
14417
 
14418
tree
14419
fold_build1_stat_loc (location_t loc,
14420
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14421
{
14422
  tree tem;
14423
#ifdef ENABLE_FOLD_CHECKING
14424
  unsigned char checksum_before[16], checksum_after[16];
14425
  struct md5_ctx ctx;
14426
  htab_t ht;
14427
 
14428
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14429
  md5_init_ctx (&ctx);
14430
  fold_checksum_tree (op0, &ctx, ht);
14431
  md5_finish_ctx (&ctx, checksum_before);
14432
  htab_empty (ht);
14433
#endif
14434
 
14435
  tem = fold_unary_loc (loc, code, type, op0);
14436
  if (!tem)
14437
    {
14438
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14439
      SET_EXPR_LOCATION (tem, loc);
14440
    }
14441
 
14442
#ifdef ENABLE_FOLD_CHECKING
14443
  md5_init_ctx (&ctx);
14444
  fold_checksum_tree (op0, &ctx, ht);
14445
  md5_finish_ctx (&ctx, checksum_after);
14446
  htab_delete (ht);
14447
 
14448
  if (memcmp (checksum_before, checksum_after, 16))
14449
    fold_check_failed (op0, tem);
14450
#endif
14451
  return tem;
14452
}
14453
 
14454
/* Fold a binary tree expression with code CODE of type TYPE with
14455
   operands OP0 and OP1.  LOC is the location of the resulting
14456
   expression.  Return a folded expression if successful.  Otherwise,
14457
   return a tree expression with code CODE of type TYPE with operands
14458
   OP0 and OP1.  */
14459
 
14460
tree
14461
fold_build2_stat_loc (location_t loc,
14462
                      enum tree_code code, tree type, tree op0, tree op1
14463
                      MEM_STAT_DECL)
14464
{
14465
  tree tem;
14466
#ifdef ENABLE_FOLD_CHECKING
14467
  unsigned char checksum_before_op0[16],
14468
                checksum_before_op1[16],
14469
                checksum_after_op0[16],
14470
                checksum_after_op1[16];
14471
  struct md5_ctx ctx;
14472
  htab_t ht;
14473
 
14474
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14475
  md5_init_ctx (&ctx);
14476
  fold_checksum_tree (op0, &ctx, ht);
14477
  md5_finish_ctx (&ctx, checksum_before_op0);
14478
  htab_empty (ht);
14479
 
14480
  md5_init_ctx (&ctx);
14481
  fold_checksum_tree (op1, &ctx, ht);
14482
  md5_finish_ctx (&ctx, checksum_before_op1);
14483
  htab_empty (ht);
14484
#endif
14485
 
14486
  tem = fold_binary_loc (loc, code, type, op0, op1);
14487
  if (!tem)
14488
    {
14489
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14490
      SET_EXPR_LOCATION (tem, loc);
14491
    }
14492
 
14493
#ifdef ENABLE_FOLD_CHECKING
14494
  md5_init_ctx (&ctx);
14495
  fold_checksum_tree (op0, &ctx, ht);
14496
  md5_finish_ctx (&ctx, checksum_after_op0);
14497
  htab_empty (ht);
14498
 
14499
  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14500
    fold_check_failed (op0, tem);
14501
 
14502
  md5_init_ctx (&ctx);
14503
  fold_checksum_tree (op1, &ctx, ht);
14504
  md5_finish_ctx (&ctx, checksum_after_op1);
14505
  htab_delete (ht);
14506
 
14507
  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14508
    fold_check_failed (op1, tem);
14509
#endif
14510
  return tem;
14511
}
14512
 
14513
/* Fold a ternary tree expression with code CODE of type TYPE with
14514
   operands OP0, OP1, and OP2.  Return a folded expression if
14515
   successful.  Otherwise, return a tree expression with code CODE of
14516
   type TYPE with operands OP0, OP1, and OP2.  */
14517
 
14518
tree
14519
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14520
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14521
{
14522
  tree tem;
14523
#ifdef ENABLE_FOLD_CHECKING
14524
  unsigned char checksum_before_op0[16],
14525
                checksum_before_op1[16],
14526
                checksum_before_op2[16],
14527
                checksum_after_op0[16],
14528
                checksum_after_op1[16],
14529
                checksum_after_op2[16];
14530
  struct md5_ctx ctx;
14531
  htab_t ht;
14532
 
14533
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14534
  md5_init_ctx (&ctx);
14535
  fold_checksum_tree (op0, &ctx, ht);
14536
  md5_finish_ctx (&ctx, checksum_before_op0);
14537
  htab_empty (ht);
14538
 
14539
  md5_init_ctx (&ctx);
14540
  fold_checksum_tree (op1, &ctx, ht);
14541
  md5_finish_ctx (&ctx, checksum_before_op1);
14542
  htab_empty (ht);
14543
 
14544
  md5_init_ctx (&ctx);
14545
  fold_checksum_tree (op2, &ctx, ht);
14546
  md5_finish_ctx (&ctx, checksum_before_op2);
14547
  htab_empty (ht);
14548
#endif
14549
 
14550
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14551
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14552
  if (!tem)
14553
    {
14554
      tem =  build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14555
      SET_EXPR_LOCATION (tem, loc);
14556
    }
14557
 
14558
#ifdef ENABLE_FOLD_CHECKING
14559
  md5_init_ctx (&ctx);
14560
  fold_checksum_tree (op0, &ctx, ht);
14561
  md5_finish_ctx (&ctx, checksum_after_op0);
14562
  htab_empty (ht);
14563
 
14564
  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14565
    fold_check_failed (op0, tem);
14566
 
14567
  md5_init_ctx (&ctx);
14568
  fold_checksum_tree (op1, &ctx, ht);
14569
  md5_finish_ctx (&ctx, checksum_after_op1);
14570
  htab_empty (ht);
14571
 
14572
  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14573
    fold_check_failed (op1, tem);
14574
 
14575
  md5_init_ctx (&ctx);
14576
  fold_checksum_tree (op2, &ctx, ht);
14577
  md5_finish_ctx (&ctx, checksum_after_op2);
14578
  htab_delete (ht);
14579
 
14580
  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14581
    fold_check_failed (op2, tem);
14582
#endif
14583
  return tem;
14584
}
14585
 
14586
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14587
   arguments in ARGARRAY, and a null static chain.
14588
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
14589
   of type TYPE from the given operands as constructed by build_call_array.  */
14590
 
14591
tree
14592
fold_build_call_array_loc (location_t loc, tree type, tree fn,
14593
                           int nargs, tree *argarray)
14594
{
14595
  tree tem;
14596
#ifdef ENABLE_FOLD_CHECKING
14597
  unsigned char checksum_before_fn[16],
14598
                checksum_before_arglist[16],
14599
                checksum_after_fn[16],
14600
                checksum_after_arglist[16];
14601
  struct md5_ctx ctx;
14602
  htab_t ht;
14603
  int i;
14604
 
14605
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14606
  md5_init_ctx (&ctx);
14607
  fold_checksum_tree (fn, &ctx, ht);
14608
  md5_finish_ctx (&ctx, checksum_before_fn);
14609
  htab_empty (ht);
14610
 
14611
  md5_init_ctx (&ctx);
14612
  for (i = 0; i < nargs; i++)
14613
    fold_checksum_tree (argarray[i], &ctx, ht);
14614
  md5_finish_ctx (&ctx, checksum_before_arglist);
14615
  htab_empty (ht);
14616
#endif
14617
 
14618
  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14619
 
14620
#ifdef ENABLE_FOLD_CHECKING
14621
  md5_init_ctx (&ctx);
14622
  fold_checksum_tree (fn, &ctx, ht);
14623
  md5_finish_ctx (&ctx, checksum_after_fn);
14624
  htab_empty (ht);
14625
 
14626
  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14627
    fold_check_failed (fn, tem);
14628
 
14629
  md5_init_ctx (&ctx);
14630
  for (i = 0; i < nargs; i++)
14631
    fold_checksum_tree (argarray[i], &ctx, ht);
14632
  md5_finish_ctx (&ctx, checksum_after_arglist);
14633
  htab_delete (ht);
14634
 
14635
  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14636
    fold_check_failed (NULL_TREE, tem);
14637
#endif
14638
  return tem;
14639
}
14640
 
14641
/* Perform constant folding and related simplification of initializer
14642
   expression EXPR.  These behave identically to "fold_buildN" but ignore
14643
   potential run-time traps and exceptions that fold must preserve.  */
14644
 
14645
#define START_FOLD_INIT \
14646
  int saved_signaling_nans = flag_signaling_nans;\
14647
  int saved_trapping_math = flag_trapping_math;\
14648
  int saved_rounding_math = flag_rounding_math;\
14649
  int saved_trapv = flag_trapv;\
14650
  int saved_folding_initializer = folding_initializer;\
14651
  flag_signaling_nans = 0;\
14652
  flag_trapping_math = 0;\
14653
  flag_rounding_math = 0;\
14654
  flag_trapv = 0;\
14655
  folding_initializer = 1;
14656
 
14657
#define END_FOLD_INIT \
14658
  flag_signaling_nans = saved_signaling_nans;\
14659
  flag_trapping_math = saved_trapping_math;\
14660
  flag_rounding_math = saved_rounding_math;\
14661
  flag_trapv = saved_trapv;\
14662
  folding_initializer = saved_folding_initializer;
14663
 
14664
tree
14665
fold_build1_initializer_loc (location_t loc, enum tree_code code,
14666
                             tree type, tree op)
14667
{
14668
  tree result;
14669
  START_FOLD_INIT;
14670
 
14671
  result = fold_build1_loc (loc, code, type, op);
14672
 
14673
  END_FOLD_INIT;
14674
  return result;
14675
}
14676
 
14677
tree
14678
fold_build2_initializer_loc (location_t loc, enum tree_code code,
14679
                             tree type, tree op0, tree op1)
14680
{
14681
  tree result;
14682
  START_FOLD_INIT;
14683
 
14684
  result = fold_build2_loc (loc, code, type, op0, op1);
14685
 
14686
  END_FOLD_INIT;
14687
  return result;
14688
}
14689
 
14690
tree
14691
fold_build3_initializer_loc (location_t loc, enum tree_code code,
14692
                             tree type, tree op0, tree op1, tree op2)
14693
{
14694
  tree result;
14695
  START_FOLD_INIT;
14696
 
14697
  result = fold_build3_loc (loc, code, type, op0, op1, op2);
14698
 
14699
  END_FOLD_INIT;
14700
  return result;
14701
}
14702
 
14703
tree
14704
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14705
                                       int nargs, tree *argarray)
14706
{
14707
  tree result;
14708
  START_FOLD_INIT;
14709
 
14710
  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14711
 
14712
  END_FOLD_INIT;
14713
  return result;
14714
}
14715
 
14716
#undef START_FOLD_INIT
14717
#undef END_FOLD_INIT
14718
 
14719
/* Determine if first argument is a multiple of second argument.  Return 0 if
14720
   it is not, or we cannot easily determined it to be.
14721
 
14722
   An example of the sort of thing we care about (at this point; this routine
14723
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
14724
   fold cases do now) is discovering that
14725
 
14726
     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14727
 
14728
   is a multiple of
14729
 
14730
     SAVE_EXPR (J * 8)
14731
 
14732
   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14733
 
14734
   This code also handles discovering that
14735
 
14736
     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14737
 
14738
   is a multiple of 8 so we don't have to worry about dealing with a
14739
   possible remainder.
14740
 
14741
   Note that we *look* inside a SAVE_EXPR only to determine how it was
14742
   calculated; it is not safe for fold to do much of anything else with the
14743
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14744
   at run time.  For example, the latter example above *cannot* be implemented
14745
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14746
   evaluation time of the original SAVE_EXPR is not necessarily the same at
14747
   the time the new expression is evaluated.  The only optimization of this
14748
   sort that would be valid is changing
14749
 
14750
     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14751
 
14752
   divided by 8 to
14753
 
14754
     SAVE_EXPR (I) * SAVE_EXPR (J)
14755
 
14756
   (where the same SAVE_EXPR (J) is used in the original and the
14757
   transformed version).  */
14758
 
14759
int
14760
multiple_of_p (tree type, const_tree top, const_tree bottom)
14761
{
14762
  if (operand_equal_p (top, bottom, 0))
14763
    return 1;
14764
 
14765
  if (TREE_CODE (type) != INTEGER_TYPE)
14766
    return 0;
14767
 
14768
  switch (TREE_CODE (top))
14769
    {
14770
    case BIT_AND_EXPR:
14771
      /* Bitwise and provides a power of two multiple.  If the mask is
14772
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
14773
      if (!integer_pow2p (bottom))
14774
        return 0;
14775
      /* FALLTHRU */
14776
 
14777
    case MULT_EXPR:
14778
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14779
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14780
 
14781
    case PLUS_EXPR:
14782
    case MINUS_EXPR:
14783
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14784
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14785
 
14786
    case LSHIFT_EXPR:
14787
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14788
        {
14789
          tree op1, t1;
14790
 
14791
          op1 = TREE_OPERAND (top, 1);
14792
          /* const_binop may not detect overflow correctly,
14793
             so check for it explicitly here.  */
14794
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14795
              > TREE_INT_CST_LOW (op1)
14796
              && TREE_INT_CST_HIGH (op1) == 0
14797
              && 0 != (t1 = fold_convert (type,
14798
                                          const_binop (LSHIFT_EXPR,
14799
                                                       size_one_node,
14800
                                                       op1, 0)))
14801
              && !TREE_OVERFLOW (t1))
14802
            return multiple_of_p (type, t1, bottom);
14803
        }
14804
      return 0;
14805
 
14806
    case NOP_EXPR:
14807
      /* Can't handle conversions from non-integral or wider integral type.  */
14808
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14809
          || (TYPE_PRECISION (type)
14810
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14811
        return 0;
14812
 
14813
      /* .. fall through ...  */
14814
 
14815
    case SAVE_EXPR:
14816
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14817
 
14818
    case INTEGER_CST:
14819
      if (TREE_CODE (bottom) != INTEGER_CST
14820
          || integer_zerop (bottom)
14821
          || (TYPE_UNSIGNED (type)
14822
              && (tree_int_cst_sgn (top) < 0
14823
                  || tree_int_cst_sgn (bottom) < 0)))
14824
        return 0;
14825
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14826
                                             top, bottom, 0));
14827
 
14828
    default:
14829
      return 0;
14830
    }
14831
}
14832
 
14833
/* Return true if CODE or TYPE is known to be non-negative. */
14834
 
14835
static bool
14836
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14837
{
14838
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14839
      && truth_value_p (code))
14840
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14841
       have a signed:1 type (where the value is -1 and 0).  */
14842
    return true;
14843
  return false;
14844
}
14845
 
14846
/* Return true if (CODE OP0) is known to be non-negative.  If the return
14847
   value is based on the assumption that signed overflow is undefined,
14848
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
14849
   *STRICT_OVERFLOW_P.  */
14850
 
14851
bool
14852
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14853
                                bool *strict_overflow_p)
14854
{
14855
  if (TYPE_UNSIGNED (type))
14856
    return true;
14857
 
14858
  switch (code)
14859
    {
14860
    case ABS_EXPR:
14861
      /* We can't return 1 if flag_wrapv is set because
14862
         ABS_EXPR<INT_MIN> = INT_MIN.  */
14863
      if (!INTEGRAL_TYPE_P (type))
14864
        return true;
14865
      if (TYPE_OVERFLOW_UNDEFINED (type))
14866
        {
14867
          *strict_overflow_p = true;
14868
          return true;
14869
        }
14870
      break;
14871
 
14872
    case NON_LVALUE_EXPR:
14873
    case FLOAT_EXPR:
14874
    case FIX_TRUNC_EXPR:
14875
      return tree_expr_nonnegative_warnv_p (op0,
14876
                                            strict_overflow_p);
14877
 
14878
    case NOP_EXPR:
14879
      {
14880
        tree inner_type = TREE_TYPE (op0);
14881
        tree outer_type = type;
14882
 
14883
        if (TREE_CODE (outer_type) == REAL_TYPE)
14884
          {
14885
            if (TREE_CODE (inner_type) == REAL_TYPE)
14886
              return tree_expr_nonnegative_warnv_p (op0,
14887
                                                    strict_overflow_p);
14888
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
14889
              {
14890
                if (TYPE_UNSIGNED (inner_type))
14891
                  return true;
14892
                return tree_expr_nonnegative_warnv_p (op0,
14893
                                                      strict_overflow_p);
14894
              }
14895
          }
14896
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14897
          {
14898
            if (TREE_CODE (inner_type) == REAL_TYPE)
14899
              return tree_expr_nonnegative_warnv_p (op0,
14900
                                                    strict_overflow_p);
14901
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
14902
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14903
                      && TYPE_UNSIGNED (inner_type);
14904
          }
14905
      }
14906
      break;
14907
 
14908
    default:
14909
      return tree_simple_nonnegative_warnv_p (code, type);
14910
    }
14911
 
14912
  /* We don't know sign of `t', so be conservative and return false.  */
14913
  return false;
14914
}
14915
 
14916
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14917
   value is based on the assumption that signed overflow is undefined,
14918
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
14919
   *STRICT_OVERFLOW_P.  */
14920
 
14921
bool
14922
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14923
                                      tree op1, bool *strict_overflow_p)
14924
{
14925
  if (TYPE_UNSIGNED (type))
14926
    return true;
14927
 
14928
  switch (code)
14929
    {
14930
    case POINTER_PLUS_EXPR:
14931
    case PLUS_EXPR:
14932
      if (FLOAT_TYPE_P (type))
14933
        return (tree_expr_nonnegative_warnv_p (op0,
14934
                                               strict_overflow_p)
14935
                && tree_expr_nonnegative_warnv_p (op1,
14936
                                                  strict_overflow_p));
14937
 
14938
      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14939
         both unsigned and at least 2 bits shorter than the result.  */
14940
      if (TREE_CODE (type) == INTEGER_TYPE
14941
          && TREE_CODE (op0) == NOP_EXPR
14942
          && TREE_CODE (op1) == NOP_EXPR)
14943
        {
14944
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14945
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14946
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14947
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14948
            {
14949
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
14950
                                       TYPE_PRECISION (inner2)) + 1;
14951
              return prec < TYPE_PRECISION (type);
14952
            }
14953
        }
14954
      break;
14955
 
14956
    case MULT_EXPR:
14957
      if (FLOAT_TYPE_P (type))
14958
        {
14959
          /* x * x for floating point x is always non-negative.  */
14960
          if (operand_equal_p (op0, op1, 0))
14961
            return true;
14962
          return (tree_expr_nonnegative_warnv_p (op0,
14963
                                                 strict_overflow_p)
14964
                  && tree_expr_nonnegative_warnv_p (op1,
14965
                                                    strict_overflow_p));
14966
        }
14967
 
14968
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14969
         both unsigned and their total bits is shorter than the result.  */
14970
      if (TREE_CODE (type) == INTEGER_TYPE
14971
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14972
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14973
        {
14974
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14975
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
14976
            : TREE_TYPE (op0);
14977
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14978
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
14979
            : TREE_TYPE (op1);
14980
 
14981
          bool unsigned0 = TYPE_UNSIGNED (inner0);
14982
          bool unsigned1 = TYPE_UNSIGNED (inner1);
14983
 
14984
          if (TREE_CODE (op0) == INTEGER_CST)
14985
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14986
 
14987
          if (TREE_CODE (op1) == INTEGER_CST)
14988
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14989
 
14990
          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14991
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14992
            {
14993
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14994
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14995
                : TYPE_PRECISION (inner0);
14996
 
14997
              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14998
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14999
                : TYPE_PRECISION (inner1);
15000
 
15001
              return precision0 + precision1 < TYPE_PRECISION (type);
15002
            }
15003
        }
15004
      return false;
15005
 
15006
    case BIT_AND_EXPR:
15007
    case MAX_EXPR:
15008
      return (tree_expr_nonnegative_warnv_p (op0,
15009
                                             strict_overflow_p)
15010
              || tree_expr_nonnegative_warnv_p (op1,
15011
                                                strict_overflow_p));
15012
 
15013
    case BIT_IOR_EXPR:
15014
    case BIT_XOR_EXPR:
15015
    case MIN_EXPR:
15016
    case RDIV_EXPR:
15017
    case TRUNC_DIV_EXPR:
15018
    case CEIL_DIV_EXPR:
15019
    case FLOOR_DIV_EXPR:
15020
    case ROUND_DIV_EXPR:
15021
      return (tree_expr_nonnegative_warnv_p (op0,
15022
                                             strict_overflow_p)
15023
              && tree_expr_nonnegative_warnv_p (op1,
15024
                                                strict_overflow_p));
15025
 
15026
    case TRUNC_MOD_EXPR:
15027
    case CEIL_MOD_EXPR:
15028
    case FLOOR_MOD_EXPR:
15029
    case ROUND_MOD_EXPR:
15030
      return tree_expr_nonnegative_warnv_p (op0,
15031
                                            strict_overflow_p);
15032
    default:
15033
      return tree_simple_nonnegative_warnv_p (code, type);
15034
    }
15035
 
15036
  /* We don't know sign of `t', so be conservative and return false.  */
15037
  return false;
15038
}
15039
 
15040
/* Return true if T is known to be non-negative.  If the return
15041
   value is based on the assumption that signed overflow is undefined,
15042
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15043
   *STRICT_OVERFLOW_P.  */
15044
 
15045
bool
15046
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15047
{
15048
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
15049
    return true;
15050
 
15051
  switch (TREE_CODE (t))
15052
    {
15053
    case INTEGER_CST:
15054
      return tree_int_cst_sgn (t) >= 0;
15055
 
15056
    case REAL_CST:
15057
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15058
 
15059
    case FIXED_CST:
15060
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15061
 
15062
    case COND_EXPR:
15063
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15064
                                             strict_overflow_p)
15065
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15066
                                                strict_overflow_p));
15067
    default:
15068
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15069
                                                   TREE_TYPE (t));
15070
    }
15071
  /* We don't know sign of `t', so be conservative and return false.  */
15072
  return false;
15073
}
15074
 
15075
/* Return true if T is known to be non-negative.  If the return
15076
   value is based on the assumption that signed overflow is undefined,
15077
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15078
   *STRICT_OVERFLOW_P.  */
15079
 
15080
bool
15081
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15082
                               tree arg0, tree arg1, bool *strict_overflow_p)
15083
{
15084
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15085
    switch (DECL_FUNCTION_CODE (fndecl))
15086
      {
15087
        CASE_FLT_FN (BUILT_IN_ACOS):
15088
        CASE_FLT_FN (BUILT_IN_ACOSH):
15089
        CASE_FLT_FN (BUILT_IN_CABS):
15090
        CASE_FLT_FN (BUILT_IN_COSH):
15091
        CASE_FLT_FN (BUILT_IN_ERFC):
15092
        CASE_FLT_FN (BUILT_IN_EXP):
15093
        CASE_FLT_FN (BUILT_IN_EXP10):
15094
        CASE_FLT_FN (BUILT_IN_EXP2):
15095
        CASE_FLT_FN (BUILT_IN_FABS):
15096
        CASE_FLT_FN (BUILT_IN_FDIM):
15097
        CASE_FLT_FN (BUILT_IN_HYPOT):
15098
        CASE_FLT_FN (BUILT_IN_POW10):
15099
        CASE_INT_FN (BUILT_IN_FFS):
15100
        CASE_INT_FN (BUILT_IN_PARITY):
15101
        CASE_INT_FN (BUILT_IN_POPCOUNT):
15102
      case BUILT_IN_BSWAP32:
15103
      case BUILT_IN_BSWAP64:
15104
        /* Always true.  */
15105
        return true;
15106
 
15107
        CASE_FLT_FN (BUILT_IN_SQRT):
15108
        /* sqrt(-0.0) is -0.0.  */
15109
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15110
          return true;
15111
        return tree_expr_nonnegative_warnv_p (arg0,
15112
                                              strict_overflow_p);
15113
 
15114
        CASE_FLT_FN (BUILT_IN_ASINH):
15115
        CASE_FLT_FN (BUILT_IN_ATAN):
15116
        CASE_FLT_FN (BUILT_IN_ATANH):
15117
        CASE_FLT_FN (BUILT_IN_CBRT):
15118
        CASE_FLT_FN (BUILT_IN_CEIL):
15119
        CASE_FLT_FN (BUILT_IN_ERF):
15120
        CASE_FLT_FN (BUILT_IN_EXPM1):
15121
        CASE_FLT_FN (BUILT_IN_FLOOR):
15122
        CASE_FLT_FN (BUILT_IN_FMOD):
15123
        CASE_FLT_FN (BUILT_IN_FREXP):
15124
        CASE_FLT_FN (BUILT_IN_LCEIL):
15125
        CASE_FLT_FN (BUILT_IN_LDEXP):
15126
        CASE_FLT_FN (BUILT_IN_LFLOOR):
15127
        CASE_FLT_FN (BUILT_IN_LLCEIL):
15128
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
15129
        CASE_FLT_FN (BUILT_IN_LLRINT):
15130
        CASE_FLT_FN (BUILT_IN_LLROUND):
15131
        CASE_FLT_FN (BUILT_IN_LRINT):
15132
        CASE_FLT_FN (BUILT_IN_LROUND):
15133
        CASE_FLT_FN (BUILT_IN_MODF):
15134
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
15135
        CASE_FLT_FN (BUILT_IN_RINT):
15136
        CASE_FLT_FN (BUILT_IN_ROUND):
15137
        CASE_FLT_FN (BUILT_IN_SCALB):
15138
        CASE_FLT_FN (BUILT_IN_SCALBLN):
15139
        CASE_FLT_FN (BUILT_IN_SCALBN):
15140
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
15141
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15142
        CASE_FLT_FN (BUILT_IN_SINH):
15143
        CASE_FLT_FN (BUILT_IN_TANH):
15144
        CASE_FLT_FN (BUILT_IN_TRUNC):
15145
        /* True if the 1st argument is nonnegative.  */
15146
        return tree_expr_nonnegative_warnv_p (arg0,
15147
                                              strict_overflow_p);
15148
 
15149
        CASE_FLT_FN (BUILT_IN_FMAX):
15150
        /* True if the 1st OR 2nd arguments are nonnegative.  */
15151
        return (tree_expr_nonnegative_warnv_p (arg0,
15152
                                               strict_overflow_p)
15153
                || (tree_expr_nonnegative_warnv_p (arg1,
15154
                                                   strict_overflow_p)));
15155
 
15156
        CASE_FLT_FN (BUILT_IN_FMIN):
15157
        /* True if the 1st AND 2nd arguments are nonnegative.  */
15158
        return (tree_expr_nonnegative_warnv_p (arg0,
15159
                                               strict_overflow_p)
15160
                && (tree_expr_nonnegative_warnv_p (arg1,
15161
                                                   strict_overflow_p)));
15162
 
15163
        CASE_FLT_FN (BUILT_IN_COPYSIGN):
15164
        /* True if the 2nd argument is nonnegative.  */
15165
        return tree_expr_nonnegative_warnv_p (arg1,
15166
                                              strict_overflow_p);
15167
 
15168
        CASE_FLT_FN (BUILT_IN_POWI):
15169
        /* True if the 1st argument is nonnegative or the second
15170
           argument is an even integer.  */
15171
        if (TREE_CODE (arg1) == INTEGER_CST
15172
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15173
          return true;
15174
        return tree_expr_nonnegative_warnv_p (arg0,
15175
                                              strict_overflow_p);
15176
 
15177
        CASE_FLT_FN (BUILT_IN_POW):
15178
        /* True if the 1st argument is nonnegative or the second
15179
           argument is an even integer valued real.  */
15180
        if (TREE_CODE (arg1) == REAL_CST)
15181
          {
15182
            REAL_VALUE_TYPE c;
15183
            HOST_WIDE_INT n;
15184
 
15185
            c = TREE_REAL_CST (arg1);
15186
            n = real_to_integer (&c);
15187
            if ((n & 1) == 0)
15188
              {
15189
                REAL_VALUE_TYPE cint;
15190
                real_from_integer (&cint, VOIDmode, n,
15191
                                   n < 0 ? -1 : 0, 0);
15192
                if (real_identical (&c, &cint))
15193
                  return true;
15194
              }
15195
          }
15196
        return tree_expr_nonnegative_warnv_p (arg0,
15197
                                              strict_overflow_p);
15198
 
15199
      default:
15200
        break;
15201
      }
15202
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15203
                                          type);
15204
}
15205
 
15206
/* Return true if T is known to be non-negative.  If the return
15207
   value is based on the assumption that signed overflow is undefined,
15208
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15209
   *STRICT_OVERFLOW_P.  */
15210
 
15211
bool
15212
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15213
{
15214
  enum tree_code code = TREE_CODE (t);
15215
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
15216
    return true;
15217
 
15218
  switch (code)
15219
    {
15220
    case TARGET_EXPR:
15221
      {
15222
        tree temp = TARGET_EXPR_SLOT (t);
15223
        t = TARGET_EXPR_INITIAL (t);
15224
 
15225
        /* If the initializer is non-void, then it's a normal expression
15226
           that will be assigned to the slot.  */
15227
        if (!VOID_TYPE_P (t))
15228
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15229
 
15230
        /* Otherwise, the initializer sets the slot in some way.  One common
15231
           way is an assignment statement at the end of the initializer.  */
15232
        while (1)
15233
          {
15234
            if (TREE_CODE (t) == BIND_EXPR)
15235
              t = expr_last (BIND_EXPR_BODY (t));
15236
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15237
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
15238
              t = expr_last (TREE_OPERAND (t, 0));
15239
            else if (TREE_CODE (t) == STATEMENT_LIST)
15240
              t = expr_last (t);
15241
            else
15242
              break;
15243
          }
15244
        if (TREE_CODE (t) == MODIFY_EXPR
15245
            && TREE_OPERAND (t, 0) == temp)
15246
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15247
                                                strict_overflow_p);
15248
 
15249
        return false;
15250
      }
15251
 
15252
    case CALL_EXPR:
15253
      {
15254
        tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
15255
        tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
15256
 
15257
        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15258
                                              get_callee_fndecl (t),
15259
                                              arg0,
15260
                                              arg1,
15261
                                              strict_overflow_p);
15262
      }
15263
    case COMPOUND_EXPR:
15264
    case MODIFY_EXPR:
15265
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15266
                                            strict_overflow_p);
15267
    case BIND_EXPR:
15268
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15269
                                            strict_overflow_p);
15270
    case SAVE_EXPR:
15271
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15272
                                            strict_overflow_p);
15273
 
15274
    default:
15275
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15276
                                                   TREE_TYPE (t));
15277
    }
15278
 
15279
  /* We don't know sign of `t', so be conservative and return false.  */
15280
  return false;
15281
}
15282
 
15283
/* Return true if T is known to be non-negative.  If the return
15284
   value is based on the assumption that signed overflow is undefined,
15285
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15286
   *STRICT_OVERFLOW_P.  */
15287
 
15288
bool
15289
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15290
{
15291
  enum tree_code code;
15292
  if (t == error_mark_node)
15293
    return false;
15294
 
15295
  code = TREE_CODE (t);
15296
  switch (TREE_CODE_CLASS (code))
15297
    {
15298
    case tcc_binary:
15299
    case tcc_comparison:
15300
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15301
                                              TREE_TYPE (t),
15302
                                              TREE_OPERAND (t, 0),
15303
                                              TREE_OPERAND (t, 1),
15304
                                              strict_overflow_p);
15305
 
15306
    case tcc_unary:
15307
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15308
                                             TREE_TYPE (t),
15309
                                             TREE_OPERAND (t, 0),
15310
                                             strict_overflow_p);
15311
 
15312
    case tcc_constant:
15313
    case tcc_declaration:
15314
    case tcc_reference:
15315
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15316
 
15317
    default:
15318
      break;
15319
    }
15320
 
15321
  switch (code)
15322
    {
15323
    case TRUTH_AND_EXPR:
15324
    case TRUTH_OR_EXPR:
15325
    case TRUTH_XOR_EXPR:
15326
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15327
                                              TREE_TYPE (t),
15328
                                              TREE_OPERAND (t, 0),
15329
                                              TREE_OPERAND (t, 1),
15330
                                              strict_overflow_p);
15331
    case TRUTH_NOT_EXPR:
15332
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15333
                                             TREE_TYPE (t),
15334
                                             TREE_OPERAND (t, 0),
15335
                                             strict_overflow_p);
15336
 
15337
    case COND_EXPR:
15338
    case CONSTRUCTOR:
15339
    case OBJ_TYPE_REF:
15340
    case ASSERT_EXPR:
15341
    case ADDR_EXPR:
15342
    case WITH_SIZE_EXPR:
15343
    case SSA_NAME:
15344
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15345
 
15346
    default:
15347
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15348
    }
15349
}
15350
 
15351
/* Return true if `t' is known to be non-negative.  Handle warnings
15352
   about undefined signed overflow.  */
15353
 
15354
bool
15355
tree_expr_nonnegative_p (tree t)
15356
{
15357
  bool ret, strict_overflow_p;
15358
 
15359
  strict_overflow_p = false;
15360
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15361
  if (strict_overflow_p)
15362
    fold_overflow_warning (("assuming signed overflow does not occur when "
15363
                            "determining that expression is always "
15364
                            "non-negative"),
15365
                           WARN_STRICT_OVERFLOW_MISC);
15366
  return ret;
15367
}
15368
 
15369
 
15370
/* Return true when (CODE OP0) is an address and is known to be nonzero.
15371
   For floating point we further ensure that T is not denormal.
15372
   Similar logic is present in nonzero_address in rtlanal.h.
15373
 
15374
   If the return value is based on the assumption that signed overflow
15375
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15376
   change *STRICT_OVERFLOW_P.  */
15377
 
15378
bool
15379
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15380
                                 bool *strict_overflow_p)
15381
{
15382
  switch (code)
15383
    {
15384
    case ABS_EXPR:
15385
      return tree_expr_nonzero_warnv_p (op0,
15386
                                        strict_overflow_p);
15387
 
15388
    case NOP_EXPR:
15389
      {
15390
        tree inner_type = TREE_TYPE (op0);
15391
        tree outer_type = type;
15392
 
15393
        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15394
                && tree_expr_nonzero_warnv_p (op0,
15395
                                              strict_overflow_p));
15396
      }
15397
      break;
15398
 
15399
    case NON_LVALUE_EXPR:
15400
      return tree_expr_nonzero_warnv_p (op0,
15401
                                        strict_overflow_p);
15402
 
15403
    default:
15404
      break;
15405
  }
15406
 
15407
  return false;
15408
}
15409
 
15410
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15411
   For floating point we further ensure that T is not denormal.
15412
   Similar logic is present in nonzero_address in rtlanal.h.
15413
 
15414
   If the return value is based on the assumption that signed overflow
15415
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15416
   change *STRICT_OVERFLOW_P.  */
15417
 
15418
bool
15419
tree_binary_nonzero_warnv_p (enum tree_code code,
15420
                             tree type,
15421
                             tree op0,
15422
                             tree op1, bool *strict_overflow_p)
15423
{
15424
  bool sub_strict_overflow_p;
15425
  switch (code)
15426
    {
15427
    case POINTER_PLUS_EXPR:
15428
    case PLUS_EXPR:
15429
      if (TYPE_OVERFLOW_UNDEFINED (type))
15430
        {
15431
          /* With the presence of negative values it is hard
15432
             to say something.  */
15433
          sub_strict_overflow_p = false;
15434
          if (!tree_expr_nonnegative_warnv_p (op0,
15435
                                              &sub_strict_overflow_p)
15436
              || !tree_expr_nonnegative_warnv_p (op1,
15437
                                                 &sub_strict_overflow_p))
15438
            return false;
15439
          /* One of operands must be positive and the other non-negative.  */
15440
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
15441
             overflows, on a twos-complement machine the sum of two
15442
             nonnegative numbers can never be zero.  */
15443
          return (tree_expr_nonzero_warnv_p (op0,
15444
                                             strict_overflow_p)
15445
                  || tree_expr_nonzero_warnv_p (op1,
15446
                                                strict_overflow_p));
15447
        }
15448
      break;
15449
 
15450
    case MULT_EXPR:
15451
      if (TYPE_OVERFLOW_UNDEFINED (type))
15452
        {
15453
          if (tree_expr_nonzero_warnv_p (op0,
15454
                                         strict_overflow_p)
15455
              && tree_expr_nonzero_warnv_p (op1,
15456
                                            strict_overflow_p))
15457
            {
15458
              *strict_overflow_p = true;
15459
              return true;
15460
            }
15461
        }
15462
      break;
15463
 
15464
    case MIN_EXPR:
15465
      sub_strict_overflow_p = false;
15466
      if (tree_expr_nonzero_warnv_p (op0,
15467
                                     &sub_strict_overflow_p)
15468
          && tree_expr_nonzero_warnv_p (op1,
15469
                                        &sub_strict_overflow_p))
15470
        {
15471
          if (sub_strict_overflow_p)
15472
            *strict_overflow_p = true;
15473
        }
15474
      break;
15475
 
15476
    case MAX_EXPR:
15477
      sub_strict_overflow_p = false;
15478
      if (tree_expr_nonzero_warnv_p (op0,
15479
                                     &sub_strict_overflow_p))
15480
        {
15481
          if (sub_strict_overflow_p)
15482
            *strict_overflow_p = true;
15483
 
15484
          /* When both operands are nonzero, then MAX must be too.  */
15485
          if (tree_expr_nonzero_warnv_p (op1,
15486
                                         strict_overflow_p))
15487
            return true;
15488
 
15489
          /* MAX where operand 0 is positive is positive.  */
15490
          return tree_expr_nonnegative_warnv_p (op0,
15491
                                               strict_overflow_p);
15492
        }
15493
      /* MAX where operand 1 is positive is positive.  */
15494
      else if (tree_expr_nonzero_warnv_p (op1,
15495
                                          &sub_strict_overflow_p)
15496
               && tree_expr_nonnegative_warnv_p (op1,
15497
                                                 &sub_strict_overflow_p))
15498
        {
15499
          if (sub_strict_overflow_p)
15500
            *strict_overflow_p = true;
15501
          return true;
15502
        }
15503
      break;
15504
 
15505
    case BIT_IOR_EXPR:
15506
      return (tree_expr_nonzero_warnv_p (op1,
15507
                                         strict_overflow_p)
15508
              || tree_expr_nonzero_warnv_p (op0,
15509
                                            strict_overflow_p));
15510
 
15511
    default:
15512
      break;
15513
  }
15514
 
15515
  return false;
15516
}
15517
 
15518
/* Return true when T is an address and is known to be nonzero.
15519
   For floating point we further ensure that T is not denormal.
15520
   Similar logic is present in nonzero_address in rtlanal.h.
15521
 
15522
   If the return value is based on the assumption that signed overflow
15523
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15524
   change *STRICT_OVERFLOW_P.  */
15525
 
15526
bool
15527
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15528
{
15529
  bool sub_strict_overflow_p;
15530
  switch (TREE_CODE (t))
15531
    {
15532
    case INTEGER_CST:
15533
      return !integer_zerop (t);
15534
 
15535
    case ADDR_EXPR:
15536
      {
15537
        tree base = get_base_address (TREE_OPERAND (t, 0));
15538
 
15539
        if (!base)
15540
          return false;
15541
 
15542
        /* Weak declarations may link to NULL.  Other things may also be NULL
15543
           so protect with -fdelete-null-pointer-checks; but not variables
15544
           allocated on the stack.  */
15545
        if (DECL_P (base)
15546
            && (flag_delete_null_pointer_checks
15547
                || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15548
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15549
 
15550
        /* Constants are never weak.  */
15551
        if (CONSTANT_CLASS_P (base))
15552
          return true;
15553
 
15554
        return false;
15555
      }
15556
 
15557
    case COND_EXPR:
15558
      sub_strict_overflow_p = false;
15559
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15560
                                     &sub_strict_overflow_p)
15561
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15562
                                        &sub_strict_overflow_p))
15563
        {
15564
          if (sub_strict_overflow_p)
15565
            *strict_overflow_p = true;
15566
          return true;
15567
        }
15568
      break;
15569
 
15570
    default:
15571
      break;
15572
    }
15573
  return false;
15574
}
15575
 
15576
/* Return true when T is an address and is known to be nonzero.
15577
   For floating point we further ensure that T is not denormal.
15578
   Similar logic is present in nonzero_address in rtlanal.h.
15579
 
15580
   If the return value is based on the assumption that signed overflow
15581
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15582
   change *STRICT_OVERFLOW_P.  */
15583
 
15584
bool
15585
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15586
{
15587
  tree type = TREE_TYPE (t);
15588
  enum tree_code code;
15589
 
15590
  /* Doing something useful for floating point would need more work.  */
15591
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15592
    return false;
15593
 
15594
  code = TREE_CODE (t);
15595
  switch (TREE_CODE_CLASS (code))
15596
    {
15597
    case tcc_unary:
15598
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15599
                                              strict_overflow_p);
15600
    case tcc_binary:
15601
    case tcc_comparison:
15602
      return tree_binary_nonzero_warnv_p (code, type,
15603
                                               TREE_OPERAND (t, 0),
15604
                                               TREE_OPERAND (t, 1),
15605
                                               strict_overflow_p);
15606
    case tcc_constant:
15607
    case tcc_declaration:
15608
    case tcc_reference:
15609
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15610
 
15611
    default:
15612
      break;
15613
    }
15614
 
15615
  switch (code)
15616
    {
15617
    case TRUTH_NOT_EXPR:
15618
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15619
                                              strict_overflow_p);
15620
 
15621
    case TRUTH_AND_EXPR:
15622
    case TRUTH_OR_EXPR:
15623
    case TRUTH_XOR_EXPR:
15624
      return tree_binary_nonzero_warnv_p (code, type,
15625
                                               TREE_OPERAND (t, 0),
15626
                                               TREE_OPERAND (t, 1),
15627
                                               strict_overflow_p);
15628
 
15629
    case COND_EXPR:
15630
    case CONSTRUCTOR:
15631
    case OBJ_TYPE_REF:
15632
    case ASSERT_EXPR:
15633
    case ADDR_EXPR:
15634
    case WITH_SIZE_EXPR:
15635
    case SSA_NAME:
15636
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15637
 
15638
    case COMPOUND_EXPR:
15639
    case MODIFY_EXPR:
15640
    case BIND_EXPR:
15641
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15642
                                        strict_overflow_p);
15643
 
15644
    case SAVE_EXPR:
15645
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15646
                                        strict_overflow_p);
15647
 
15648
    case CALL_EXPR:
15649
      return alloca_call_p (t);
15650
 
15651
    default:
15652
      break;
15653
    }
15654
  return false;
15655
}
15656
 
15657
/* Return true when T is an address and is known to be nonzero.
15658
   Handle warnings about undefined signed overflow.  */
15659
 
15660
bool
15661
tree_expr_nonzero_p (tree t)
15662
{
15663
  bool ret, strict_overflow_p;
15664
 
15665
  strict_overflow_p = false;
15666
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15667
  if (strict_overflow_p)
15668
    fold_overflow_warning (("assuming signed overflow does not occur when "
15669
                            "determining that expression is always "
15670
                            "non-zero"),
15671
                           WARN_STRICT_OVERFLOW_MISC);
15672
  return ret;
15673
}
15674
 
15675
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15676
   attempt to fold the expression to a constant without modifying TYPE,
15677
   OP0 or OP1.
15678
 
15679
   If the expression could be simplified to a constant, then return
15680
   the constant.  If the expression would not be simplified to a
15681
   constant, then return NULL_TREE.  */
15682
 
15683
tree
15684
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15685
{
15686
  tree tem = fold_binary (code, type, op0, op1);
15687
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15688
}
15689
 
15690
/* Given the components of a unary expression CODE, TYPE and OP0,
15691
   attempt to fold the expression to a constant without modifying
15692
   TYPE or OP0.
15693
 
15694
   If the expression could be simplified to a constant, then return
15695
   the constant.  If the expression would not be simplified to a
15696
   constant, then return NULL_TREE.  */
15697
 
15698
tree
15699
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15700
{
15701
  tree tem = fold_unary (code, type, op0);
15702
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15703
}
15704
 
15705
/* If EXP represents referencing an element in a constant string
15706
   (either via pointer arithmetic or array indexing), return the
15707
   tree representing the value accessed, otherwise return NULL.  */
15708
 
15709
tree
15710
fold_read_from_constant_string (tree exp)
15711
{
15712
  if ((TREE_CODE (exp) == INDIRECT_REF
15713
       || TREE_CODE (exp) == ARRAY_REF)
15714
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15715
    {
15716
      tree exp1 = TREE_OPERAND (exp, 0);
15717
      tree index;
15718
      tree string;
15719
      location_t loc = EXPR_LOCATION (exp);
15720
 
15721
      if (TREE_CODE (exp) == INDIRECT_REF)
15722
        string = string_constant (exp1, &index);
15723
      else
15724
        {
15725
          tree low_bound = array_ref_low_bound (exp);
15726
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15727
 
15728
          /* Optimize the special-case of a zero lower bound.
15729
 
15730
             We convert the low_bound to sizetype to avoid some problems
15731
             with constant folding.  (E.g. suppose the lower bound is 1,
15732
             and its mode is QI.  Without the conversion,l (ARRAY
15733
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15734
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15735
          if (! integer_zerop (low_bound))
15736
            index = size_diffop_loc (loc, index,
15737
                                 fold_convert_loc (loc, sizetype, low_bound));
15738
 
15739
          string = exp1;
15740
        }
15741
 
15742
      if (string
15743
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15744
          && TREE_CODE (string) == STRING_CST
15745
          && TREE_CODE (index) == INTEGER_CST
15746
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15747
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15748
              == MODE_INT)
15749
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15750
        return build_int_cst_type (TREE_TYPE (exp),
15751
                                   (TREE_STRING_POINTER (string)
15752
                                    [TREE_INT_CST_LOW (index)]));
15753
    }
15754
  return NULL;
15755
}
15756
 
15757
/* Return the tree for neg (ARG0) when ARG0 is known to be either
15758
   an integer constant, real, or fixed-point constant.
15759
 
15760
   TYPE is the type of the result.  */
15761
 
15762
static tree
15763
fold_negate_const (tree arg0, tree type)
15764
{
15765
  tree t = NULL_TREE;
15766
 
15767
  switch (TREE_CODE (arg0))
15768
    {
15769
    case INTEGER_CST:
15770
      {
15771
        unsigned HOST_WIDE_INT low;
15772
        HOST_WIDE_INT high;
15773
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15774
                                   TREE_INT_CST_HIGH (arg0),
15775
                                   &low, &high);
15776
        t = force_fit_type_double (type, low, high, 1,
15777
                                   (overflow | TREE_OVERFLOW (arg0))
15778
                                   && !TYPE_UNSIGNED (type));
15779
        break;
15780
      }
15781
 
15782
    case REAL_CST:
15783
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15784
      break;
15785
 
15786
    case FIXED_CST:
15787
      {
15788
        FIXED_VALUE_TYPE f;
15789
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15790
                                            &(TREE_FIXED_CST (arg0)), NULL,
15791
                                            TYPE_SATURATING (type));
15792
        t = build_fixed (type, f);
15793
        /* Propagate overflow flags.  */
15794
        if (overflow_p | TREE_OVERFLOW (arg0))
15795
          TREE_OVERFLOW (t) = 1;
15796
        break;
15797
      }
15798
 
15799
    default:
15800
      gcc_unreachable ();
15801
    }
15802
 
15803
  return t;
15804
}
15805
 
15806
/* Return the tree for abs (ARG0) when ARG0 is known to be either
15807
   an integer constant or real constant.
15808
 
15809
   TYPE is the type of the result.  */
15810
 
15811
tree
15812
fold_abs_const (tree arg0, tree type)
15813
{
15814
  tree t = NULL_TREE;
15815
 
15816
  switch (TREE_CODE (arg0))
15817
    {
15818
    case INTEGER_CST:
15819
      /* If the value is unsigned, then the absolute value is
15820
         the same as the ordinary value.  */
15821
      if (TYPE_UNSIGNED (type))
15822
        t = arg0;
15823
      /* Similarly, if the value is non-negative.  */
15824
      else if (INT_CST_LT (integer_minus_one_node, arg0))
15825
        t = arg0;
15826
      /* If the value is negative, then the absolute value is
15827
         its negation.  */
15828
      else
15829
        {
15830
          unsigned HOST_WIDE_INT low;
15831
          HOST_WIDE_INT high;
15832
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15833
                                     TREE_INT_CST_HIGH (arg0),
15834
                                     &low, &high);
15835
          t = force_fit_type_double (type, low, high, -1,
15836
                                     overflow | TREE_OVERFLOW (arg0));
15837
        }
15838
      break;
15839
 
15840
    case REAL_CST:
15841
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15842
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15843
      else
15844
        t =  arg0;
15845
      break;
15846
 
15847
    default:
15848
      gcc_unreachable ();
15849
    }
15850
 
15851
  return t;
15852
}
15853
 
15854
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
15855
   constant.  TYPE is the type of the result.  */
15856
 
15857
static tree
15858
fold_not_const (tree arg0, tree type)
15859
{
15860
  tree t = NULL_TREE;
15861
 
15862
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15863
 
15864
  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15865
                             ~TREE_INT_CST_HIGH (arg0), 0,
15866
                             TREE_OVERFLOW (arg0));
15867
 
15868
  return t;
15869
}
15870
 
15871
/* Given CODE, a relational operator, the target type, TYPE and two
15872
   constant operands OP0 and OP1, return the result of the
15873
   relational operation.  If the result is not a compile time
15874
   constant, then return NULL_TREE.  */
15875
 
15876
static tree
15877
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15878
{
15879
  int result, invert;
15880
 
15881
  /* From here on, the only cases we handle are when the result is
15882
     known to be a constant.  */
15883
 
15884
  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15885
    {
15886
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15887
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15888
 
15889
      /* Handle the cases where either operand is a NaN.  */
15890
      if (real_isnan (c0) || real_isnan (c1))
15891
        {
15892
          switch (code)
15893
            {
15894
            case EQ_EXPR:
15895
            case ORDERED_EXPR:
15896
              result = 0;
15897
              break;
15898
 
15899
            case NE_EXPR:
15900
            case UNORDERED_EXPR:
15901
            case UNLT_EXPR:
15902
            case UNLE_EXPR:
15903
            case UNGT_EXPR:
15904
            case UNGE_EXPR:
15905
            case UNEQ_EXPR:
15906
              result = 1;
15907
              break;
15908
 
15909
            case LT_EXPR:
15910
            case LE_EXPR:
15911
            case GT_EXPR:
15912
            case GE_EXPR:
15913
            case LTGT_EXPR:
15914
              if (flag_trapping_math)
15915
                return NULL_TREE;
15916
              result = 0;
15917
              break;
15918
 
15919
            default:
15920
              gcc_unreachable ();
15921
            }
15922
 
15923
          return constant_boolean_node (result, type);
15924
        }
15925
 
15926
      return constant_boolean_node (real_compare (code, c0, c1), type);
15927
    }
15928
 
15929
  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15930
    {
15931
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15932
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15933
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
15934
    }
15935
 
15936
  /* Handle equality/inequality of complex constants.  */
15937
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15938
    {
15939
      tree rcond = fold_relational_const (code, type,
15940
                                          TREE_REALPART (op0),
15941
                                          TREE_REALPART (op1));
15942
      tree icond = fold_relational_const (code, type,
15943
                                          TREE_IMAGPART (op0),
15944
                                          TREE_IMAGPART (op1));
15945
      if (code == EQ_EXPR)
15946
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15947
      else if (code == NE_EXPR)
15948
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15949
      else
15950
        return NULL_TREE;
15951
    }
15952
 
15953
  /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15954
 
15955
     To compute GT, swap the arguments and do LT.
15956
     To compute GE, do LT and invert the result.
15957
     To compute LE, swap the arguments, do LT and invert the result.
15958
     To compute NE, do EQ and invert the result.
15959
 
15960
     Therefore, the code below must handle only EQ and LT.  */
15961
 
15962
  if (code == LE_EXPR || code == GT_EXPR)
15963
    {
15964
      tree tem = op0;
15965
      op0 = op1;
15966
      op1 = tem;
15967
      code = swap_tree_comparison (code);
15968
    }
15969
 
15970
  /* Note that it is safe to invert for real values here because we
15971
     have already handled the one case that it matters.  */
15972
 
15973
  invert = 0;
15974
  if (code == NE_EXPR || code == GE_EXPR)
15975
    {
15976
      invert = 1;
15977
      code = invert_tree_comparison (code, false);
15978
    }
15979
 
15980
  /* Compute a result for LT or EQ if args permit;
15981
     Otherwise return T.  */
15982
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15983
    {
15984
      if (code == EQ_EXPR)
15985
        result = tree_int_cst_equal (op0, op1);
15986
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15987
        result = INT_CST_LT_UNSIGNED (op0, op1);
15988
      else
15989
        result = INT_CST_LT (op0, op1);
15990
    }
15991
  else
15992
    return NULL_TREE;
15993
 
15994
  if (invert)
15995
    result ^= 1;
15996
  return constant_boolean_node (result, type);
15997
}
15998
 
15999
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16000
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
16001
   itself.  */
16002
 
16003
tree
16004
fold_build_cleanup_point_expr (tree type, tree expr)
16005
{
16006
  /* If the expression does not have side effects then we don't have to wrap
16007
     it with a cleanup point expression.  */
16008
  if (!TREE_SIDE_EFFECTS (expr))
16009
    return expr;
16010
 
16011
  /* If the expression is a return, check to see if the expression inside the
16012
     return has no side effects or the right hand side of the modify expression
16013
     inside the return. If either don't have side effects set we don't need to
16014
     wrap the expression in a cleanup point expression.  Note we don't check the
16015
     left hand side of the modify because it should always be a return decl.  */
16016
  if (TREE_CODE (expr) == RETURN_EXPR)
16017
    {
16018
      tree op = TREE_OPERAND (expr, 0);
16019
      if (!op || !TREE_SIDE_EFFECTS (op))
16020
        return expr;
16021
      op = TREE_OPERAND (op, 1);
16022
      if (!TREE_SIDE_EFFECTS (op))
16023
        return expr;
16024
    }
16025
 
16026
  return build1 (CLEANUP_POINT_EXPR, type, expr);
16027
}
16028
 
16029
/* Given a pointer value OP0 and a type TYPE, return a simplified version
16030
   of an indirection through OP0, or NULL_TREE if no simplification is
16031
   possible.  */
16032
 
16033
tree
16034
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16035
{
16036
  tree sub = op0;
16037
  tree subtype;
16038
 
16039
  STRIP_NOPS (sub);
16040
  subtype = TREE_TYPE (sub);
16041
  if (!POINTER_TYPE_P (subtype))
16042
    return NULL_TREE;
16043
 
16044
  if (TREE_CODE (sub) == ADDR_EXPR)
16045
    {
16046
      tree op = TREE_OPERAND (sub, 0);
16047
      tree optype = TREE_TYPE (op);
16048
      /* *&CONST_DECL -> to the value of the const decl.  */
16049
      if (TREE_CODE (op) == CONST_DECL)
16050
        return DECL_INITIAL (op);
16051
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16052
      if (type == optype)
16053
        {
16054
          tree fop = fold_read_from_constant_string (op);
16055
          if (fop)
16056
            return fop;
16057
          else
16058
            return op;
16059
        }
16060
      /* *(foo *)&fooarray => fooarray[0] */
16061
      else if (TREE_CODE (optype) == ARRAY_TYPE
16062
               && type == TREE_TYPE (optype))
16063
        {
16064
          tree type_domain = TYPE_DOMAIN (optype);
16065
          tree min_val = size_zero_node;
16066
          if (type_domain && TYPE_MIN_VALUE (type_domain))
16067
            min_val = TYPE_MIN_VALUE (type_domain);
16068
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
16069
          SET_EXPR_LOCATION (op0, loc);
16070
          return op0;
16071
        }
16072
      /* *(foo *)&complexfoo => __real__ complexfoo */
16073
      else if (TREE_CODE (optype) == COMPLEX_TYPE
16074
               && type == TREE_TYPE (optype))
16075
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
16076
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16077
      else if (TREE_CODE (optype) == VECTOR_TYPE
16078
               && type == TREE_TYPE (optype))
16079
        {
16080
          tree part_width = TYPE_SIZE (type);
16081
          tree index = bitsize_int (0);
16082
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16083
        }
16084
    }
16085
 
16086
  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16087
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16088
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16089
    {
16090
      tree op00 = TREE_OPERAND (sub, 0);
16091
      tree op01 = TREE_OPERAND (sub, 1);
16092
      tree op00type;
16093
 
16094
      STRIP_NOPS (op00);
16095
      op00type = TREE_TYPE (op00);
16096
      if (TREE_CODE (op00) == ADDR_EXPR
16097
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
16098
          && type == TREE_TYPE (TREE_TYPE (op00type)))
16099
        {
16100
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16101
          tree part_width = TYPE_SIZE (type);
16102
          unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16103
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16104
          tree index = bitsize_int (indexi);
16105
 
16106
          if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
16107
            return fold_build3_loc (loc,
16108
                                BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
16109
                                part_width, index);
16110
 
16111
        }
16112
    }
16113
 
16114
 
16115
  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16116
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16117
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16118
    {
16119
      tree op00 = TREE_OPERAND (sub, 0);
16120
      tree op01 = TREE_OPERAND (sub, 1);
16121
      tree op00type;
16122
 
16123
      STRIP_NOPS (op00);
16124
      op00type = TREE_TYPE (op00);
16125
      if (TREE_CODE (op00) == ADDR_EXPR
16126
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
16127
          && type == TREE_TYPE (TREE_TYPE (op00type)))
16128
        {
16129
          tree size = TYPE_SIZE_UNIT (type);
16130
          if (tree_int_cst_equal (size, op01))
16131
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
16132
                                TREE_OPERAND (op00, 0));
16133
        }
16134
    }
16135
 
16136
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16137
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16138
      && type == TREE_TYPE (TREE_TYPE (subtype)))
16139
    {
16140
      tree type_domain;
16141
      tree min_val = size_zero_node;
16142
      sub = build_fold_indirect_ref_loc (loc, sub);
16143
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16144
      if (type_domain && TYPE_MIN_VALUE (type_domain))
16145
        min_val = TYPE_MIN_VALUE (type_domain);
16146
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
16147
      SET_EXPR_LOCATION (op0, loc);
16148
      return op0;
16149
    }
16150
 
16151
  return NULL_TREE;
16152
}
16153
 
16154
/* Builds an expression for an indirection through T, simplifying some
16155
   cases.  */
16156
 
16157
tree
16158
build_fold_indirect_ref_loc (location_t loc, tree t)
16159
{
16160
  tree type = TREE_TYPE (TREE_TYPE (t));
16161
  tree sub = fold_indirect_ref_1 (loc, type, t);
16162
 
16163
  if (sub)
16164
    return sub;
16165
 
16166
  t = build1 (INDIRECT_REF, type, t);
16167
  SET_EXPR_LOCATION (t, loc);
16168
  return t;
16169
}
16170
 
16171
/* Given an INDIRECT_REF T, return either T or a simplified version.  */
16172
 
16173
tree
16174
fold_indirect_ref_loc (location_t loc, tree t)
16175
{
16176
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16177
 
16178
  if (sub)
16179
    return sub;
16180
  else
16181
    return t;
16182
}
16183
 
16184
/* Strip non-trapping, non-side-effecting tree nodes from an expression
16185
   whose result is ignored.  The type of the returned tree need not be
16186
   the same as the original expression.  */
16187
 
16188
tree
16189
fold_ignored_result (tree t)
16190
{
16191
  if (!TREE_SIDE_EFFECTS (t))
16192
    return integer_zero_node;
16193
 
16194
  for (;;)
16195
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
16196
      {
16197
      case tcc_unary:
16198
        t = TREE_OPERAND (t, 0);
16199
        break;
16200
 
16201
      case tcc_binary:
16202
      case tcc_comparison:
16203
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16204
          t = TREE_OPERAND (t, 0);
16205
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16206
          t = TREE_OPERAND (t, 1);
16207
        else
16208
          return t;
16209
        break;
16210
 
16211
      case tcc_expression:
16212
        switch (TREE_CODE (t))
16213
          {
16214
          case COMPOUND_EXPR:
16215
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16216
              return t;
16217
            t = TREE_OPERAND (t, 0);
16218
            break;
16219
 
16220
          case COND_EXPR:
16221
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16222
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16223
              return t;
16224
            t = TREE_OPERAND (t, 0);
16225
            break;
16226
 
16227
          default:
16228
            return t;
16229
          }
16230
        break;
16231
 
16232
      default:
16233
        return t;
16234
      }
16235
}
16236
 
16237
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16238
   This can only be applied to objects of a sizetype.  */
16239
 
16240
tree
16241
round_up_loc (location_t loc, tree value, int divisor)
16242
{
16243
  tree div = NULL_TREE;
16244
 
16245
  gcc_assert (divisor > 0);
16246
  if (divisor == 1)
16247
    return value;
16248
 
16249
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16250
     have to do anything.  Only do this when we are not given a const,
16251
     because in that case, this check is more expensive than just
16252
     doing it.  */
16253
  if (TREE_CODE (value) != INTEGER_CST)
16254
    {
16255
      div = build_int_cst (TREE_TYPE (value), divisor);
16256
 
16257
      if (multiple_of_p (TREE_TYPE (value), value, div))
16258
        return value;
16259
    }
16260
 
16261
  /* If divisor is a power of two, simplify this to bit manipulation.  */
16262
  if (divisor == (divisor & -divisor))
16263
    {
16264
      if (TREE_CODE (value) == INTEGER_CST)
16265
        {
16266
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
16267
          unsigned HOST_WIDE_INT high;
16268
          bool overflow_p;
16269
 
16270
          if ((low & (divisor - 1)) == 0)
16271
            return value;
16272
 
16273
          overflow_p = TREE_OVERFLOW (value);
16274
          high = TREE_INT_CST_HIGH (value);
16275
          low &= ~(divisor - 1);
16276
          low += divisor;
16277
          if (low == 0)
16278
            {
16279
              high++;
16280
              if (high == 0)
16281
                overflow_p = true;
16282
            }
16283
 
16284
          return force_fit_type_double (TREE_TYPE (value), low, high,
16285
                                        -1, overflow_p);
16286
        }
16287
      else
16288
        {
16289
          tree t;
16290
 
16291
          t = build_int_cst (TREE_TYPE (value), divisor - 1);
16292
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
16293
          t = build_int_cst (TREE_TYPE (value), -divisor);
16294
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16295
        }
16296
    }
16297
  else
16298
    {
16299
      if (!div)
16300
        div = build_int_cst (TREE_TYPE (value), divisor);
16301
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16302
      value = size_binop_loc (loc, MULT_EXPR, value, div);
16303
    }
16304
 
16305
  return value;
16306
}
16307
 
16308
/* Likewise, but round down.  */
16309
 
16310
tree
16311
round_down_loc (location_t loc, tree value, int divisor)
16312
{
16313
  tree div = NULL_TREE;
16314
 
16315
  gcc_assert (divisor > 0);
16316
  if (divisor == 1)
16317
    return value;
16318
 
16319
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16320
     have to do anything.  Only do this when we are not given a const,
16321
     because in that case, this check is more expensive than just
16322
     doing it.  */
16323
  if (TREE_CODE (value) != INTEGER_CST)
16324
    {
16325
      div = build_int_cst (TREE_TYPE (value), divisor);
16326
 
16327
      if (multiple_of_p (TREE_TYPE (value), value, div))
16328
        return value;
16329
    }
16330
 
16331
  /* If divisor is a power of two, simplify this to bit manipulation.  */
16332
  if (divisor == (divisor & -divisor))
16333
    {
16334
      tree t;
16335
 
16336
      t = build_int_cst (TREE_TYPE (value), -divisor);
16337
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16338
    }
16339
  else
16340
    {
16341
      if (!div)
16342
        div = build_int_cst (TREE_TYPE (value), divisor);
16343
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16344
      value = size_binop_loc (loc, MULT_EXPR, value, div);
16345
    }
16346
 
16347
  return value;
16348
}
16349
 
16350
/* Returns the pointer to the base of the object addressed by EXP and
16351
   extracts the information about the offset of the access, storing it
16352
   to PBITPOS and POFFSET.  */
16353
 
16354
static tree
16355
split_address_to_core_and_offset (tree exp,
16356
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
16357
{
16358
  tree core;
16359
  enum machine_mode mode;
16360
  int unsignedp, volatilep;
16361
  HOST_WIDE_INT bitsize;
16362
  location_t loc = EXPR_LOCATION (exp);
16363
 
16364
  if (TREE_CODE (exp) == ADDR_EXPR)
16365
    {
16366
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16367
                                  poffset, &mode, &unsignedp, &volatilep,
16368
                                  false);
16369
      core = build_fold_addr_expr_loc (loc, core);
16370
    }
16371
  else
16372
    {
16373
      core = exp;
16374
      *pbitpos = 0;
16375
      *poffset = NULL_TREE;
16376
    }
16377
 
16378
  return core;
16379
}
16380
 
16381
/* Returns true if addresses of E1 and E2 differ by a constant, false
16382
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16383
 
16384
bool
16385
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16386
{
16387
  tree core1, core2;
16388
  HOST_WIDE_INT bitpos1, bitpos2;
16389
  tree toffset1, toffset2, tdiff, type;
16390
 
16391
  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16392
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16393
 
16394
  if (bitpos1 % BITS_PER_UNIT != 0
16395
      || bitpos2 % BITS_PER_UNIT != 0
16396
      || !operand_equal_p (core1, core2, 0))
16397
    return false;
16398
 
16399
  if (toffset1 && toffset2)
16400
    {
16401
      type = TREE_TYPE (toffset1);
16402
      if (type != TREE_TYPE (toffset2))
16403
        toffset2 = fold_convert (type, toffset2);
16404
 
16405
      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16406
      if (!cst_and_fits_in_hwi (tdiff))
16407
        return false;
16408
 
16409
      *diff = int_cst_value (tdiff);
16410
    }
16411
  else if (toffset1 || toffset2)
16412
    {
16413
      /* If only one of the offsets is non-constant, the difference cannot
16414
         be a constant.  */
16415
      return false;
16416
    }
16417
  else
16418
    *diff = 0;
16419
 
16420
  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16421
  return true;
16422
}
16423
 
16424
/* Simplify the floating point expression EXP when the sign of the
16425
   result is not significant.  Return NULL_TREE if no simplification
16426
   is possible.  */
16427
 
16428
tree
16429
fold_strip_sign_ops (tree exp)
16430
{
16431
  tree arg0, arg1;
16432
  location_t loc = EXPR_LOCATION (exp);
16433
 
16434
  switch (TREE_CODE (exp))
16435
    {
16436
    case ABS_EXPR:
16437
    case NEGATE_EXPR:
16438
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16439
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16440
 
16441
    case MULT_EXPR:
16442
    case RDIV_EXPR:
16443
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16444
        return NULL_TREE;
16445
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16446
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16447
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16448
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16449
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
16450
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
16451
      break;
16452
 
16453
    case COMPOUND_EXPR:
16454
      arg0 = TREE_OPERAND (exp, 0);
16455
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16456
      if (arg1)
16457
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16458
      break;
16459
 
16460
    case COND_EXPR:
16461
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16462
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16463
      if (arg0 || arg1)
16464
        return fold_build3_loc (loc,
16465
                            COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16466
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
16467
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
16468
      break;
16469
 
16470
    case CALL_EXPR:
16471
      {
16472
        const enum built_in_function fcode = builtin_mathfn_code (exp);
16473
        switch (fcode)
16474
        {
16475
        CASE_FLT_FN (BUILT_IN_COPYSIGN):
16476
          /* Strip copysign function call, return the 1st argument. */
16477
          arg0 = CALL_EXPR_ARG (exp, 0);
16478
          arg1 = CALL_EXPR_ARG (exp, 1);
16479
          return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16480
 
16481
        default:
16482
          /* Strip sign ops from the argument of "odd" math functions.  */
16483
          if (negate_mathfn_p (fcode))
16484
            {
16485
              arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16486
              if (arg0)
16487
                return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16488
            }
16489
          break;
16490
        }
16491
      }
16492
      break;
16493
 
16494
    default:
16495
      break;
16496
    }
16497
  return NULL_TREE;
16498
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.